diff --git a/Cargo.lock b/Cargo.lock index 8e35435e3f9b7..b0d6aa5659a3c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -719,6 +719,7 @@ version = "0.0.0" dependencies = [ "anstyle-svg", "build_helper", + "camino", "colored", "diff", "getopts", @@ -4671,6 +4672,7 @@ name = "rustdoc-gui-test" version = "0.1.0" dependencies = [ "build_helper", + "camino", "compiletest", "getopts", "walkdir", diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 64ad1c968565e..1bc69abe6f9fa 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -22,6 +22,7 @@ use std::cell::RefCell; use std::marker::PhantomData; use std::ops::{ControlFlow, Deref}; +use borrow_set::LocalsStateAtExit; use root_cx::BorrowCheckRootCtxt; use rustc_abi::FieldIdx; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; @@ -304,33 +305,13 @@ fn do_mir_borrowck<'tcx>( root_cx.set_tainted_by_errors(e); } - let mut local_names = IndexVec::from_elem(None, &input_body.local_decls); - for var_debug_info in &input_body.var_debug_info { - if let VarDebugInfoContents::Place(place) = var_debug_info.value { - if let Some(local) = place.as_local() { - if let Some(prev_name) = local_names[local] - && var_debug_info.name != prev_name - { - span_bug!( - var_debug_info.source_info.span, - "local {:?} has many names (`{}` vs `{}`)", - local, - prev_name, - var_debug_info.name - ); - } - local_names[local] = Some(var_debug_info.name); - } - } - } - // Replace all regions with fresh inference variables. This // requires first making our own copy of the MIR. This copy will // be modified (in place) to contain non-lexical lifetimes. It // will have a lifetime tied to the inference context. let mut body_owned = input_body.clone(); let mut promoted = input_promoted.to_owned(); - let free_regions = nll::replace_regions_in_mir(&infcx, &mut body_owned, &mut promoted); + let universal_regions = nll::replace_regions_in_mir(&infcx, &mut body_owned, &mut promoted); let body = &body_owned; // no further changes let location_table = PoloniusLocationTable::new(body); @@ -355,7 +336,7 @@ fn do_mir_borrowck<'tcx>( } = nll::compute_regions( root_cx, &infcx, - free_regions, + universal_regions, body, &promoted, &location_table, @@ -368,24 +349,23 @@ fn do_mir_borrowck<'tcx>( // Dump MIR results into a file, if that is enabled. This lets us // write unit-tests, as well as helping with debugging. nll::dump_nll_mir(&infcx, body, ®ioncx, &opt_closure_req, &borrow_set); + polonius::dump_polonius_mir( + &infcx, + body, + ®ioncx, + &opt_closure_req, + &borrow_set, + polonius_diagnostics.as_ref(), + ); // We also have a `#[rustc_regions]` annotation that causes us to dump // information. 
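// Editor's illustrative sketch, not part of the patch: the `#[rustc_regions]`
// attribute mentioned in the comment above is an internal, `rustc_attrs`-gated test
// attribute. On a plain function like the one below, `dump_annotation` produces the
// "no external requirements" note that this change now emits directly instead of
// buffering.
#![feature(rustc_attrs)]

#[rustc_regions]
fn annotated<'a>(x: &'a u32) -> &'a u32 {
    x
}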
- let diags_buffer = &mut BorrowckDiagnosticsBuffer::default(); - nll::dump_annotation(&infcx, body, ®ioncx, &opt_closure_req, diags_buffer); - - let movable_coroutine = - // The first argument is the coroutine type passed by value - if let Some(local) = body.local_decls.raw.get(1) - // Get the interior types and args which typeck computed - && let ty::Coroutine(def_id, _) = *local.ty.kind() - && tcx.coroutine_movability(def_id) == hir::Movability::Movable -{ - true -} else { - false -}; + nll::dump_annotation(&infcx, body, ®ioncx, &opt_closure_req); + let movable_coroutine = body.coroutine.is_some() + && tcx.coroutine_movability(def.to_def_id()) == hir::Movability::Movable; + + let diags_buffer = &mut BorrowckDiagnosticsBuffer::default(); // While promoteds should mostly be correct by construction, we need to check them for // invalid moves to detect moving out of arrays:`struct S; fn main() { &([S][0]); }`. for promoted_body in &promoted { @@ -403,7 +383,6 @@ fn do_mir_borrowck<'tcx>( location_table: &location_table, movable_coroutine, fn_self_span_reported: Default::default(), - locals_are_invalidated_at_exit, access_place_error_reported: Default::default(), reservation_error_reported: Default::default(), uninitialized_error_reported: Default::default(), @@ -435,6 +414,26 @@ fn do_mir_borrowck<'tcx>( promoted_mbcx.report_move_errors(); } + let mut local_names = IndexVec::from_elem(None, &body.local_decls); + for var_debug_info in &body.var_debug_info { + if let VarDebugInfoContents::Place(place) = var_debug_info.value { + if let Some(local) = place.as_local() { + if let Some(prev_name) = local_names[local] + && var_debug_info.name != prev_name + { + span_bug!( + var_debug_info.source_info.span, + "local {:?} has many names (`{}` vs `{}`)", + local, + prev_name, + var_debug_info.name + ); + } + local_names[local] = Some(var_debug_info.name); + } + } + } + let mut mbcx = MirBorrowckCtxt { root_cx, infcx: &infcx, @@ -442,7 +441,6 @@ fn do_mir_borrowck<'tcx>( move_data: &move_data, location_table: &location_table, movable_coroutine, - locals_are_invalidated_at_exit, fn_self_span_reported: Default::default(), access_place_error_reported: Default::default(), reservation_error_reported: Default::default(), @@ -455,9 +453,9 @@ fn do_mir_borrowck<'tcx>( local_names, region_names: RefCell::default(), next_region_name: RefCell::new(1), - polonius_output, move_errors: Vec::new(), diags_buffer, + polonius_output: polonius_output.as_deref(), polonius_diagnostics: polonius_diagnostics.as_ref(), }; @@ -474,16 +472,6 @@ fn do_mir_borrowck<'tcx>( mbcx.report_move_errors(); - // If requested, dump polonius MIR. - polonius::dump_polonius_mir( - &infcx, - body, - ®ioncx, - &borrow_set, - polonius_diagnostics.as_ref(), - &opt_closure_req, - ); - // For each non-user used mutable variable, check if it's been assigned from // a user-declared local. If so, then put that local into the used_mut set. 
// Note that this set is expected to be small - only upvars from closures @@ -514,7 +502,6 @@ fn do_mir_borrowck<'tcx>( }; let body_with_facts = if consumer_options.is_some() { - let output_facts = mbcx.polonius_output; Some(Box::new(BodyWithBorrowckFacts { body: body_owned, promoted, @@ -522,7 +509,7 @@ fn do_mir_borrowck<'tcx>( region_inference_context: regioncx, location_table: polonius_input.as_ref().map(|_| location_table), input_facts: polonius_input, - output_facts, + output_facts: polonius_output, })) } else { None @@ -655,13 +642,6 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> { location_table: &'a PoloniusLocationTable, movable_coroutine: bool, - /// This keeps track of whether local variables are free-ed when the function - /// exits even without a `StorageDead`, which appears to be the case for - /// constants. - /// - /// I'm not sure this is the right approach - @eddyb could you try and - /// figure this out? - locals_are_invalidated_at_exit: bool, /// This field keeps track of when borrow errors are reported in the access_place function /// so that there is no duplicate reporting. This field cannot also be used for the conflicting /// borrow errors that is handled by the `reservation_error_reported` field as the inclusion @@ -709,12 +689,11 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> { /// The counter for generating new region names. next_region_name: RefCell, - /// Results of Polonius analysis. - polonius_output: Option>, - diags_buffer: &'a mut BorrowckDiagnosticsBuffer<'infcx, 'tcx>, move_errors: Vec>, + /// Results of Polonius analysis. + polonius_output: Option<&'a PoloniusOutput>, /// When using `-Zpolonius=next`: the data used to compute errors and diagnostics. polonius_diagnostics: Option<&'a PoloniusDiagnosticsContext>, } @@ -938,13 +917,20 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt< | TerminatorKind::Return | TerminatorKind::TailCall { .. } | TerminatorKind::CoroutineDrop => { - // Returning from the function implicitly kills storage for all locals and statics. - // Often, the storage will already have been killed by an explicit - // StorageDead, but we don't always emit those (notably on unwind paths), - // so this "extra check" serves as a kind of backup. - for i in state.borrows.iter() { - let borrow = &self.borrow_set[i]; - self.check_for_invalidation_at_exit(loc, borrow, span); + match self.borrow_set.locals_state_at_exit() { + LocalsStateAtExit::AllAreInvalidated => { + // Returning from the function implicitly kills storage for all locals and statics. + // Often, the storage will already have been killed by an explicit + // StorageDead, but we don't always emit those (notably on unwind paths), + // so this "extra check" serves as a kind of backup. + for i in state.borrows.iter() { + let borrow = &self.borrow_set[i]; + self.check_for_invalidation_at_exit(loc, borrow, span); + } + } + // If we do not implicitly invalidate all locals on exit, + // we check for conflicts when dropping or moving this local. + LocalsStateAtExit::SomeAreInvalidated { has_storage_dead_or_moved: _ } => {} } } @@ -1716,22 +1702,15 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { // we'll have a memory leak) and assume that all statics have a destructor. // // FIXME: allow thread-locals to borrow other thread locals? 
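// Editor's illustrative reproducer, not part of the patch: the thread-local special
// case handled just below is what rejects borrows of `#[thread_local]` statics that
// escape the function ("thread-local variable borrowed past end of function", E0712).
#![feature(thread_local)]

#[thread_local]
static FOO: u8 = 3;

fn assert_static(_: &'static u8) {}

fn main() {
    assert_static(&FOO); // error[E0712]
}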
- - let (might_be_alive, will_be_dropped) = - if self.body.local_decls[root_place.local].is_ref_to_thread_local() { - // Thread-locals might be dropped after the function exits - // We have to dereference the outer reference because - // borrows don't conflict behind shared references. - root_place.projection = TyCtxtConsts::DEREF_PROJECTION; - (true, true) - } else { - (false, self.locals_are_invalidated_at_exit) - }; - - if !will_be_dropped { - debug!("place_is_invalidated_at_exit({:?}) - won't be dropped", place); - return; - } + let might_be_alive = if self.body.local_decls[root_place.local].is_ref_to_thread_local() { + // Thread-locals might be dropped after the function exits + // We have to dereference the outer reference because + // borrows don't conflict behind shared references. + root_place.projection = TyCtxtConsts::DEREF_PROJECTION; + true + } else { + false + }; let sd = if might_be_alive { Deep } else { Shallow(None) }; diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs index 8a2a34f207aa0..fe899bb054fa9 100644 --- a/compiler/rustc_borrowck/src/nll.rs +++ b/compiler/rustc_borrowck/src/nll.rs @@ -21,7 +21,7 @@ use tracing::{debug, instrument}; use crate::borrow_set::BorrowSet; use crate::consumers::ConsumerOptions; -use crate::diagnostics::{BorrowckDiagnosticsBuffer, RegionErrors}; +use crate::diagnostics::RegionErrors; use crate::polonius::PoloniusDiagnosticsContext; use crate::polonius::legacy::{ PoloniusFacts, PoloniusFactsExt, PoloniusLocationTable, PoloniusOutput, @@ -117,11 +117,6 @@ pub(crate) fn compute_regions<'a, 'tcx>( Rc::clone(&location_map), ); - // Create the region inference context, taking ownership of the - // region inference data that was contained in `infcx`, and the - // base constraints generated by the type-check. - let var_infos = infcx.get_region_var_infos(); - // If requested, emit legacy polonius facts. polonius::legacy::emit_facts( &mut polonius_facts, @@ -134,13 +129,8 @@ pub(crate) fn compute_regions<'a, 'tcx>( &constraints, ); - let mut regioncx = RegionInferenceContext::new( - infcx, - var_infos, - constraints, - universal_region_relations, - location_map, - ); + let mut regioncx = + RegionInferenceContext::new(infcx, constraints, universal_region_relations, location_map); // If requested for `-Zpolonius=next`, convert NLL constraints to localized outlives constraints // and use them to compute loan liveness. @@ -297,7 +287,6 @@ pub(super) fn dump_annotation<'tcx, 'infcx>( body: &Body<'tcx>, regioncx: &RegionInferenceContext<'tcx>, closure_region_requirements: &Option>, - diagnostics_buffer: &mut BorrowckDiagnosticsBuffer<'infcx, 'tcx>, ) { let tcx = infcx.tcx; let base_def_id = tcx.typeck_root_def_id(body.source.def_id()); @@ -335,13 +324,11 @@ pub(super) fn dump_annotation<'tcx, 'infcx>( } else { let mut err = infcx.dcx().struct_span_note(def_span, "no external requirements"); regioncx.annotate(tcx, &mut err); - err }; // FIXME(@lcnr): We currently don't dump the inferred hidden types here. 
- - diagnostics_buffer.buffer_non_error(err); + err.emit(); } fn for_each_region_constraint<'tcx>( diff --git a/compiler/rustc_borrowck/src/polonius/dump.rs b/compiler/rustc_borrowck/src/polonius/dump.rs index eb53a98832c33..6a943e1920821 100644 --- a/compiler/rustc_borrowck/src/polonius/dump.rs +++ b/compiler/rustc_borrowck/src/polonius/dump.rs @@ -24,9 +24,9 @@ pub(crate) fn dump_polonius_mir<'tcx>( infcx: &BorrowckInferCtxt<'tcx>, body: &Body<'tcx>, regioncx: &RegionInferenceContext<'tcx>, + closure_region_requirements: &Option>, borrow_set: &BorrowSet<'tcx>, polonius_diagnostics: Option<&PoloniusDiagnosticsContext>, - closure_region_requirements: &Option>, ) { let tcx = infcx.tcx; if !tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs index 569c46e6403f2..c82c7341f0287 100644 --- a/compiler/rustc_borrowck/src/region_infer/mod.rs +++ b/compiler/rustc_borrowck/src/region_infer/mod.rs @@ -9,7 +9,7 @@ use rustc_errors::Diag; use rustc_hir::def_id::CRATE_DEF_ID; use rustc_index::IndexVec; use rustc_infer::infer::outlives::test_type_match; -use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound, VerifyIfEq}; +use rustc_infer::infer::region_constraints::{GenericKind, VerifyBound, VerifyIfEq}; use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin}; use rustc_middle::bug; use rustc_middle::mir::{ @@ -145,7 +145,7 @@ pub struct RegionInferenceContext<'tcx> { /// variables are identified by their index (`RegionVid`). The /// definition contains information about where the region came /// from as well as its final inferred value. - pub(crate) definitions: IndexVec>, + pub(crate) definitions: Frozen>>, /// The liveness constraints added to each region. For most /// regions, these start out empty and steadily grow, though for @@ -385,6 +385,26 @@ fn sccs_info<'tcx>(infcx: &BorrowckInferCtxt<'tcx>, sccs: &ConstraintSccs) { debug!("SCC edges {:#?}", scc_node_to_edges); } +fn create_definitions<'tcx>( + infcx: &BorrowckInferCtxt<'tcx>, + universal_regions: &UniversalRegions<'tcx>, +) -> Frozen>> { + // Create a RegionDefinition for each inference variable. + let mut definitions: IndexVec<_, _> = infcx + .get_region_var_infos() + .iter() + .map(|info| RegionDefinition::new(info.universe, info.origin)) + .collect(); + + // Add the external name for all universal regions. + for (external_name, variable) in universal_regions.named_universal_regions_iter() { + debug!("region {variable:?} has external name {external_name:?}"); + definitions[variable].external_name = Some(external_name); + } + + Frozen::freeze(definitions) +} + impl<'tcx> RegionInferenceContext<'tcx> { /// Creates a new region inference context with a total of /// `num_region_variables` valid inference variables; the first N @@ -395,7 +415,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// of constraints produced by the MIR type check. pub(crate) fn new( infcx: &BorrowckInferCtxt<'tcx>, - var_infos: VarInfos, constraints: MirTypeckRegionConstraints<'tcx>, universal_region_relations: Frozen>, location_map: Rc, @@ -426,11 +445,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { infcx.set_tainted_by_errors(guar); } - // Create a RegionDefinition for each inference variable. 
- let definitions: IndexVec<_, _> = var_infos - .iter() - .map(|info| RegionDefinition::new(info.universe, info.origin)) - .collect(); + let definitions = create_definitions(infcx, &universal_regions); let constraint_sccs = outlives_constraints.add_outlives_static(&universal_regions, &definitions); @@ -526,18 +541,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// means that the `R1: !1` constraint here will cause /// `R1` to become `'static`. fn init_free_and_bound_regions(&mut self) { - // Update the names (if any) - // This iterator has unstable order but we collect it all into an IndexVec - for (external_name, variable) in - self.universal_region_relations.universal_regions.named_universal_regions_iter() - { - debug!( - "init_free_and_bound_regions: region {:?} has external name {:?}", - variable, external_name - ); - self.definitions[variable].external_name = Some(external_name); - } - for variable in self.definitions.indices() { let scc = self.constraint_sccs.scc(variable); diff --git a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs index ccb257ae09367..57516565147eb 100644 --- a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs +++ b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs @@ -21,7 +21,6 @@ use crate::{ClosureOutlivesSubject, ClosureRegionRequirements, ConstraintCategor pub(crate) struct ConstraintConversion<'a, 'tcx> { infcx: &'a InferCtxt<'tcx>, - tcx: TyCtxt<'tcx>, universal_regions: &'a UniversalRegions<'tcx>, /// Each RBP `GK: 'a` is assumed to be true. These encode /// relationships like `T: 'a` that are added via implicit bounds @@ -34,7 +33,6 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> { /// logic expecting to see (e.g.) `ReStatic`, and if we supplied /// our special inference variable there, we would mess that up. region_bound_pairs: &'a RegionBoundPairs<'tcx>, - implicit_region_bound: ty::Region<'tcx>, param_env: ty::ParamEnv<'tcx>, known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>], locations: Locations, @@ -49,7 +47,6 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { infcx: &'a InferCtxt<'tcx>, universal_regions: &'a UniversalRegions<'tcx>, region_bound_pairs: &'a RegionBoundPairs<'tcx>, - implicit_region_bound: ty::Region<'tcx>, param_env: ty::ParamEnv<'tcx>, known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>], locations: Locations, @@ -59,10 +56,8 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { ) -> Self { Self { infcx, - tcx: infcx.tcx, universal_regions, region_bound_pairs, - implicit_region_bound, param_env, known_type_outlives_obligations, locations, @@ -96,7 +91,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { // into a vector. These are the regions that we will be // relating to one another. 
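// Editor's illustrative surface example, not part of the patch: the closure
// requirements being mapped below arise when a closure body forces an outlives
// relation between the caller's regions; the requirement is propagated to and
// checked in the enclosing function, e.g. `'a: 'b` here.
fn caller<'a: 'b, 'b>(x: &'a u32, out: &mut &'b u32) {
    let mut write = || *out = x; // closure's inferred requirement: `'a: 'b`
    write();
}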
let closure_mapping = &UniversalRegions::closure_mapping( - self.tcx, + self.infcx.tcx, closure_args, closure_requirements.num_external_vids, closure_def_id, @@ -111,7 +106,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { let subject = match outlives_requirement.subject { ClosureOutlivesSubject::Region(re) => closure_mapping[re].into(), ClosureOutlivesSubject::Ty(subject_ty) => { - subject_ty.instantiate(self.tcx, |vid| closure_mapping[vid]).into() + subject_ty.instantiate(self.infcx.tcx, |vid| closure_mapping[vid]).into() } }; @@ -127,14 +122,14 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { predicate: ty::OutlivesPredicate<'tcx, ty::GenericArg<'tcx>>, constraint_category: ConstraintCategory<'tcx>, ) { + let tcx = self.infcx.tcx; debug!("generate: constraints at: {:#?}", self.locations); // Extract out various useful fields we'll need below. let ConstraintConversion { - tcx, infcx, + universal_regions, region_bound_pairs, - implicit_region_bound, known_type_outlives_obligations, .. } = *self; @@ -145,7 +140,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { break; } - if !self.tcx.recursion_limit().value_within_limit(iteration) { + if !tcx.recursion_limit().value_within_limit(iteration) { bug!( "FIXME(-Znext-solver): Overflowed when processing region obligations: {outlives_predicates:#?}" ); @@ -170,10 +165,11 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { ); } + let implicit_region_bound = + ty::Region::new_var(tcx, universal_regions.implicit_region_bound()); // we don't actually use this for anything, but // the `TypeOutlives` code needs an origin. let origin = infer::RelateParamBound(self.span, t1, None); - TypeOutlives::new( &mut *self, tcx, @@ -205,7 +201,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { /// are dealt with during trait solving. 
fn replace_placeholders_with_nll>>(&mut self, value: T) -> T { if value.has_placeholders() { - fold_regions(self.tcx, value, |r, _| match r.kind() { + fold_regions(self.infcx.tcx, value, |r, _| match r.kind() { ty::RePlaceholder(placeholder) => { self.constraints.placeholder_region(self.infcx, placeholder) } diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs index eaac633b512d6..536a27763d29c 100644 --- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs +++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs @@ -49,14 +49,12 @@ pub(crate) struct CreateResult<'tcx> { pub(crate) fn create<'tcx>( infcx: &InferCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, - implicit_region_bound: ty::Region<'tcx>, universal_regions: UniversalRegions<'tcx>, constraints: &mut MirTypeckRegionConstraints<'tcx>, ) -> CreateResult<'tcx> { UniversalRegionRelationsBuilder { infcx, param_env, - implicit_region_bound, constraints, universal_regions, region_bound_pairs: Default::default(), @@ -181,7 +179,6 @@ struct UniversalRegionRelationsBuilder<'a, 'tcx> { infcx: &'a InferCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, universal_regions: UniversalRegions<'tcx>, - implicit_region_bound: ty::Region<'tcx>, constraints: &'a mut MirTypeckRegionConstraints<'tcx>, // outputs: @@ -320,7 +317,6 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { self.infcx, &self.universal_regions, &self.region_bound_pairs, - self.implicit_region_bound, param_env, &known_type_outlives_obligations, Locations::All(span), diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index a17dff5d2715e..05e0bb3f9f343 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -113,7 +113,6 @@ pub(crate) fn type_check<'a, 'tcx>( move_data: &MoveData<'tcx>, location_map: Rc, ) -> MirTypeckResults<'tcx> { - let implicit_region_bound = ty::Region::new_var(infcx.tcx, universal_regions.fr_fn_body); let mut constraints = MirTypeckRegionConstraints { placeholder_indices: PlaceholderIndices::default(), placeholder_index_to_region: IndexVec::default(), @@ -129,13 +128,7 @@ pub(crate) fn type_check<'a, 'tcx>( region_bound_pairs, normalized_inputs_and_output, known_type_outlives_obligations, - } = free_region_relations::create( - infcx, - infcx.param_env, - implicit_region_bound, - universal_regions, - &mut constraints, - ); + } = free_region_relations::create(infcx, infcx.param_env, universal_regions, &mut constraints); let pre_obligations = infcx.take_registered_region_obligations(); assert!( @@ -160,7 +153,6 @@ pub(crate) fn type_check<'a, 'tcx>( user_type_annotations: &body.user_type_annotations, region_bound_pairs, known_type_outlives_obligations, - implicit_region_bound, reported_errors: Default::default(), universal_regions: &universal_region_relations.universal_regions, location_table, @@ -226,7 +218,6 @@ struct TypeChecker<'a, 'tcx> { user_type_annotations: &'a CanonicalUserTypeAnnotations<'tcx>, region_bound_pairs: RegionBoundPairs<'tcx>, known_type_outlives_obligations: Vec>, - implicit_region_bound: ty::Region<'tcx>, reported_errors: FxIndexSet<(Ty<'tcx>, Span)>, universal_regions: &'a UniversalRegions<'tcx>, location_table: &'a PoloniusLocationTable, @@ -422,7 +413,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.infcx, self.universal_regions, &self.region_bound_pairs, - self.implicit_region_bound, self.infcx.param_env, 
&self.known_type_outlives_obligations, locations, @@ -2507,7 +2497,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.infcx, self.universal_regions, &self.region_bound_pairs, - self.implicit_region_bound, self.infcx.param_env, &self.known_type_outlives_obligations, locations, diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index 5c57ab99a8592..c11e14d214c42 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -438,6 +438,10 @@ impl<'tcx> UniversalRegions<'tcx> { } } + pub(crate) fn implicit_region_bound(&self) -> RegionVid { + self.fr_fn_body + } + pub(crate) fn tainted_by_errors(&self) -> Option { self.indices.tainted_by_errors.get() } diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index 77ec598e62a17..460a06f9c0604 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -12,7 +12,7 @@ use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId}; use rustc_ast_pretty::pprust; use rustc_attr_parsing::{AttributeKind, find_attr}; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; -use rustc_errors::{Applicability, ErrorGuaranteed}; +use rustc_errors::{Applicability, Diag, ErrorGuaranteed}; use rustc_feature::Features; use rustc_hir as hir; use rustc_lint_defs::BuiltinLintDiag; @@ -27,19 +27,18 @@ use rustc_span::hygiene::Transparency; use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, kw, sym}; use tracing::{debug, instrument, trace, trace_span}; -use super::diagnostics; use super::macro_parser::{NamedMatches, NamedParseResult}; +use super::{SequenceRepetition, diagnostics}; use crate::base::{ DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult, SyntaxExtension, SyntaxExtensionKind, TTMacroExpander, }; use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment}; -use crate::mbe; use crate::mbe::diagnostics::{annotate_doc_comment, parse_failure_msg}; -use crate::mbe::macro_check; use crate::mbe::macro_parser::NamedMatch::*; use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser}; use crate::mbe::transcribe::transcribe; +use crate::mbe::{self, KleeneOp, macro_check}; pub(crate) struct ParserAnyMacro<'a> { parser: Parser<'a>, @@ -640,6 +639,37 @@ fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool { } } +/// Checks if a `vis` nonterminal fragment is unnecessarily wrapped in an optional repetition. +/// +/// When a `vis` fragment (which can already be empty) is wrapped in `$(...)?`, +/// this suggests removing the redundant repetition syntax since it provides no additional benefit. 
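// Editor's illustrative reproducer, not part of the patch: a matcher like the one
// below already fails with "repetition matches empty token tree" because `vis` can
// match nothing; the new note and suggestion explain that the `$( ... )?` wrapper
// is redundant and can be removed.
macro_rules! takes_vis {
    ($($v:vis)? struct $name:ident;) => {
        $($v)? struct $name;
    };
}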
+fn check_redundant_vis_repetition( + err: &mut Diag<'_>, + sess: &Session, + seq: &SequenceRepetition, + span: &DelimSpan, +) { + let is_zero_or_one: bool = seq.kleene.op == KleeneOp::ZeroOrOne; + let is_vis = seq.tts.first().map_or(false, |tt| { + matches!(tt, mbe::TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis))) + }); + + if is_vis && is_zero_or_one { + err.note("a `vis` fragment can already be empty"); + err.multipart_suggestion( + "remove the `$(` and `)?`", + vec![ + ( + sess.source_map().span_extend_to_prev_char_before(span.open, '$', true), + "".to_string(), + ), + (span.close.with_hi(seq.kleene.span.hi()), "".to_string()), + ], + Applicability::MaybeIncorrect, + ); + } +} + /// Checks that the lhs contains no repetition which could match an empty token /// tree, because then the matcher would hang indefinitely. fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> { @@ -654,8 +684,10 @@ fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), TokenTree::Sequence(span, seq) => { if is_empty_token_tree(sess, seq) { let sp = span.entire(); - let guar = sess.dcx().span_err(sp, "repetition matches empty token tree"); - return Err(guar); + let mut err = + sess.dcx().struct_span_err(sp, "repetition matches empty token tree"); + check_redundant_vis_repetition(&mut err, sess, seq, span); + return Err(err.emit()); } check_lhs_no_empty_seq(sess, &seq.tts)? } diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 98213affc5bfc..36e375c778d3f 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -565,6 +565,8 @@ declare_features! ( (incomplete, mut_ref, "1.79.0", Some(123076)), /// Allows using `#[naked]` on functions. (unstable, naked_functions, "1.9.0", Some(90957)), + /// Allows using `#[naked]` on `extern "Rust"` functions. + (unstable, naked_functions_rustic_abi, "CURRENT_RUSTC_VERSION", Some(138997)), /// Allows using `#[target_feature(enable = "...")]` on `#[naked]` on functions. (unstable, naked_functions_target_feature, "1.86.0", Some(138568)), /// Allows specifying the as-needed link modifier diff --git a/compiler/rustc_hir_typeck/src/cast.rs b/compiler/rustc_hir_typeck/src/cast.rs index 8f5fddd19d7f6..b19d9efe2c6f5 100644 --- a/compiler/rustc_hir_typeck/src/cast.rs +++ b/compiler/rustc_hir_typeck/src/cast.rs @@ -1042,30 +1042,31 @@ impl<'a, 'tcx> CastCheck<'tcx> { m_cast: ty::TypeAndMut<'tcx>, ) -> Result> { // array-ptr-cast: allow mut-to-mut, mut-to-const, const-to-const - if m_expr.mutbl >= m_cast.mutbl { - if let ty::Array(ety, _) = m_expr.ty.kind() { - // Due to the limitations of LLVM global constants, - // region pointers end up pointing at copies of - // vector elements instead of the original values. - // To allow raw pointers to work correctly, we - // need to special-case obtaining a raw pointer - // from a region pointer to a vector. - - // Coerce to a raw pointer so that we generate RawPtr in MIR. - let array_ptr_type = Ty::new_ptr(fcx.tcx, m_expr.ty, m_expr.mutbl); - fcx.coerce(self.expr, self.expr_ty, array_ptr_type, AllowTwoPhase::No, None) - .unwrap_or_else(|_| { - bug!( + if m_expr.mutbl >= m_cast.mutbl + && let ty::Array(ety, _) = m_expr.ty.kind() + && fcx.can_eq(fcx.param_env, *ety, m_cast.ty) + { + // Due to the limitations of LLVM global constants, + // region pointers end up pointing at copies of + // vector elements instead of the original values. 
+ // To allow raw pointers to work correctly, we + // need to special-case obtaining a raw pointer + // from a region pointer to a vector. + + // Coerce to a raw pointer so that we generate RawPtr in MIR. + let array_ptr_type = Ty::new_ptr(fcx.tcx, m_expr.ty, m_expr.mutbl); + fcx.coerce(self.expr, self.expr_ty, array_ptr_type, AllowTwoPhase::No, None) + .unwrap_or_else(|_| { + bug!( "could not cast from reference to array to pointer to array ({:?} to {:?})", self.expr_ty, array_ptr_type, ) - }); + }); - // this will report a type mismatch if needed - fcx.demand_eqtype(self.span, *ety, m_cast.ty); - return Ok(CastKind::ArrayPtrCast); - } + // this will report a type mismatch if needed + fcx.demand_eqtype(self.span, *ety, m_cast.ty); + return Ok(CastKind::ArrayPtrCast); } Err(CastError::IllegalCast) diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index f1571cf4c8317..fd899425f62d2 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -103,15 +103,6 @@ fn coerce_mutbls<'tcx>( if from_mutbl >= to_mutbl { Ok(()) } else { Err(TypeError::Mutability) } } -/// Do not require any adjustments, i.e. coerce `x -> x`. -fn identity(_: Ty<'_>) -> Vec> { - vec![] -} - -fn simple<'tcx>(kind: Adjust) -> impl FnOnce(Ty<'tcx>) -> Vec> { - move |target| vec![Adjustment { kind, target }] -} - /// This always returns `Ok(...)`. fn success<'tcx>( adj: Vec>, @@ -131,7 +122,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { Coerce { fcx, cause, allow_two_phase, use_lub: false, coerce_never } } - fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { + fn unify_raw(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { debug!("unify(a: {:?}, b: {:?}, use_lub: {})", a, b, self.use_lub); self.commit_if_ok(|_| { let at = self.at(&self.cause, self.fcx.param_env); @@ -161,13 +152,30 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { }) } + /// Unify two types (using sub or lub). + fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { + self.unify_raw(a, b) + .and_then(|InferOk { value: ty, obligations }| success(vec![], ty, obligations)) + } + /// Unify two types (using sub or lub) and produce a specific coercion. - fn unify_and(&self, a: Ty<'tcx>, b: Ty<'tcx>, f: F) -> CoerceResult<'tcx> - where - F: FnOnce(Ty<'tcx>) -> Vec>, - { - self.unify(a, b) - .and_then(|InferOk { value: ty, obligations }| success(f(ty), ty, obligations)) + fn unify_and( + &self, + a: Ty<'tcx>, + b: Ty<'tcx>, + adjustments: impl IntoIterator>, + final_adjustment: Adjust, + ) -> CoerceResult<'tcx> { + self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| { + success( + adjustments + .into_iter() + .chain(std::iter::once(Adjustment { target: ty, kind: final_adjustment })) + .collect(), + ty, + obligations, + ) + }) } #[instrument(skip(self))] @@ -180,10 +188,14 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // Coercing from `!` to any type is allowed: if a.is_never() { if self.coerce_never { - return success(simple(Adjust::NeverToAny)(b), b, PredicateObligations::new()); + return success( + vec![Adjustment { kind: Adjust::NeverToAny, target: b }], + b, + PredicateObligations::new(), + ); } else { // Otherwise the only coercion we can do is unification. - return self.unify_and(a, b, identity); + return self.unify(a, b); } } @@ -191,7 +203,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // we have no information about the source type. This will always // ultimately fall back to some form of subtyping. 
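// Editor's illustrative surface example, not part of the patch (a plausible case,
// not taken from the test suite): in the branch just below, the coercion source is
// still an unresolved inference variable, so the coercion falls back to plain
// unification.
fn infer_var_demo(flag: bool) -> u8 {
    // The type of `Default::default()` is still an inference variable when the two
    // arm types are coerced to a common type.
    if flag { Default::default() } else { 1u8 }
}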
if a.is_ty_var() { - return self.coerce_from_inference_variable(a, b, identity); + return self.coerce_from_inference_variable(a, b); } // Consider coercing the subtype to a DST @@ -247,7 +259,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { ty::FnPtr(a_sig_tys, a_hdr) => { // We permit coercion of fn pointers to drop the // unsafe qualifier. - self.coerce_from_fn_pointer(a, a_sig_tys.with(a_hdr), b) + self.coerce_from_fn_pointer(a_sig_tys.with(a_hdr), b) } ty::Closure(closure_def_id_a, args_a) => { // Non-capturing closures are coercible to @@ -257,7 +269,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } _ => { // Otherwise, just use unification rules. - self.unify_and(a, b, identity) + self.unify(a, b) } } } @@ -265,12 +277,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { /// Coercing *from* an inference variable. In this case, we have no information /// about the source type, so we can't really do a true coercion and we always /// fall back to subtyping (`unify_and`). - fn coerce_from_inference_variable( - &self, - a: Ty<'tcx>, - b: Ty<'tcx>, - make_adjustments: impl FnOnce(Ty<'tcx>) -> Vec>, - ) -> CoerceResult<'tcx> { + fn coerce_from_inference_variable(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { debug!("coerce_from_inference_variable(a={:?}, b={:?})", a, b); assert!(a.is_ty_var() && self.shallow_resolve(a) == a); assert!(self.shallow_resolve(b) == b); @@ -298,12 +305,11 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { "coerce_from_inference_variable: two inference variables, target_ty={:?}, obligations={:?}", target_ty, obligations ); - let adjustments = make_adjustments(target_ty); - InferResult::Ok(InferOk { value: (adjustments, target_ty), obligations }) + success(vec![], target_ty, obligations) } else { // One unresolved type variable: just apply subtyping, we may be able // to do something useful. - self.unify_and(a, b, make_adjustments) + self.unify(a, b) } } @@ -331,7 +337,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { coerce_mutbls(mt_a.mutbl, mutbl_b)?; (r_a, mt_a) } - _ => return self.unify_and(a, b, identity), + _ => return self.unify(a, b), }; let span = self.cause.span; @@ -437,7 +443,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { referent_ty, mutbl_b, // [1] above ); - match self.unify(derefd_ty_a, b) { + match self.unify_raw(derefd_ty_a, b) { Ok(ok) => { found = Some(ok); break; @@ -579,13 +585,13 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // We only have the latter, so we use an inference variable // for the former and let type inference do the rest. let coerce_target = self.next_ty_var(self.cause.span); - let mut coercion = self.unify_and(coerce_target, target, |target| { - let unsize = Adjustment { kind: Adjust::Pointer(PointerCoercion::Unsize), target }; - match reborrow { - None => vec![unsize], - Some((ref deref, ref autoref)) => vec![deref.clone(), autoref.clone(), unsize], - } - })?; + + let mut coercion = self.unify_and( + coerce_target, + target, + reborrow.into_iter().flat_map(|(deref, autoref)| [deref, autoref]), + Adjust::Pointer(PointerCoercion::Unsize), + )?; let mut selcx = traits::SelectionContext::new(self); @@ -708,7 +714,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { && let ty::Dynamic(b_data, _, ty::DynStar) = b.kind() && a_data.principal_def_id() == b_data.principal_def_id() { - return self.unify_and(a, b, |_| vec![]); + return self.unify(a, b); } // Check the obligations of the cast -- for example, when casting @@ -808,23 +814,15 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // To complete the reborrow, we need to make sure we can unify the inner types, and if so we // add the adjustments. 
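// Editor's illustrative surface example, not part of the patch (assumes
// `Adjust::ReborrowPin` below backs the unstable `pin_ergonomics` reborrowing): the
// coercion performs the reborrow that is otherwise written by hand with `as_mut`,
// so a pinned mutable reference can be passed along without being moved.
use std::pin::Pin;

fn takes_pin(_: Pin<&mut i32>) {}

fn pin_demo(mut p: Pin<&mut i32>) {
    takes_pin(p.as_mut()); // explicit reborrow
    takes_pin(p.as_mut()); // under the gate, a plain `takes_pin(p)` coerces the same way
}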
- self.unify_and(a, b, |_inner_ty| { - vec![Adjustment { kind: Adjust::ReborrowPin(mut_b), target: b }] - }) + self.unify_and(a, b, [], Adjust::ReborrowPin(mut_b)) } - fn coerce_from_safe_fn( + fn coerce_from_safe_fn( &self, - a: Ty<'tcx>, fn_ty_a: ty::PolyFnSig<'tcx>, b: Ty<'tcx>, - to_unsafe: F, - normal: G, - ) -> CoerceResult<'tcx> - where - F: FnOnce(Ty<'tcx>) -> Vec>, - G: FnOnce(Ty<'tcx>) -> Vec>, - { + adjustment: Option, + ) -> CoerceResult<'tcx> { self.commit_if_ok(|snapshot| { let outer_universe = self.infcx.universe(); @@ -833,9 +831,19 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { && hdr_b.safety.is_unsafe() { let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a); - self.unify_and(unsafe_a, b, to_unsafe) + self.unify_and( + unsafe_a, + b, + adjustment + .map(|kind| Adjustment { kind, target: Ty::new_fn_ptr(self.tcx, fn_ty_a) }), + Adjust::Pointer(PointerCoercion::UnsafeFnPointer), + ) } else { - self.unify_and(a, b, normal) + let a = Ty::new_fn_ptr(self.tcx, fn_ty_a); + match adjustment { + Some(adjust) => self.unify_and(a, b, [], adjust), + None => self.unify(a, b), + } }; // FIXME(#73154): This is a hack. Currently LUB can generate @@ -852,7 +860,6 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { fn coerce_from_fn_pointer( &self, - a: Ty<'tcx>, fn_ty_a: ty::PolyFnSig<'tcx>, b: Ty<'tcx>, ) -> CoerceResult<'tcx> { @@ -861,15 +868,9 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { //! let b = self.shallow_resolve(b); - debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b); - - self.coerce_from_safe_fn( - a, - fn_ty_a, - b, - simple(Adjust::Pointer(PointerCoercion::UnsafeFnPointer)), - identity, - ) + debug!(?fn_ty_a, ?b, "coerce_from_fn_pointer"); + + self.coerce_from_safe_fn(fn_ty_a, b, None) } fn coerce_from_fn_item(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { @@ -916,30 +917,16 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { self.at(&self.cause, self.param_env).normalize(a_sig); obligations.extend(o1); - let a_fn_pointer = Ty::new_fn_ptr(self.tcx, a_sig); let InferOk { value, obligations: o2 } = self.coerce_from_safe_fn( - a_fn_pointer, a_sig, b, - |unsafe_ty| { - vec![ - Adjustment { - kind: Adjust::Pointer(PointerCoercion::ReifyFnPointer), - target: a_fn_pointer, - }, - Adjustment { - kind: Adjust::Pointer(PointerCoercion::UnsafeFnPointer), - target: unsafe_ty, - }, - ] - }, - simple(Adjust::Pointer(PointerCoercion::ReifyFnPointer)), + Some(Adjust::Pointer(PointerCoercion::ReifyFnPointer)), )?; obligations.extend(o2); Ok(InferOk { value, obligations }) } - _ => self.unify_and(a, b, identity), + _ => self.unify(a, b), } } @@ -983,10 +970,11 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { self.unify_and( pointer_ty, b, - simple(Adjust::Pointer(PointerCoercion::ClosureFnPointer(safety))), + [], + Adjust::Pointer(PointerCoercion::ClosureFnPointer(safety)), ) } - _ => self.unify_and(a, b, identity), + _ => self.unify(a, b), } } @@ -1001,7 +989,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let (is_ref, mt_a) = match *a.kind() { ty::Ref(_, ty, mutbl) => (true, ty::TypeAndMut { ty, mutbl }), ty::RawPtr(ty, mutbl) => (false, ty::TypeAndMut { ty, mutbl }), - _ => return self.unify_and(a, b, identity), + _ => return self.unify(a, b), }; coerce_mutbls(mt_a.mutbl, mutbl_b)?; @@ -1011,16 +999,16 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // representation, we still register an Adjust::DerefRef so that // regionck knows that the region for `a` must be valid here. 
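// Editor's illustrative surface example, not part of the patch: the `is_ref` branch
// right below is the one exercised by an implicit reference-to-raw-pointer coercion;
// it now passes the deref step as a plain adjustment list plus a final raw-pointer
// borrow instead of building the vector in a closure.
fn raw_ptr_demo() {
    let x = 5i32;
    let r: &i32 = &x;
    let p: *const i32 = r; // `&i32 -> *const i32`: Deref(None), then Borrow(RawPtr)
    let _ = p;
}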
if is_ref { - self.unify_and(a_raw, b, |target| { - vec![ - Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }, - Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), target }, - ] - }) + self.unify_and( + a_raw, + b, + [Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }], + Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), + ) } else if mt_a.mutbl != mutbl_b { - self.unify_and(a_raw, b, simple(Adjust::Pointer(PointerCoercion::MutToConstPointer))) + self.unify_and(a_raw, b, [], Adjust::Pointer(PointerCoercion::MutToConstPointer)) } else { - self.unify_and(a_raw, b, identity) + self.unify(a_raw, b) } } } @@ -1118,9 +1106,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let cause = self.cause(DUMMY_SP, ObligationCauseCode::ExprAssignable); // We don't ever need two-phase here since we throw out the result of the coercion. let coerce = Coerce::new(self, cause, AllowTwoPhase::No, true); - coerce - .autoderef(DUMMY_SP, expr_ty) - .find_map(|(ty, steps)| self.probe(|_| coerce.unify(ty, target)).ok().map(|_| steps)) + coerce.autoderef(DUMMY_SP, expr_ty).find_map(|(ty, steps)| { + self.probe(|_| coerce.unify_raw(ty, target)).ok().map(|_| steps) + }) } /// Given a type, this function will calculate and return the type given diff --git a/compiler/rustc_index_macros/src/newtype.rs b/compiler/rustc_index_macros/src/newtype.rs index f0b58eabbff9a..eedbe630cf2c4 100644 --- a/compiler/rustc_index_macros/src/newtype.rs +++ b/compiler/rustc_index_macros/src/newtype.rs @@ -257,6 +257,13 @@ impl Parse for Newtype { } } + impl std::ops::AddAssign for #name { + #[inline] + fn add_assign(&mut self, other: usize) { + *self = *self + other; + } + } + impl rustc_index::Idx for #name { #[inline] fn new(value: usize) -> Self { diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index 9b5c564d3324b..12384690ff398 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -606,6 +606,11 @@ fn register_builtins(store: &mut LintStore) { "converted into hard error, see issue #127323 \ for more information", ); + store.register_removed( + "undefined_naked_function_abi", + "converted into hard error, see PR #139001 \ + for more information", + ); } fn register_internals(store: &mut LintStore) { diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 8a761a0a0969b..b25d2a30681c0 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -110,7 +110,6 @@ declare_lint_pass! { UNCONDITIONAL_PANIC, UNCONDITIONAL_RECURSION, UNCOVERED_PARAM_IN_PROJECTION, - UNDEFINED_NAKED_FUNCTION_ABI, UNEXPECTED_CFGS, UNFULFILLED_LINT_EXPECTATIONS, UNINHABITED_STATIC, @@ -2830,39 +2829,6 @@ declare_lint! { "detects deprecation attributes with no effect", } -declare_lint! { - /// The `undefined_naked_function_abi` lint detects naked function definitions that - /// either do not specify an ABI or specify the Rust ABI. - /// - /// ### Example - /// - /// ```rust - /// #![feature(asm_experimental_arch, naked_functions)] - /// - /// use std::arch::naked_asm; - /// - /// #[naked] - /// pub fn default_abi() -> u32 { - /// unsafe { naked_asm!(""); } - /// } - /// - /// #[naked] - /// pub extern "Rust" fn rust_abi() -> u32 { - /// unsafe { naked_asm!(""); } - /// } - /// ``` - /// - /// {{produces}} - /// - /// ### Explanation - /// - /// The Rust ABI is currently undefined. Therefore, naked functions should - /// specify a non-Rust ABI. 
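// Editor's illustrative sketch, not part of the patch (mirrors the removed lint's
// doc example): with `undefined_naked_function_abi` removed and turned into a hard
// error, keeping the Rust ABI on a naked function is only accepted behind the
// `naked_functions_rustic_abi` gate added earlier in this patch.
#![feature(naked_functions, naked_functions_rustic_abi)]

use std::arch::naked_asm;

#[naked]
pub extern "Rust" fn rust_abi() -> u32 {
    unsafe { naked_asm!("") }
}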
- pub UNDEFINED_NAKED_FUNCTION_ABI, - Warn, - "undefined naked function ABI" -} - declare_lint! { /// The `ineffective_unstable_trait_impl` lint detects `#[unstable]` attributes which are not used. /// diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index 98273a05446a7..d1bbb0598fec5 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -89,7 +89,6 @@ macro_rules! arena_types { [] name_set: rustc_data_structures::unord::UnordSet, [] autodiff_item: rustc_ast::expand::autodiff_attrs::AutoDiffItem, [] ordered_name_set: rustc_data_structures::fx::FxIndexSet, - [] pats: rustc_middle::ty::PatternKind<'tcx>, [] valtree: rustc_middle::ty::ValTreeKind<'tcx>, // Note that this deliberately duplicates items in the `rustc_hir::arena`, diff --git a/compiler/rustc_middle/src/ty/consts.rs b/compiler/rustc_middle/src/ty/consts.rs index ae1c6c670cbca..dc5fe2d8f8b06 100644 --- a/compiler/rustc_middle/src/ty/consts.rs +++ b/compiler/rustc_middle/src/ty/consts.rs @@ -3,6 +3,7 @@ use std::borrow::Cow; use rustc_data_structures::intern::Interned; use rustc_error_messages::MultiSpan; use rustc_macros::HashStable; +use rustc_type_ir::walk::TypeWalker; use rustc_type_ir::{self as ir, TypeFlags, WithCachedTypeInfo}; use crate::ty::{self, Ty, TyCtxt}; @@ -243,4 +244,18 @@ impl<'tcx> Const<'tcx> { pub fn is_ct_infer(self) -> bool { matches!(self.kind(), ty::ConstKind::Infer(_)) } + + /// Iterator that walks `self` and any types reachable from + /// `self`, in depth-first order. Note that just walks the types + /// that appear in `self`, it does not descend into the fields of + /// structs or variants. For example: + /// + /// ```text + /// isize => { isize } + /// Foo> => { Foo>, Bar, isize } + /// [isize] => { [isize], isize } + /// ``` + pub fn walk(self) -> TypeWalker> { + TypeWalker::new(self.into()) + } } diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index abf6cbbcd8774..6e5ac13bd2cf3 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -870,7 +870,7 @@ impl<'tcx> CtxtInterners<'tcx> { Ty(Interned::new_unchecked( self.type_ .intern(kind, |kind| { - let flags = super::flags::FlagComputation::for_kind(&kind); + let flags = ty::FlagComputation::>::for_kind(&kind); let stable_hash = self.stable_hash(&flags, sess, untracked, &kind); InternedInSet(self.arena.alloc(WithCachedTypeInfo { @@ -896,7 +896,7 @@ impl<'tcx> CtxtInterners<'tcx> { Const(Interned::new_unchecked( self.const_ .intern(kind, |kind: ty::ConstKind<'_>| { - let flags = super::flags::FlagComputation::for_const_kind(&kind); + let flags = ty::FlagComputation::>::for_const_kind(&kind); let stable_hash = self.stable_hash(&flags, sess, untracked, &kind); InternedInSet(self.arena.alloc(WithCachedTypeInfo { @@ -912,7 +912,7 @@ impl<'tcx> CtxtInterners<'tcx> { fn stable_hash<'a, T: HashStable>>( &self, - flags: &ty::flags::FlagComputation, + flags: &ty::FlagComputation>, sess: &'a Session, untracked: &'a Untracked, val: &T, @@ -940,7 +940,7 @@ impl<'tcx> CtxtInterners<'tcx> { Predicate(Interned::new_unchecked( self.predicate .intern(kind, |kind| { - let flags = super::flags::FlagComputation::for_predicate(kind); + let flags = ty::FlagComputation::>::for_predicate(kind); let stable_hash = self.stable_hash(&flags, sess, untracked, &kind); @@ -961,7 +961,7 @@ impl<'tcx> CtxtInterners<'tcx> { } else { self.clauses .intern_ref(clauses, || { - let flags = 
super::flags::FlagComputation::for_clauses(clauses); + let flags = ty::FlagComputation::>::for_clauses(clauses); InternedInSet(ListWithCachedTypeInfo::from_arena( &*self.arena, diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs deleted file mode 100644 index 2424923fb787b..0000000000000 --- a/compiler/rustc_middle/src/ty/flags.rs +++ /dev/null @@ -1,359 +0,0 @@ -use std::slice; - -use crate::ty::{self, GenericArg, GenericArgKind, InferConst, Ty, TypeFlags}; - -#[derive(Debug)] -pub struct FlagComputation { - pub flags: TypeFlags, - - /// see `Ty::outer_exclusive_binder` for details - pub outer_exclusive_binder: ty::DebruijnIndex, -} - -impl FlagComputation { - fn new() -> FlagComputation { - FlagComputation { flags: TypeFlags::empty(), outer_exclusive_binder: ty::INNERMOST } - } - - #[allow(rustc::usage_of_ty_tykind)] - pub fn for_kind(kind: &ty::TyKind<'_>) -> FlagComputation { - let mut result = FlagComputation::new(); - result.add_kind(kind); - result - } - - pub fn for_predicate(binder: ty::Binder<'_, ty::PredicateKind<'_>>) -> FlagComputation { - let mut result = FlagComputation::new(); - result.add_predicate(binder); - result - } - - pub fn for_const_kind(kind: &ty::ConstKind<'_>) -> FlagComputation { - let mut result = FlagComputation::new(); - result.add_const_kind(kind); - result - } - - pub fn for_clauses(clauses: &[ty::Clause<'_>]) -> FlagComputation { - let mut result = FlagComputation::new(); - for c in clauses { - result.add_flags(c.as_predicate().flags()); - result.add_exclusive_binder(c.as_predicate().outer_exclusive_binder()); - } - result - } - - fn add_flags(&mut self, flags: TypeFlags) { - self.flags = self.flags | flags; - } - - /// indicates that `self` refers to something at binding level `binder` - fn add_bound_var(&mut self, binder: ty::DebruijnIndex) { - let exclusive_binder = binder.shifted_in(1); - self.add_exclusive_binder(exclusive_binder); - } - - /// indicates that `self` refers to something *inside* binding - /// level `binder` -- not bound by `binder`, but bound by the next - /// binder internal to it - fn add_exclusive_binder(&mut self, exclusive_binder: ty::DebruijnIndex) { - self.outer_exclusive_binder = self.outer_exclusive_binder.max(exclusive_binder); - } - - /// Adds the flags/depth from a set of types that appear within the current type, but within a - /// region binder. - fn bound_computation(&mut self, value: ty::Binder<'_, T>, f: F) - where - F: FnOnce(&mut Self, T), - { - let mut computation = FlagComputation::new(); - - if !value.bound_vars().is_empty() { - computation.add_flags(TypeFlags::HAS_BINDER_VARS); - } - - f(&mut computation, value.skip_binder()); - - self.add_flags(computation.flags); - - // The types that contributed to `computation` occurred within - // a region binder, so subtract one from the region depth - // within when adding the depth to `self`. - let outer_exclusive_binder = computation.outer_exclusive_binder; - if outer_exclusive_binder > ty::INNERMOST { - self.add_exclusive_binder(outer_exclusive_binder.shifted_out(1)); - } // otherwise, this binder captures nothing - } - - #[allow(rustc::usage_of_ty_tykind)] - fn add_kind(&mut self, kind: &ty::TyKind<'_>) { - match kind { - &ty::Bool - | &ty::Char - | &ty::Int(_) - | &ty::Float(_) - | &ty::Uint(_) - | &ty::Never - | &ty::Str - | &ty::Foreign(..) 
=> {} - - &ty::Error(_) => self.add_flags(TypeFlags::HAS_ERROR), - - &ty::Param(_) => { - self.add_flags(TypeFlags::HAS_TY_PARAM); - } - - &ty::Closure(_, args) - | &ty::Coroutine(_, args) - | &ty::CoroutineClosure(_, args) - | &ty::CoroutineWitness(_, args) => { - self.add_args(args); - } - - &ty::Bound(debruijn, _) => { - self.add_bound_var(debruijn); - self.add_flags(TypeFlags::HAS_TY_BOUND); - } - - &ty::Placeholder(..) => { - self.add_flags(TypeFlags::HAS_TY_PLACEHOLDER); - } - - &ty::Infer(infer) => match infer { - ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) => { - self.add_flags(TypeFlags::HAS_TY_FRESH) - } - - ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => { - self.add_flags(TypeFlags::HAS_TY_INFER) - } - }, - - &ty::Adt(_, args) => { - self.add_args(args); - } - - &ty::Alias(kind, data) => { - self.add_flags(match kind { - ty::Projection => TypeFlags::HAS_TY_PROJECTION, - ty::Weak => TypeFlags::HAS_TY_WEAK, - ty::Opaque => TypeFlags::HAS_TY_OPAQUE, - ty::Inherent => TypeFlags::HAS_TY_INHERENT, - }); - - self.add_alias_ty(data); - } - - &ty::Dynamic(obj, r, _) => { - for predicate in obj.iter() { - self.bound_computation(predicate, |computation, predicate| match predicate { - ty::ExistentialPredicate::Trait(tr) => computation.add_args(tr.args), - ty::ExistentialPredicate::Projection(p) => { - computation.add_existential_projection(&p); - } - ty::ExistentialPredicate::AutoTrait(_) => {} - }); - } - - self.add_region(r); - } - - &ty::Array(tt, len) => { - self.add_ty(tt); - self.add_const(len); - } - - &ty::Pat(ty, pat) => { - self.add_ty(ty); - match *pat { - ty::PatternKind::Range { start, end } => { - self.add_const(start); - self.add_const(end); - } - } - } - - &ty::Slice(tt) => self.add_ty(tt), - - &ty::RawPtr(ty, _) => { - self.add_ty(ty); - } - - &ty::Ref(r, ty, _) => { - self.add_region(r); - self.add_ty(ty); - } - - &ty::Tuple(types) => { - self.add_tys(types); - } - - &ty::FnDef(_, args) => { - self.add_args(args); - } - - &ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| { - computation.add_tys(sig_tys.inputs_and_output); - }), - - &ty::UnsafeBinder(bound_ty) => { - self.bound_computation(bound_ty.into(), |computation, ty| { - computation.add_ty(ty); - }) - } - } - } - - fn add_predicate(&mut self, binder: ty::Binder<'_, ty::PredicateKind<'_>>) { - self.bound_computation(binder, |computation, atom| computation.add_predicate_atom(atom)); - } - - fn add_predicate_atom(&mut self, atom: ty::PredicateKind<'_>) { - match atom { - ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) => { - self.add_args(trait_pred.trait_ref.args); - } - ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(ty::HostEffectPredicate { - trait_ref, - constness: _, - })) => { - self.add_args(trait_ref.args); - } - ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate( - a, - b, - ))) => { - self.add_region(a); - self.add_region(b); - } - ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate( - ty, - region, - ))) => { - self.add_ty(ty); - self.add_region(region); - } - ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => { - self.add_const(ct); - self.add_ty(ty); - } - ty::PredicateKind::Subtype(ty::SubtypePredicate { a_is_expected: _, a, b }) => { - self.add_ty(a); - self.add_ty(b); - } - ty::PredicateKind::Coerce(ty::CoercePredicate { a, b }) => { - self.add_ty(a); - self.add_ty(b); - } - ty::PredicateKind::Clause(ty::ClauseKind::Projection(ty::ProjectionPredicate { - 
projection_term, - term, - })) => { - self.add_alias_term(projection_term); - self.add_term(term); - } - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { - self.add_args(slice::from_ref(&arg)); - } - ty::PredicateKind::DynCompatible(_def_id) => {} - ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(uv)) => { - self.add_const(uv); - } - ty::PredicateKind::ConstEquate(expected, found) => { - self.add_const(expected); - self.add_const(found); - } - ty::PredicateKind::Ambiguous => {} - ty::PredicateKind::NormalizesTo(ty::NormalizesTo { alias, term }) => { - self.add_alias_term(alias); - self.add_term(term); - } - ty::PredicateKind::AliasRelate(t1, t2, _) => { - self.add_term(t1); - self.add_term(t2); - } - } - } - - fn add_ty(&mut self, ty: Ty<'_>) { - self.add_flags(ty.flags()); - self.add_exclusive_binder(ty.outer_exclusive_binder()); - } - - fn add_tys(&mut self, tys: &[Ty<'_>]) { - for &ty in tys { - self.add_ty(ty); - } - } - - fn add_region(&mut self, r: ty::Region<'_>) { - self.add_flags(r.type_flags()); - if let ty::ReBound(debruijn, _) = r.kind() { - self.add_bound_var(debruijn); - } - } - - fn add_const(&mut self, c: ty::Const<'_>) { - self.add_flags(c.flags()); - self.add_exclusive_binder(c.outer_exclusive_binder()); - } - - fn add_const_kind(&mut self, c: &ty::ConstKind<'_>) { - match *c { - ty::ConstKind::Unevaluated(uv) => { - self.add_args(uv.args); - self.add_flags(TypeFlags::HAS_CT_PROJECTION); - } - ty::ConstKind::Infer(infer) => match infer { - InferConst::Fresh(_) => self.add_flags(TypeFlags::HAS_CT_FRESH), - InferConst::Var(_) => self.add_flags(TypeFlags::HAS_CT_INFER), - }, - ty::ConstKind::Bound(debruijn, _) => { - self.add_bound_var(debruijn); - self.add_flags(TypeFlags::HAS_CT_BOUND); - } - ty::ConstKind::Param(_) => { - self.add_flags(TypeFlags::HAS_CT_PARAM); - } - ty::ConstKind::Placeholder(_) => { - self.add_flags(TypeFlags::HAS_CT_PLACEHOLDER); - } - ty::ConstKind::Value(cv) => self.add_ty(cv.ty), - ty::ConstKind::Expr(e) => self.add_args(e.args()), - ty::ConstKind::Error(_) => self.add_flags(TypeFlags::HAS_ERROR), - } - } - - fn add_existential_projection(&mut self, projection: &ty::ExistentialProjection<'_>) { - self.add_args(projection.args); - match projection.term.unpack() { - ty::TermKind::Ty(ty) => self.add_ty(ty), - ty::TermKind::Const(ct) => self.add_const(ct), - } - } - - fn add_alias_ty(&mut self, alias_ty: ty::AliasTy<'_>) { - self.add_args(alias_ty.args); - } - - fn add_alias_term(&mut self, alias_term: ty::AliasTerm<'_>) { - self.add_args(alias_term.args); - } - - fn add_args(&mut self, args: &[GenericArg<'_>]) { - for kind in args { - match kind.unpack() { - GenericArgKind::Type(ty) => self.add_ty(ty), - GenericArgKind::Lifetime(lt) => self.add_region(lt), - GenericArgKind::Const(ct) => self.add_const(ct), - } - } - } - - fn add_term(&mut self, term: ty::Term<'_>) { - match term.unpack() { - ty::TermKind::Ty(ty) => self.add_ty(ty), - ty::TermKind::Const(ct) => self.add_const(ct), - } - } -} diff --git a/compiler/rustc_middle/src/ty/generic_args.rs b/compiler/rustc_middle/src/ty/generic_args.rs index 9c1ff134f0fd4..1f04937232dd1 100644 --- a/compiler/rustc_middle/src/ty/generic_args.rs +++ b/compiler/rustc_middle/src/ty/generic_args.rs @@ -11,6 +11,7 @@ use rustc_hir::def_id::DefId; use rustc_macros::{HashStable, TyDecodable, TyEncodable, extension}; use rustc_serialize::{Decodable, Encodable}; use rustc_type_ir::WithCachedTypeInfo; +use rustc_type_ir::walk::TypeWalker; use smallvec::SmallVec; use crate::ty::codec::{TyDecoder, 
TyEncoder}; @@ -297,6 +298,20 @@ impl<'tcx> GenericArg<'tcx> { GenericArgKind::Const(ct) => ct.is_ct_infer(), } } + + /// Iterator that walks `self` and any types reachable from + /// `self`, in depth-first order. Note that just walks the types + /// that appear in `self`, it does not descend into the fields of + /// structs or variants. For example: + /// + /// ```text + /// isize => { isize } + /// Foo> => { Foo>, Bar, isize } + /// [isize] => { [isize], isize } + /// ``` + pub fn walk(self) -> TypeWalker> { + TypeWalker::new(self) + } } impl<'a, 'tcx> Lift> for GenericArg<'a> { diff --git a/compiler/rustc_middle/src/ty/list.rs b/compiler/rustc_middle/src/ty/list.rs index 0fd370a56195a..0cf5820959ee5 100644 --- a/compiler/rustc_middle/src/ty/list.rs +++ b/compiler/rustc_middle/src/ty/list.rs @@ -7,9 +7,9 @@ use std::{fmt, iter, mem, ptr, slice}; use rustc_data_structures::aligned::{Aligned, align_of}; use rustc_data_structures::sync::DynSync; use rustc_serialize::{Encodable, Encoder}; +use rustc_type_ir::FlagComputation; -use super::flags::FlagComputation; -use super::{DebruijnIndex, TypeFlags}; +use super::{DebruijnIndex, TyCtxt, TypeFlags}; use crate::arena::Arena; /// `List` is a bit like `&[T]`, but with some critical differences. @@ -299,8 +299,8 @@ impl TypeInfo { } } -impl From for TypeInfo { - fn from(computation: FlagComputation) -> TypeInfo { +impl<'tcx> From>> for TypeInfo { + fn from(computation: FlagComputation>) -> TypeInfo { TypeInfo { flags: computation.flags, outer_exclusive_binder: computation.outer_exclusive_binder, diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index a2b3acac3f26b..61e869f5de419 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -117,7 +117,6 @@ pub mod cast; pub mod codec; pub mod error; pub mod fast_reject; -pub mod flags; pub mod inhabitedness; pub mod layout; pub mod normalize_erasing_regions; @@ -128,7 +127,6 @@ pub mod significant_drop_order; pub mod trait_def; pub mod util; pub mod vtable; -pub mod walk; mod adt; mod assoc; diff --git a/compiler/rustc_middle/src/ty/pattern.rs b/compiler/rustc_middle/src/ty/pattern.rs index 4cad1ab209916..758adc42e3ebb 100644 --- a/compiler/rustc_middle/src/ty/pattern.rs +++ b/compiler/rustc_middle/src/ty/pattern.rs @@ -1,14 +1,40 @@ use std::fmt; use rustc_data_structures::intern::Interned; -use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; +use rustc_macros::HashStable; +use rustc_type_ir::ir_print::IrPrint; +use rustc_type_ir::{ + FlagComputation, Flags, {self as ir}, +}; +use super::TyCtxt; use crate::ty; +pub type PatternKind<'tcx> = ir::PatternKind>; + #[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)] #[rustc_pass_by_value] pub struct Pattern<'tcx>(pub Interned<'tcx, PatternKind<'tcx>>); +impl<'tcx> Flags for Pattern<'tcx> { + fn flags(&self) -> rustc_type_ir::TypeFlags { + match &**self { + ty::PatternKind::Range { start, end } => { + FlagComputation::for_const_kind(&start.kind()).flags + | FlagComputation::for_const_kind(&end.kind()).flags + } + } + } + + fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { + match &**self { + ty::PatternKind::Range { start, end } => { + start.outer_exclusive_binder().max(end.outer_exclusive_binder()) + } + } + } +} + impl<'tcx> std::ops::Deref for Pattern<'tcx> { type Target = PatternKind<'tcx>; @@ -23,9 +49,9 @@ impl<'tcx> fmt::Debug for Pattern<'tcx> { } } -impl<'tcx> fmt::Debug for PatternKind<'tcx> { - fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { - match *self { +impl<'tcx> IrPrint> for TyCtxt<'tcx> { + fn print(t: &PatternKind<'tcx>, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *t { PatternKind::Range { start, end } => { write!(f, "{start}")?; @@ -53,10 +79,15 @@ impl<'tcx> fmt::Debug for PatternKind<'tcx> { } } } + + fn print_debug(t: &PatternKind<'tcx>, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + Self::print(t, fmt) + } } -#[derive(Clone, PartialEq, Eq, Hash)] -#[derive(HashStable, TyEncodable, TyDecodable, TypeVisitable, TypeFoldable)] -pub enum PatternKind<'tcx> { - Range { start: ty::Const<'tcx>, end: ty::Const<'tcx> }, +impl<'tcx> rustc_type_ir::inherent::IntoKind for Pattern<'tcx> { + type Kind = PatternKind<'tcx>; + fn kind(self) -> Self::Kind { + *self + } } diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 27ee363f1c142..d9a65ae57a090 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -16,6 +16,7 @@ use rustc_hir::def_id::DefId; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, extension}; use rustc_span::{DUMMY_SP, Span, Symbol, sym}; use rustc_type_ir::TyKind::*; +use rustc_type_ir::walk::TypeWalker; use rustc_type_ir::{self as ir, BoundVar, CollectAndApply, DynKind, TypeVisitableExt, elaborate}; use tracing::instrument; use ty::util::{AsyncDropGlueMorphology, IntTypeExt}; @@ -2029,6 +2030,20 @@ impl<'tcx> Ty<'tcx> { pub fn is_known_rigid(self) -> bool { self.kind().is_known_rigid() } + + /// Iterator that walks `self` and any types reachable from + /// `self`, in depth-first order. Note that just walks the types + /// that appear in `self`, it does not descend into the fields of + /// structs or variants. For example: + /// + /// ```text + /// isize => { isize } + /// Foo> => { Foo>, Bar, isize } + /// [isize] => { [isize], isize } + /// ``` + pub fn walk(self) -> TypeWalker> { + TypeWalker::new(self.into()) + } } impl<'tcx> rustc_type_ir::inherent::Tys> for &'tcx ty::List> { diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs index 04d96f117072f..80c729d66b1ec 100644 --- a/compiler/rustc_mir_transform/src/coroutine.rs +++ b/compiler/rustc_mir_transform/src/coroutine.rs @@ -547,7 +547,7 @@ fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, None); - for bb in START_BLOCK..body.basic_blocks.next_index() { + for bb in body.basic_blocks.indices() { let bb_data = &body[bb]; if bb_data.is_cleanup { continue; @@ -556,11 +556,11 @@ fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { match &bb_data.terminator().kind { TerminatorKind::Call { func, .. } => { let func_ty = func.ty(body, tcx); - if let ty::FnDef(def_id, _) = *func_ty.kind() { - if def_id == get_context_def_id { - let local = eliminate_get_context_call(&mut body[bb]); - replace_resume_ty_local(tcx, body, local, context_mut_ref); - } + if let ty::FnDef(def_id, _) = *func_ty.kind() + && def_id == get_context_def_id + { + let local = eliminate_get_context_call(&mut body[bb]); + replace_resume_ty_local(tcx, body, local, context_mut_ref); } } TerminatorKind::Yield { resume_arg, .. 
} => { @@ -1057,7 +1057,7 @@ fn insert_switch<'tcx>( let blocks = body.basic_blocks_mut().iter_mut(); for target in blocks.flat_map(|b| b.terminator_mut().successors_mut()) { - *target = BasicBlock::new(target.index() + 1); + *target += 1; } } @@ -1209,14 +1209,8 @@ fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, typing_env: ty::Typing } // If there's a return terminator the function may return. - for block in body.basic_blocks.iter() { - if let TerminatorKind::Return = block.terminator().kind { - return true; - } - } - + body.basic_blocks.iter().any(|block| matches!(block.terminator().kind, TerminatorKind::Return)) // Otherwise the function can't return. - false } fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool { @@ -1293,12 +1287,12 @@ fn create_coroutine_resume_function<'tcx>( kind: TerminatorKind::Goto { target: poison_block }, }; } - } else if !block.is_cleanup { + } else if !block.is_cleanup // Any terminators that *can* unwind but don't have an unwind target set are also // pointed at our poisoning block (unless they're part of the cleanup path). - if let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() { - *unwind = UnwindAction::Cleanup(poison_block); - } + && let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() + { + *unwind = UnwindAction::Cleanup(poison_block); } } } @@ -1340,12 +1334,14 @@ fn create_coroutine_resume_function<'tcx>( make_coroutine_state_argument_indirect(tcx, body); match transform.coroutine_kind { + CoroutineKind::Coroutine(_) + | CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) => + { + make_coroutine_state_argument_pinned(tcx, body); + } // Iterator::next doesn't accept a pinned argument, // unlike for all other coroutine kinds. 
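Several hunks in this pass (and in the MIR transform passes further down) replace manual `0..len` counting loops with the typed-index iterators on `IndexVec`. A minimal sketch of the idiom, assuming a `body: &mir::Body<'_>` is in scope (illustrative only, not part of the patch):

    // Before: build the typed index by hand from a raw usize.
    for i in 0..body.basic_blocks.len() {
        let bb = BasicBlock::from_usize(i);
        let _data = &body.basic_blocks[bb];
    }
    // After: let the IndexVec hand out typed indices, or index/value pairs.
    for bb in body.basic_blocks.indices() {
        let _data = &body.basic_blocks[bb];
    }
    for (bb, data) in body.basic_blocks.iter_enumerated() {
        let _ = (bb, data.terminator());
    }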
CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {} - _ => { - make_coroutine_state_argument_pinned(tcx, body); - } } // Make sure we remove dead blocks to remove @@ -1408,8 +1404,7 @@ fn create_cases<'tcx>( let mut statements = Vec::new(); // Create StorageLive instructions for locals with live storage - for i in 0..(body.local_decls.len()) { - let l = Local::new(i); + for l in body.local_decls.indices() { let needs_storage_live = point.storage_liveness.contains(l) && !transform.remap.contains(l) && !transform.always_live_locals.contains(l); @@ -1535,15 +1530,10 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { let coroutine_kind = body.coroutine_kind().unwrap(); // Get the discriminant type and args which typeck computed - let (discr_ty, movable) = match *coroutine_ty.kind() { - ty::Coroutine(_, args) => { - let args = args.as_coroutine(); - (args.discr_ty(tcx), coroutine_kind.movability() == hir::Movability::Movable) - } - _ => { - tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}")); - } + let ty::Coroutine(_, args) = coroutine_ty.kind() else { + tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}")); }; + let discr_ty = args.as_coroutine().discr_ty(tcx); let new_ret_ty = match coroutine_kind { CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => { @@ -1610,6 +1600,7 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { let always_live_locals = always_storage_live_locals(body); + let movable = coroutine_kind.movability() == hir::Movability::Movable; let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable); diff --git a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs index 57f7893be1b8c..d49f5d9f9c385 100644 --- a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs +++ b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs @@ -103,9 +103,8 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch { let mut should_cleanup = false; // Also consider newly generated bbs in the same pass - for i in 0..body.basic_blocks.len() { + for parent in body.basic_blocks.indices() { let bbs = &*body.basic_blocks; - let parent = BasicBlock::from_usize(i); let Some(opt_data) = evaluate_candidate(tcx, body, parent) else { continue }; trace!("SUCCESS: found optimization possibility to apply: {opt_data:?}"); diff --git a/compiler/rustc_mir_transform/src/elaborate_drop.rs b/compiler/rustc_mir_transform/src/elaborate_drop.rs index 0d8cf524661c8..fa476f961235d 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drop.rs +++ b/compiler/rustc_mir_transform/src/elaborate_drop.rs @@ -266,19 +266,16 @@ where let tcx = self.tcx(); assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis); - // The type error for normalization may have been in dropck: see - // `compute_drop_data` in rustc_borrowck, in which case we wouldn't have - // deleted the MIR body and could have an error here as well. 
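The check above leans on the guarantee that for `Copy` types `Clone::clone` must be equivalent to a plain copy, so the call can be replaced by copying through the reference. A small stand-alone illustration of that equivalence (ordinary Rust, not compiler code):

    fn clone_of_copy(x: &u32) -> u32 {
        // For a `Copy` type these two are required to produce the same value;
        // the pass rewrites the call terminator on the left into the copy on
        // the right, followed by a plain `Goto` to the destination block.
        let via_clone = Clone::clone(x);
        let via_copy = *x;
        debug_assert_eq!(via_clone, via_copy);
        via_copy
    }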
let field_ty = match tcx .try_normalize_erasing_regions(self.elaborator.typing_env(), f.ty(tcx, args)) { Ok(t) => t, Err(_) => Ty::new_error( self.tcx(), - self.elaborator - .body() - .tainted_by_errors - .expect("Error in drop elaboration not found by dropck."), + self.tcx().dcx().span_delayed_bug( + self.elaborator.body().span, + "Error normalizing in drop elaboration.", + ), ), }; diff --git a/compiler/rustc_mir_transform/src/instsimplify.rs b/compiler/rustc_mir_transform/src/instsimplify.rs index a8d6aaa50a294..5f0c55ddc092d 100644 --- a/compiler/rustc_mir_transform/src/instsimplify.rs +++ b/compiler/rustc_mir_transform/src/instsimplify.rs @@ -10,7 +10,6 @@ use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, layout}; use rustc_span::{DUMMY_SP, Symbol, sym}; use crate::simplify::simplify_duplicate_switch_targets; -use crate::take_array; pub(super) enum InstSimplify { BeforeInline, @@ -214,7 +213,9 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { terminator: &mut Terminator<'tcx>, statements: &mut Vec>, ) { - let TerminatorKind::Call { func, args, destination, target, .. } = &mut terminator.kind + let TerminatorKind::Call { + func, args, destination, target: Some(destination_block), .. + } = &terminator.kind else { return; }; @@ -222,15 +223,8 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { // It's definitely not a clone if there are multiple arguments let [arg] = &args[..] else { return }; - let Some(destination_block) = *target else { return }; - // Only bother looking more if it's easy to know what we're calling - let Some((fn_def_id, fn_args)) = func.const_fn_def() else { return }; - - // Clone needs one arg, so we can cheaply rule out other stuff - if fn_args.len() != 1 { - return; - } + let Some((fn_def_id, ..)) = func.const_fn_def() else { return }; // These types are easily available from locals, so check that before // doing DefId lookups to figure out what we're actually calling. 
@@ -238,15 +232,12 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { let ty::Ref(_region, inner_ty, Mutability::Not) = *arg_ty.kind() else { return }; - if !inner_ty.is_trivially_pure_clone_copy() { - return; - } - - if !self.tcx.is_lang_item(fn_def_id, LangItem::CloneFn) { + if !self.tcx.is_lang_item(fn_def_id, LangItem::CloneFn) + || !inner_ty.is_trivially_pure_clone_copy() + { return; } - let Ok([arg]) = take_array(args) else { return }; let Some(arg_place) = arg.node.place() else { return }; statements.push(Statement { @@ -258,7 +249,7 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { )), ))), }); - terminator.kind = TerminatorKind::Goto { target: destination_block }; + terminator.kind = TerminatorKind::Goto { target: *destination_block }; } fn simplify_nounwind_call(&self, terminator: &mut Terminator<'tcx>) { diff --git a/compiler/rustc_mir_transform/src/match_branches.rs b/compiler/rustc_mir_transform/src/match_branches.rs index 0d9d0368d3729..5059837328e24 100644 --- a/compiler/rustc_mir_transform/src/match_branches.rs +++ b/compiler/rustc_mir_transform/src/match_branches.rs @@ -20,13 +20,11 @@ impl<'tcx> crate::MirPass<'tcx> for MatchBranchSimplification { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let typing_env = body.typing_env(tcx); let mut should_cleanup = false; - for i in 0..body.basic_blocks.len() { - let bbs = &*body.basic_blocks; - let bb_idx = BasicBlock::from_usize(i); - match bbs[bb_idx].terminator().kind { + for bb_idx in body.basic_blocks.indices() { + match &body.basic_blocks[bb_idx].terminator().kind { TerminatorKind::SwitchInt { - discr: ref _discr @ (Operand::Copy(_) | Operand::Move(_)), - ref targets, + discr: Operand::Copy(_) | Operand::Move(_), + targets, .. // We require that the possible target blocks don't contain this block. } if !targets.all_targets().contains(&bb_idx) => {} @@ -66,9 +64,10 @@ trait SimplifyMatch<'tcx> { typing_env: ty::TypingEnv<'tcx>, ) -> Option<()> { let bbs = &body.basic_blocks; - let (discr, targets) = match bbs[switch_bb_idx].terminator().kind { - TerminatorKind::SwitchInt { ref discr, ref targets, .. } => (discr, targets), - _ => unreachable!(), + let TerminatorKind::SwitchInt { discr, targets, .. 
} = + &bbs[switch_bb_idx].terminator().kind + else { + unreachable!(); }; let discr_ty = discr.ty(body.local_decls(), tcx); diff --git a/compiler/rustc_mir_transform/src/multiple_return_terminators.rs b/compiler/rustc_mir_transform/src/multiple_return_terminators.rs index c63bfdcee8559..f59b849e85c62 100644 --- a/compiler/rustc_mir_transform/src/multiple_return_terminators.rs +++ b/compiler/rustc_mir_transform/src/multiple_return_terminators.rs @@ -18,19 +18,17 @@ impl<'tcx> crate::MirPass<'tcx> for MultipleReturnTerminators { // find basic blocks with no statement and a return terminator let mut bbs_simple_returns = DenseBitSet::new_empty(body.basic_blocks.len()); let bbs = body.basic_blocks_mut(); - for idx in bbs.indices() { - if bbs[idx].statements.is_empty() - && bbs[idx].terminator().kind == TerminatorKind::Return - { + for (idx, bb) in bbs.iter_enumerated() { + if bb.statements.is_empty() && bb.terminator().kind == TerminatorKind::Return { bbs_simple_returns.insert(idx); } } for bb in bbs { - if let TerminatorKind::Goto { target } = bb.terminator().kind { - if bbs_simple_returns.contains(target) { - bb.terminator_mut().kind = TerminatorKind::Return; - } + if let TerminatorKind::Goto { target } = bb.terminator().kind + && bbs_simple_returns.contains(target) + { + bb.terminator_mut().kind = TerminatorKind::Return; } } diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs index e7930f0a1e3f6..66fe3ef4141f5 100644 --- a/compiler/rustc_mir_transform/src/validate.rs +++ b/compiler/rustc_mir_transform/src/validate.rs @@ -221,12 +221,11 @@ impl<'a, 'tcx> CfgChecker<'a, 'tcx> { // Check for cycles let mut stack = FxHashSet::default(); - for i in 0..parent.len() { - let mut bb = BasicBlock::from_usize(i); + for (mut bb, parent) in parent.iter_enumerated_mut() { stack.clear(); stack.insert(bb); loop { - let Some(parent) = parent[bb].take() else { break }; + let Some(parent) = parent.take() else { break }; let no_cycle = stack.insert(parent); if !no_cycle { self.fail( diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl index bea86801ed753..6ee5e356435c8 100644 --- a/compiler/rustc_passes/messages.ftl +++ b/compiler/rustc_passes/messages.ftl @@ -742,9 +742,6 @@ passes_trait_impl_const_stable = passes_transparent_incompatible = transparent {$target} cannot have other repr hints -passes_undefined_naked_function_abi = - Rust ABI is unsupported in naked functions - passes_unknown_external_lang_item = unknown external lang item: `{$lang_item}` diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index c514250781222..42279258e8777 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -624,6 +624,21 @@ impl<'tcx> CheckAttrVisitor<'tcx> { match target { Target::Fn | Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) => { + let fn_sig = self.tcx.hir_node(hir_id).fn_sig().unwrap(); + let abi = fn_sig.header.abi; + if abi.is_rustic_abi() && !self.tcx.features().naked_functions_rustic_abi() { + feature_err( + &self.tcx.sess, + sym::naked_functions_rustic_abi, + fn_sig.span, + format!( + "`#[naked]` is currently unstable on `extern \"{}\"` functions", + abi.as_str() + ), + ) + .emit(); + } + for other_attr in attrs { // this covers "sugared doc comments" of the form `/// ...` // it does not cover `#[doc = "..."]`, which is handled below diff --git a/compiler/rustc_passes/src/errors.rs 
b/compiler/rustc_passes/src/errors.rs index 4e3e0324205a4..85eddafefcd56 100644 --- a/compiler/rustc_passes/src/errors.rs +++ b/compiler/rustc_passes/src/errors.rs @@ -1197,10 +1197,6 @@ pub(crate) struct UnlabeledCfInWhileCondition<'a> { pub cf_type: &'a str, } -#[derive(LintDiagnostic)] -#[diag(passes_undefined_naked_function_abi)] -pub(crate) struct UndefinedNakedFunctionAbi; - #[derive(Diagnostic)] #[diag(passes_no_patterns)] pub(crate) struct NoPatterns { diff --git a/compiler/rustc_passes/src/naked_functions.rs b/compiler/rustc_passes/src/naked_functions.rs index d35aedf9a5647..3c9f8b72c3635 100644 --- a/compiler/rustc_passes/src/naked_functions.rs +++ b/compiler/rustc_passes/src/naked_functions.rs @@ -1,6 +1,5 @@ //! Checks validity of naked functions. -use rustc_abi::ExternAbi; use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::{LocalDefId, LocalModDefId}; @@ -10,12 +9,11 @@ use rustc_middle::hir::nested_filter::OnlyBodies; use rustc_middle::query::Providers; use rustc_middle::span_bug; use rustc_middle::ty::TyCtxt; -use rustc_session::lint::builtin::UNDEFINED_NAKED_FUNCTION_ABI; use rustc_span::{Span, sym}; use crate::errors::{ NakedAsmOutsideNakedFn, NakedFunctionsAsmBlock, NakedFunctionsMustNakedAsm, NoPatterns, - ParamsNotAllowed, UndefinedNakedFunctionAbi, + ParamsNotAllowed, }; pub(crate) fn provide(providers: &mut Providers) { @@ -29,26 +27,21 @@ fn check_mod_naked_functions(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) { continue; } - let (fn_header, body_id) = match tcx.hir_node_by_def_id(def_id) { + let body = match tcx.hir_node_by_def_id(def_id) { hir::Node::Item(hir::Item { - kind: hir::ItemKind::Fn { sig, body: body_id, .. }, - .. + kind: hir::ItemKind::Fn { body: body_id, .. }, .. }) | hir::Node::TraitItem(hir::TraitItem { - kind: hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)), + kind: hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body_id)), .. }) | hir::Node::ImplItem(hir::ImplItem { - kind: hir::ImplItemKind::Fn(sig, body_id), - .. - }) => (sig.header, *body_id), + kind: hir::ImplItemKind::Fn(_, body_id), .. + }) => tcx.hir_body(*body_id), _ => continue, }; - let body = tcx.hir_body(body_id); - if tcx.has_attr(def_id, sym::naked) { - check_abi(tcx, def_id, fn_header.abi); check_no_patterns(tcx, body.params); check_no_parameters_use(tcx, body); check_asm(tcx, def_id, body); @@ -60,20 +53,6 @@ fn check_mod_naked_functions(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) { } } -/// Checks that function uses non-Rust ABI. -fn check_abi(tcx: TyCtxt<'_>, def_id: LocalDefId, abi: ExternAbi) { - if abi == ExternAbi::Rust { - let hir_id = tcx.local_def_id_to_hir_id(def_id); - let span = tcx.def_span(def_id); - tcx.emit_node_span_lint( - UNDEFINED_NAKED_FUNCTION_ABI, - hir_id, - span, - UndefinedNakedFunctionAbi, - ); - } -} - /// Checks that parameters don't use patterns. Mirrors the checks for function declarations. fn check_no_patterns(tcx: TyCtxt<'_>, params: &[hir::Param<'_>]) { for param in params { diff --git a/compiler/rustc_span/src/source_map.rs b/compiler/rustc_span/src/source_map.rs index 6fdf8e46fec65..0273bb040f433 100644 --- a/compiler/rustc_span/src/source_map.rs +++ b/compiler/rustc_span/src/source_map.rs @@ -633,6 +633,24 @@ impl SourceMap { sp } + /// Extends the given `Span` to just before the previous occurrence of `c`. Return the same span + /// if an error occurred while retrieving the code snippet. 
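A rough model of this lookup on plain strings, using a hypothetical helper rather than the real `SourceMap` API (`prev_source` stands for the text preceding the span):

    fn backwards_shift_to_prev_char(prev_source: &str, c: char, accept_newlines: bool) -> usize {
        // Text between the previous `c` and the current span start.
        let tail = prev_source.rsplit(c).next().unwrap_or("");
        if prev_source.contains(c) && (accept_newlines || !tail.contains('\n')) {
            tail.len() + c.len_utf8() // move the start back onto the `c` itself
        } else {
            0 // no `c` found (or a newline in between): leave the span unchanged
        }
    }

    // e.g. backwards_shift_to_prev_char("let x = 1", '=', false) == 3,
    // moving a span that starts after the `1` back so it starts at the `=`.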
+ pub fn span_extend_to_prev_char_before( + &self, + sp: Span, + c: char, + accept_newlines: bool, + ) -> Span { + if let Ok(prev_source) = self.span_to_prev_source(sp) { + let prev_source = prev_source.rsplit(c).next().unwrap_or(""); + if accept_newlines || !prev_source.contains('\n') { + return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32 - 1_u32)); + } + } + + sp + } + /// Extends the given `Span` to just after the previous occurrence of `pat` when surrounded by /// whitespace. Returns None if the pattern could not be found or if an error occurred while /// retrieving the code snippet. diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 986370f501918..e1b4e7aaf7610 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -1400,6 +1400,7 @@ symbols! { naked, naked_asm, naked_functions, + naked_functions_rustic_abi, naked_functions_target_feature, name, names, diff --git a/compiler/rustc_trait_selection/messages.ftl b/compiler/rustc_trait_selection/messages.ftl index 05bbb42fb7c63..74e38f525c8e6 100644 --- a/compiler/rustc_trait_selection/messages.ftl +++ b/compiler/rustc_trait_selection/messages.ftl @@ -268,8 +268,8 @@ trait_selection_oc_type_compat = type not compatible with trait trait_selection_opaque_captures_lifetime = hidden type for `{$opaque_ty}` captures lifetime that does not appear in bounds .label = opaque type defined here trait_selection_opaque_type_non_generic_param = - expected generic {$kind} parameter, found `{$ty}` - .label = {STREQ($ty, "'static") -> + expected generic {$kind} parameter, found `{$arg}` + .label = {STREQ($arg, "'static") -> [true] cannot use static lifetime; use a bound lifetime instead or remove the lifetime parameter from the opaque type *[other] this generic parameter must be used with a generic {$kind} parameter } diff --git a/compiler/rustc_trait_selection/src/errors.rs b/compiler/rustc_trait_selection/src/errors.rs index bb4aba9d29e42..4e5581fb1da0d 100644 --- a/compiler/rustc_trait_selection/src/errors.rs +++ b/compiler/rustc_trait_selection/src/errors.rs @@ -1926,7 +1926,7 @@ impl Subdiagnostic for AddPreciseCapturingForOvercapture { #[derive(Diagnostic)] #[diag(trait_selection_opaque_type_non_generic_param, code = E0792)] pub(crate) struct NonGenericOpaqueTypeParam<'a, 'tcx> { - pub ty: GenericArg<'tcx>, + pub arg: GenericArg<'tcx>, pub kind: &'a str, #[primary_span] pub span: Span, diff --git a/compiler/rustc_trait_selection/src/opaque_types.rs b/compiler/rustc_trait_selection/src/opaque_types.rs index 309bf4dda3d89..cce67b066dde1 100644 --- a/compiler/rustc_trait_selection/src/opaque_types.rs +++ b/compiler/rustc_trait_selection/src/opaque_types.rs @@ -70,7 +70,7 @@ pub fn check_opaque_type_parameter_valid<'tcx>( opaque_env.param_is_error(i)?; return Err(infcx.dcx().emit_err(NonGenericOpaqueTypeParam { - ty: arg, + arg, kind, span, param_span: tcx.def_span(opaque_param.def_id), diff --git a/compiler/rustc_type_ir/src/flags.rs b/compiler/rustc_type_ir/src/flags.rs index 6a2498242feeb..e9d3a149a7303 100644 --- a/compiler/rustc_type_ir/src/flags.rs +++ b/compiler/rustc_type_ir/src/flags.rs @@ -1,3 +1,9 @@ +use std::slice; + +use crate::inherent::*; +use crate::visit::Flags; +use crate::{self as ty, Interner}; + bitflags::bitflags! { /// Flags that we track on types. These flags are propagated upwards /// through the type during type construction, so that we can quickly check @@ -128,3 +134,362 @@ bitflags::bitflags! 
{ const HAS_BINDER_VARS = 1 << 23; } } + +#[derive(Debug)] +pub struct FlagComputation { + pub flags: TypeFlags, + + /// see `Ty::outer_exclusive_binder` for details + pub outer_exclusive_binder: ty::DebruijnIndex, + + interner: std::marker::PhantomData, +} + +impl FlagComputation { + fn new() -> FlagComputation { + FlagComputation { + flags: TypeFlags::empty(), + outer_exclusive_binder: ty::INNERMOST, + interner: std::marker::PhantomData, + } + } + + #[allow(rustc::usage_of_ty_tykind)] + pub fn for_kind(kind: &ty::TyKind) -> FlagComputation { + let mut result = FlagComputation::new(); + result.add_kind(kind); + result + } + + pub fn for_predicate(binder: ty::Binder>) -> FlagComputation { + let mut result = FlagComputation::new(); + result.add_predicate(binder); + result + } + + pub fn for_const_kind(kind: &ty::ConstKind) -> FlagComputation { + let mut result = FlagComputation::new(); + result.add_const_kind(kind); + result + } + + pub fn for_clauses(clauses: &[I::Clause]) -> FlagComputation { + let mut result = FlagComputation::new(); + for c in clauses { + result.add_flags(c.as_predicate().flags()); + result.add_exclusive_binder(c.as_predicate().outer_exclusive_binder()); + } + result + } + + fn add_flags(&mut self, flags: TypeFlags) { + self.flags = self.flags | flags; + } + + /// indicates that `self` refers to something at binding level `binder` + fn add_bound_var(&mut self, binder: ty::DebruijnIndex) { + let exclusive_binder = binder.shifted_in(1); + self.add_exclusive_binder(exclusive_binder); + } + + /// indicates that `self` refers to something *inside* binding + /// level `binder` -- not bound by `binder`, but bound by the next + /// binder internal to it + fn add_exclusive_binder(&mut self, exclusive_binder: ty::DebruijnIndex) { + self.outer_exclusive_binder = self.outer_exclusive_binder.max(exclusive_binder); + } + + /// Adds the flags/depth from a set of types that appear within the current type, but within a + /// region binder. + fn bound_computation(&mut self, value: ty::Binder, f: F) + where + F: FnOnce(&mut Self, T), + { + let mut computation = FlagComputation::new(); + + if !value.bound_vars().is_empty() { + computation.add_flags(TypeFlags::HAS_BINDER_VARS); + } + + f(&mut computation, value.skip_binder()); + + self.add_flags(computation.flags); + + // The types that contributed to `computation` occurred within + // a region binder, so subtract one from the region depth + // within when adding the depth to `self`. + let outer_exclusive_binder = computation.outer_exclusive_binder; + if outer_exclusive_binder > ty::INNERMOST { + self.add_exclusive_binder(outer_exclusive_binder.shifted_out(1)); + } // otherwise, this binder captures nothing + } + + #[allow(rustc::usage_of_ty_tykind)] + fn add_kind(&mut self, kind: &ty::TyKind) { + match *kind { + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Float(_) + | ty::Uint(_) + | ty::Never + | ty::Str + | ty::Foreign(..) => {} + + ty::Error(_) => self.add_flags(TypeFlags::HAS_ERROR), + + ty::Param(_) => { + self.add_flags(TypeFlags::HAS_TY_PARAM); + } + + ty::Closure(_, args) + | ty::Coroutine(_, args) + | ty::CoroutineClosure(_, args) + | ty::CoroutineWitness(_, args) => { + self.add_args(args.as_slice()); + } + + ty::Bound(debruijn, _) => { + self.add_bound_var(debruijn); + self.add_flags(TypeFlags::HAS_TY_BOUND); + } + + ty::Placeholder(..) 
=> { + self.add_flags(TypeFlags::HAS_TY_PLACEHOLDER); + } + + ty::Infer(infer) => match infer { + ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) => { + self.add_flags(TypeFlags::HAS_TY_FRESH) + } + + ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => { + self.add_flags(TypeFlags::HAS_TY_INFER) + } + }, + + ty::Adt(_, args) => { + self.add_args(args.as_slice()); + } + + ty::Alias(kind, data) => { + self.add_flags(match kind { + ty::Projection => TypeFlags::HAS_TY_PROJECTION, + ty::Weak => TypeFlags::HAS_TY_WEAK, + ty::Opaque => TypeFlags::HAS_TY_OPAQUE, + ty::Inherent => TypeFlags::HAS_TY_INHERENT, + }); + + self.add_alias_ty(data); + } + + ty::Dynamic(obj, r, _) => { + for predicate in obj.iter() { + self.bound_computation(predicate, |computation, predicate| match predicate { + ty::ExistentialPredicate::Trait(tr) => { + computation.add_args(tr.args.as_slice()) + } + ty::ExistentialPredicate::Projection(p) => { + computation.add_existential_projection(&p); + } + ty::ExistentialPredicate::AutoTrait(_) => {} + }); + } + + self.add_region(r); + } + + ty::Array(tt, len) => { + self.add_ty(tt); + self.add_const(len); + } + + ty::Pat(ty, pat) => { + self.add_ty(ty); + self.add_flags(pat.flags()); + } + + ty::Slice(tt) => self.add_ty(tt), + + ty::RawPtr(ty, _) => { + self.add_ty(ty); + } + + ty::Ref(r, ty, _) => { + self.add_region(r); + self.add_ty(ty); + } + + ty::Tuple(types) => { + self.add_tys(types); + } + + ty::FnDef(_, args) => { + self.add_args(args.as_slice()); + } + + ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| { + computation.add_tys(sig_tys.inputs_and_output); + }), + + ty::UnsafeBinder(bound_ty) => { + self.bound_computation(bound_ty.into(), |computation, ty| { + computation.add_ty(ty); + }) + } + } + } + + fn add_predicate(&mut self, binder: ty::Binder>) { + self.bound_computation(binder, |computation, atom| computation.add_predicate_atom(atom)); + } + + fn add_predicate_atom(&mut self, atom: ty::PredicateKind) { + match atom { + ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) => { + self.add_args(trait_pred.trait_ref.args.as_slice()); + } + ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(ty::HostEffectPredicate { + trait_ref, + constness: _, + })) => { + self.add_args(trait_ref.args.as_slice()); + } + ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate( + a, + b, + ))) => { + self.add_region(a); + self.add_region(b); + } + ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate( + ty, + region, + ))) => { + self.add_ty(ty); + self.add_region(region); + } + ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => { + self.add_const(ct); + self.add_ty(ty); + } + ty::PredicateKind::Subtype(ty::SubtypePredicate { a_is_expected: _, a, b }) => { + self.add_ty(a); + self.add_ty(b); + } + ty::PredicateKind::Coerce(ty::CoercePredicate { a, b }) => { + self.add_ty(a); + self.add_ty(b); + } + ty::PredicateKind::Clause(ty::ClauseKind::Projection(ty::ProjectionPredicate { + projection_term, + term, + })) => { + self.add_alias_term(projection_term); + self.add_term(term); + } + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { + self.add_args(slice::from_ref(&arg)); + } + ty::PredicateKind::DynCompatible(_def_id) => {} + ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(uv)) => { + self.add_const(uv); + } + ty::PredicateKind::ConstEquate(expected, found) => { + self.add_const(expected); + self.add_const(found); + } + ty::PredicateKind::Ambiguous => {} 
+ ty::PredicateKind::NormalizesTo(ty::NormalizesTo { alias, term }) => { + self.add_alias_term(alias); + self.add_term(term); + } + ty::PredicateKind::AliasRelate(t1, t2, _) => { + self.add_term(t1); + self.add_term(t2); + } + } + } + + fn add_ty(&mut self, ty: I::Ty) { + self.add_flags(ty.flags()); + self.add_exclusive_binder(ty.outer_exclusive_binder()); + } + + fn add_tys(&mut self, tys: I::Tys) { + for ty in tys.iter() { + self.add_ty(ty); + } + } + + fn add_region(&mut self, r: I::Region) { + self.add_flags(r.flags()); + if let ty::ReBound(debruijn, _) = r.kind() { + self.add_bound_var(debruijn); + } + } + + fn add_const(&mut self, c: I::Const) { + self.add_flags(c.flags()); + self.add_exclusive_binder(c.outer_exclusive_binder()); + } + + fn add_const_kind(&mut self, c: &ty::ConstKind) { + match *c { + ty::ConstKind::Unevaluated(uv) => { + self.add_args(uv.args.as_slice()); + self.add_flags(TypeFlags::HAS_CT_PROJECTION); + } + ty::ConstKind::Infer(infer) => match infer { + ty::InferConst::Fresh(_) => self.add_flags(TypeFlags::HAS_CT_FRESH), + ty::InferConst::Var(_) => self.add_flags(TypeFlags::HAS_CT_INFER), + }, + ty::ConstKind::Bound(debruijn, _) => { + self.add_bound_var(debruijn); + self.add_flags(TypeFlags::HAS_CT_BOUND); + } + ty::ConstKind::Param(_) => { + self.add_flags(TypeFlags::HAS_CT_PARAM); + } + ty::ConstKind::Placeholder(_) => { + self.add_flags(TypeFlags::HAS_CT_PLACEHOLDER); + } + ty::ConstKind::Value(cv) => self.add_ty(cv.ty()), + ty::ConstKind::Expr(e) => self.add_args(e.args().as_slice()), + ty::ConstKind::Error(_) => self.add_flags(TypeFlags::HAS_ERROR), + } + } + + fn add_existential_projection(&mut self, projection: &ty::ExistentialProjection) { + self.add_args(projection.args.as_slice()); + match projection.term.kind() { + ty::TermKind::Ty(ty) => self.add_ty(ty), + ty::TermKind::Const(ct) => self.add_const(ct), + } + } + + fn add_alias_ty(&mut self, alias_ty: ty::AliasTy) { + self.add_args(alias_ty.args.as_slice()); + } + + fn add_alias_term(&mut self, alias_term: ty::AliasTerm) { + self.add_args(alias_term.args.as_slice()); + } + + fn add_args(&mut self, args: &[I::GenericArg]) { + for kind in args { + match kind.kind() { + ty::GenericArgKind::Type(ty) => self.add_ty(ty), + ty::GenericArgKind::Lifetime(lt) => self.add_region(lt), + ty::GenericArgKind::Const(ct) => self.add_const(ct), + } + } + } + + fn add_term(&mut self, term: I::Term) { + match term.kind() { + ty::TermKind::Ty(ty) => self.add_ty(ty), + ty::TermKind::Const(ct) => self.add_const(ct), + } + } +} diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs index 6e6c40580d838..417803e75ead6 100644 --- a/compiler/rustc_type_ir/src/inherent.rs +++ b/compiler/rustc_type_ir/src/inherent.rs @@ -583,7 +583,7 @@ pub trait Span: Copy + Debug + Hash + Eq + TypeFoldable { pub trait SliceLike: Sized + Copy { type Item: Copy; - type IntoIter: Iterator; + type IntoIter: Iterator + DoubleEndedIterator; fn iter(self) -> Self::IntoIter; diff --git a/compiler/rustc_type_ir/src/interner.rs b/compiler/rustc_type_ir/src/interner.rs index a9e6764e21824..71bfeabfda878 100644 --- a/compiler/rustc_type_ir/src/interner.rs +++ b/compiler/rustc_type_ir/src/interner.rs @@ -31,6 +31,7 @@ pub trait Interner: + IrPrint> + IrPrint> + IrPrint> + + IrPrint> { type DefId: DefId; type LocalDefId: Copy + Debug + Hash + Eq + Into + TypeFoldable; @@ -104,7 +105,14 @@ pub trait Interner: type ErrorGuaranteed: Copy + Debug + Hash + Eq; type BoundExistentialPredicates: BoundExistentialPredicates; type 
AllocId: Copy + Debug + Hash + Eq; - type Pat: Copy + Debug + Hash + Eq + Debug + Relate; + type Pat: Copy + + Debug + + Hash + + Eq + + Debug + + Relate + + Flags + + IntoKind>; type Safety: Safety; type Abi: Abi; diff --git a/compiler/rustc_type_ir/src/ir_print.rs b/compiler/rustc_type_ir/src/ir_print.rs index 0c71f3a3df2a2..c259a9747f0d4 100644 --- a/compiler/rustc_type_ir/src/ir_print.rs +++ b/compiler/rustc_type_ir/src/ir_print.rs @@ -2,8 +2,8 @@ use std::fmt; use crate::{ AliasTerm, AliasTy, Binder, CoercePredicate, ExistentialProjection, ExistentialTraitRef, FnSig, - HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, ProjectionPredicate, - SubtypePredicate, TraitPredicate, TraitRef, + HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, PatternKind, + ProjectionPredicate, SubtypePredicate, TraitPredicate, TraitRef, }; pub trait IrPrint { @@ -57,9 +57,10 @@ define_display_via_print!( AliasTy, AliasTerm, FnSig, + PatternKind, ); -define_debug_via_print!(TraitRef, ExistentialTraitRef, ExistentialProjection); +define_debug_via_print!(TraitRef, ExistentialTraitRef, ExistentialProjection, PatternKind); impl fmt::Display for OutlivesPredicate where diff --git a/compiler/rustc_type_ir/src/lib.rs b/compiler/rustc_type_ir/src/lib.rs index bdc61e956f8c2..792090effcff1 100644 --- a/compiler/rustc_type_ir/src/lib.rs +++ b/compiler/rustc_type_ir/src/lib.rs @@ -31,6 +31,7 @@ pub mod outlives; pub mod relate; pub mod search_graph; pub mod solve; +pub mod walk; // These modules are not `pub` since they are glob-imported. #[macro_use] @@ -44,6 +45,7 @@ mod generic_arg; mod infer_ctxt; mod interner; mod opaque_ty; +mod pattern; mod predicate; mod predicate_kind; mod region_kind; @@ -67,6 +69,7 @@ pub use generic_arg::*; pub use infer_ctxt::*; pub use interner::*; pub use opaque_ty::*; +pub use pattern::*; pub use predicate::*; pub use predicate_kind::*; pub use region_kind::*; diff --git a/compiler/rustc_type_ir/src/pattern.rs b/compiler/rustc_type_ir/src/pattern.rs new file mode 100644 index 0000000000000..d74a82da1f92a --- /dev/null +++ b/compiler/rustc_type_ir/src/pattern.rs @@ -0,0 +1,16 @@ +use derive_where::derive_where; +#[cfg(feature = "nightly")] +use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext}; +use rustc_type_ir_macros::{Lift_Generic, TypeFoldable_Generic, TypeVisitable_Generic}; + +use crate::Interner; + +#[derive_where(Clone, Copy, Hash, PartialEq, Eq; I: Interner)] +#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)] +#[cfg_attr( + feature = "nightly", + derive(Decodable_NoContext, Encodable_NoContext, HashStable_NoContext) +)] +pub enum PatternKind { + Range { start: I::Const, end: I::Const }, +} diff --git a/compiler/rustc_middle/src/ty/walk.rs b/compiler/rustc_type_ir/src/walk.rs similarity index 55% rename from compiler/rustc_middle/src/ty/walk.rs rename to compiler/rustc_type_ir/src/walk.rs index a23316ae6fc88..5683e1f1712c8 100644 --- a/compiler/rustc_middle/src/ty/walk.rs +++ b/compiler/rustc_type_ir/src/walk.rs @@ -1,20 +1,21 @@ //! An iterator over the type substructure. //! WARNING: this does not keep track of the region depth. -use rustc_data_structures::sso::SsoHashSet; use smallvec::{SmallVec, smallvec}; use tracing::debug; -use crate::ty::{self, GenericArg, GenericArgKind, Ty}; +use crate::data_structures::SsoHashSet; +use crate::inherent::*; +use crate::{self as ty, Interner}; // The TypeWalker's stack is hot enough that it's worth going to some effort to // avoid heap allocations. 
-type TypeWalkerStack<'tcx> = SmallVec<[GenericArg<'tcx>; 8]>; +type TypeWalkerStack = SmallVec<[::GenericArg; 8]>; -pub struct TypeWalker<'tcx> { - stack: TypeWalkerStack<'tcx>, +pub struct TypeWalker { + stack: TypeWalkerStack, last_subtree: usize, - pub visited: SsoHashSet>, + pub visited: SsoHashSet, } /// An iterator for walking the type tree. @@ -25,8 +26,8 @@ pub struct TypeWalker<'tcx> { /// in this situation walker only visits each type once. /// It maintains a set of visited types and /// skips any types that are already there. -impl<'tcx> TypeWalker<'tcx> { - pub fn new(root: GenericArg<'tcx>) -> Self { +impl TypeWalker { + pub fn new(root: I::GenericArg) -> Self { Self { stack: smallvec![root], last_subtree: 1, visited: SsoHashSet::new() } } @@ -47,16 +48,16 @@ impl<'tcx> TypeWalker<'tcx> { } } -impl<'tcx> Iterator for TypeWalker<'tcx> { - type Item = GenericArg<'tcx>; +impl Iterator for TypeWalker { + type Item = I::GenericArg; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { debug!("next(): stack={:?}", self.stack); loop { let next = self.stack.pop()?; self.last_subtree = self.stack.len(); if self.visited.insert(next) { - push_inner(&mut self.stack, next); + push_inner::(&mut self.stack, next); debug!("next: stack={:?}", self.stack); return Some(next); } @@ -64,63 +65,15 @@ impl<'tcx> Iterator for TypeWalker<'tcx> { } } -impl<'tcx> GenericArg<'tcx> { - /// Iterator that walks `self` and any types reachable from - /// `self`, in depth-first order. Note that just walks the types - /// that appear in `self`, it does not descend into the fields of - /// structs or variants. For example: - /// - /// ```text - /// isize => { isize } - /// Foo> => { Foo>, Bar, isize } - /// [isize] => { [isize], isize } - /// ``` - pub fn walk(self) -> TypeWalker<'tcx> { - TypeWalker::new(self) - } -} - -impl<'tcx> Ty<'tcx> { - /// Iterator that walks `self` and any types reachable from - /// `self`, in depth-first order. Note that just walks the types - /// that appear in `self`, it does not descend into the fields of - /// structs or variants. For example: - /// - /// ```text - /// isize => { isize } - /// Foo> => { Foo>, Bar, isize } - /// [isize] => { [isize], isize } - /// ``` - pub fn walk(self) -> TypeWalker<'tcx> { - TypeWalker::new(self.into()) - } -} - -impl<'tcx> ty::Const<'tcx> { - /// Iterator that walks `self` and any types reachable from - /// `self`, in depth-first order. Note that just walks the types - /// that appear in `self`, it does not descend into the fields of - /// structs or variants. For example: - /// - /// ```text - /// isize => { isize } - /// Foo> => { Foo>, Bar, isize } - /// [isize] => { [isize], isize } - /// ``` - pub fn walk(self) -> TypeWalker<'tcx> { - TypeWalker::new(self.into()) - } -} - /// We push `GenericArg`s on the stack in reverse order so as to /// maintain a pre-order traversal. As of the time of this /// writing, the fact that the traversal is pre-order is not /// known to be significant to any code, but it seems like the /// natural order one would expect (basically, the order of the /// types as they are written). 
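As an illustration of that ordering, a minimal sketch assuming a `tcx: TyCtxt<'_>` from `rustc_middle` is in scope (not part of the patch):

    // `walk()` yields the root first, then its constituents, depth-first and
    // deduplicated via the `visited` set.
    let slice_ty = Ty::new_slice(tcx, tcx.types.isize); // the type `[isize]`
    let order: Vec<String> = slice_ty.walk().map(|arg| format!("{arg:?}")).collect();
    // Expected order: ["[isize]", "isize"] -- the slice type, then its element.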
-fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) { - match parent.unpack() { - GenericArgKind::Type(parent_ty) => match *parent_ty.kind() { +fn push_inner(stack: &mut TypeWalkerStack, parent: I::GenericArg) { + match parent.kind() { + ty::GenericArgKind::Type(parent_ty) => match parent_ty.kind() { ty::Bool | ty::Char | ty::Int(_) @@ -136,7 +89,7 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) | ty::Foreign(..) => {} ty::Pat(ty, pat) => { - match *pat { + match pat.kind() { ty::PatternKind::Range { start, end } => { stack.push(end.into()); stack.push(start.into()); @@ -163,22 +116,25 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) } ty::Dynamic(obj, lt, _) => { stack.push(lt.into()); - stack.extend(obj.iter().rev().flat_map(|predicate| { - let (args, opt_ty) = match predicate.skip_binder() { - ty::ExistentialPredicate::Trait(tr) => (tr.args, None), - ty::ExistentialPredicate::Projection(p) => (p.args, Some(p.term)), - ty::ExistentialPredicate::AutoTrait(_) => - // Empty iterator - { - (ty::GenericArgs::empty(), None) - } - }; - - args.iter().rev().chain(opt_ty.map(|term| match term.unpack() { - ty::TermKind::Ty(ty) => ty.into(), - ty::TermKind::Const(ct) => ct.into(), - })) - })); + stack.extend( + obj.iter() + .rev() + .filter_map(|predicate| { + let (args, opt_ty) = match predicate.skip_binder() { + ty::ExistentialPredicate::Trait(tr) => (tr.args, None), + ty::ExistentialPredicate::Projection(p) => (p.args, Some(p.term)), + ty::ExistentialPredicate::AutoTrait(_) => { + return None; + } + }; + + Some(args.iter().rev().chain(opt_ty.map(|term| match term.kind() { + ty::TermKind::Ty(ty) => ty.into(), + ty::TermKind::Const(ct) => ct.into(), + }))) + }) + .flatten(), + ); } ty::Adt(_, args) | ty::Closure(_, args) @@ -188,7 +144,7 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) | ty::FnDef(_, args) => { stack.extend(args.iter().rev()); } - ty::Tuple(ts) => stack.extend(ts.iter().rev().map(GenericArg::from)), + ty::Tuple(ts) => stack.extend(ts.iter().rev().map(|ty| ty.into())), ty::FnPtr(sig_tys, _hdr) => { stack.extend( sig_tys.skip_binder().inputs_and_output.iter().rev().map(|ty| ty.into()), @@ -198,15 +154,15 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) stack.push(bound_ty.skip_binder().into()); } }, - GenericArgKind::Lifetime(_) => {} - GenericArgKind::Const(parent_ct) => match parent_ct.kind() { + ty::GenericArgKind::Lifetime(_) => {} + ty::GenericArgKind::Const(parent_ct) => match parent_ct.kind() { ty::ConstKind::Infer(_) | ty::ConstKind::Param(_) | ty::ConstKind::Placeholder(_) | ty::ConstKind::Bound(..) 
| ty::ConstKind::Error(_) => {} - ty::ConstKind::Value(cv) => stack.push(cv.ty.into()), + ty::ConstKind::Value(cv) => stack.push(cv.ty().into()), ty::ConstKind::Expr(expr) => stack.extend(expr.args().iter().rev()), ty::ConstKind::Unevaluated(ct) => { diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 5c381181218df..8b703bc33dd0f 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -349,6 +349,7 @@ #![feature(hashmap_internals)] #![feature(hint_must_use)] #![feature(ip)] +#![feature(ipv6_hop_limit)] #![feature(lazy_get)] #![feature(maybe_uninit_slice)] #![feature(maybe_uninit_write_slice)] diff --git a/library/std/src/net/tcp.rs b/library/std/src/net/tcp.rs index 6a95142640726..aac7a52c7e133 100644 --- a/library/std/src/net/tcp.rs +++ b/library/std/src/net/tcp.rs @@ -548,6 +548,46 @@ impl TcpStream { self.0.ttl() } + /// Sets the value for the `IPV6_UNICAST_HOPS` option on this socket. + /// + /// This value sets the unicast hop limit field that is used in every packet + /// sent from this socket. + /// + /// # Examples + /// + /// ```no_run + /// #![feature(ipv6_hop_limit)] + /// use std::net::TcpStream; + /// + /// let stream = TcpStream::connect("[::1]:12345") + /// .expect("Couldn't connect to the server..."); + /// stream.set_hop_limit_v6(88).expect("set_hop_limit_v6 call failed"); + /// ``` + #[unstable(feature = "ipv6_hop_limit", issue = "139166")] + pub fn set_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + self.0.set_hop_limit_v6(limit) + } + + /// Gets the value of the `IPV6_UNICAST_HOPS` option on this socket. + /// + /// For more information about this option, see [`TcpStream::set_hop_limit_v6`]. + /// + /// # Examples + /// + /// ```no_run + /// #![feature(ipv6_hop_limit)] + /// use std::net::TcpStream; + /// + /// let stream = TcpStream::connect("[::1]:12345") + /// .expect("Couldn't connect to the server..."); + /// stream.set_hop_limit_v6(88).expect("set_hop_limit_v6 call failed"); + /// assert_eq!(stream.hop_limit_v6().unwrap(), 88); + /// ``` + #[unstable(feature = "ipv6_hop_limit", issue = "139166")] + pub fn hop_limit_v6(&self) -> io::Result { + self.0.hop_limit_v6() + } + /// Gets the value of the `SO_ERROR` option on this socket. /// /// This will retrieve the stored error in the underlying socket, clearing @@ -942,6 +982,44 @@ impl TcpListener { self.0.ttl() } + /// Sets the value for the `IPV6_UNICAST_HOPS` option on this socket. + /// + /// This value sets the unicast hop limit field that is used in every packet + /// sent from this socket. + /// + /// # Examples + /// + /// ```no_run + /// #![feature(ipv6_hop_limit)] + /// use std::net::TcpListener; + /// + /// let listener = TcpListener::bind("[::1]:12345").unwrap(); + /// listener.set_hop_limit_v6(88).expect("set_hop_limit_v6 call failed"); + /// ``` + #[unstable(feature = "ipv6_hop_limit", issue = "139166")] + pub fn set_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + self.0.set_hop_limit_v6(limit) + } + + /// Gets the value of the `IPV6_UNICAST_HOPS` option on this socket. + /// + /// For more information about this option, see [`TcpListener::set_hop_limit_v6`]. 
+ /// + /// # Examples + /// + /// ```no_run + /// #![feature(ipv6_hop_limit)] + /// use std::net::TcpListener; + /// + /// let listener = TcpListener::bind("[::1]:12345").unwrap(); + /// listener.set_hop_limit_v6(88).expect("set_hop_limit_v6 call failed"); + /// assert_eq!(listener.hop_limit_v6().unwrap(), 88); + /// ``` + #[unstable(feature = "ipv6_hop_limit", issue = "139166")] + pub fn hop_limit_v6(&self) -> io::Result { + self.0.hop_limit_v6() + } + #[stable(feature = "net2_mutators", since = "1.9.0")] #[deprecated(since = "1.16.0", note = "this option can only be set before the socket is bound")] #[allow(missing_docs)] diff --git a/library/std/src/net/tcp/tests.rs b/library/std/src/net/tcp/tests.rs index 03003037b295c..d14acbb40cb0b 100644 --- a/library/std/src/net/tcp/tests.rs +++ b/library/std/src/net/tcp/tests.rs @@ -875,6 +875,23 @@ fn ttl() { assert_eq!(ttl, t!(stream.ttl())); } +#[test] +#[cfg_attr(target_env = "sgx", ignore)] +fn hop_limit() { + let hlim = 100; + + let addr = next_test_ip6(); + let listener = t!(TcpListener::bind(&addr)); + + t!(listener.set_hop_limit_v6(hlim)); + assert_eq!(hlim, t!(listener.hop_limit_v6())); + + let stream = t!(TcpStream::connect(&addr)); + + t!(stream.set_hop_limit_v6(hlim)); + assert_eq!(hlim, t!(stream.hop_limit_v6())); +} + #[test] #[cfg_attr(target_env = "sgx", ignore)] fn set_nonblocking() { diff --git a/library/std/src/net/udp.rs b/library/std/src/net/udp.rs index a97b3299774bb..a1c55fa8316c0 100644 --- a/library/std/src/net/udp.rs +++ b/library/std/src/net/udp.rs @@ -582,6 +582,82 @@ impl UdpSocket { self.0.ttl() } + /// Sets the value for the `IPV6_UNICAST_HOPS` option on this socket. + /// + /// This value sets the unicast hop limit field that is used in every packet + /// sent from this socket. + /// + /// # Examples + /// + /// ```no_run + /// #![feature(ipv6_hop_limit)] + /// use std::net::UdpSocket; + /// + /// let socket = UdpSocket::bind("[::1]:12345").expect("couldn't bind to address"); + /// socket.set_hop_limit_v6(88).expect("set_hop_limit_v6 call failed"); + /// ``` + #[unstable(feature = "ipv6_hop_limit", issue = "139166")] + pub fn set_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + self.0.set_hop_limit_v6(limit) + } + + /// Gets the value of the `IPV6_UNICAST_HOPS` option on this socket. + /// + /// For more information about this option, see [`UdpSocket::set_hop_limit_v6`]. + /// + /// # Examples + /// + /// ```no_run + /// #![feature(ipv6_hop_limit)] + /// use std::net::UdpSocket; + /// + /// let socket = UdpSocket::bind("[::1]:12345").expect("couldn't bind to address"); + /// socket.set_hop_limit_v6(88).expect("set_hop_limit_v6 call failed"); + /// assert_eq!(socket.hop_limit_v6().unwrap(), 88); + /// ``` + #[unstable(feature = "ipv6_hop_limit", issue = "139166")] + pub fn hop_limit_v6(&self) -> io::Result { + self.0.hop_limit_v6() + } + + /// Sets the value for the `IPV6_MULTICAST_HOPS` option on this socket. + /// + /// This value sets the hop limit field for outgoing multicast packets + /// sent from this socket. 
+ /// + /// # Examples + /// + /// ```no_run + /// #![feature(ipv6_hop_limit)] + /// use std::net::UdpSocket; + /// + /// let socket = UdpSocket::bind("[::1]:12345").expect("couldn't bind to address"); + /// socket.set_multicast_hop_limit_v6(88).expect("set_multicast_hop_limit_v6 call failed"); + /// ``` + #[unstable(feature = "ipv6_hop_limit", issue = "139166")] + pub fn set_multicast_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + self.0.set_multicast_hop_limit_v6(limit) + } + + /// Gets the value of the `IPV6_MULTICAST_HOPS` option on this socket. + /// + /// For more information about this option, see [`UdpSocket::set_multicast_hop_limit_v6`]. + /// + /// # Examples + /// + /// ```no_run + /// #![feature(ipv6_hop_limit)] + /// use std::net::UdpSocket; + /// + /// let socket = UdpSocket::bind("[::1]:12345").expect("couldn't bind to address"); + /// socket.set_multicast_hop_limit_v6(88).expect("set_multicast_hop_limit_v6 call failed"); + /// assert_eq!(socket.multicast_hop_limit_v6().unwrap(), 88); + /// ``` + #[unstable(feature = "ipv6_hop_limit", issue = "139166")] + pub fn multicast_hop_limit_v6(&self) -> io::Result { + self.0.multicast_hop_limit_v6() + } + /// Executes an operation of the `IP_ADD_MEMBERSHIP` type. /// /// This function specifies a new multicast group for this socket to join. diff --git a/library/std/src/net/udp/tests.rs b/library/std/src/net/udp/tests.rs index 91da3135f97c6..8af9ee2f7e5ef 100644 --- a/library/std/src/net/udp/tests.rs +++ b/library/std/src/net/udp/tests.rs @@ -357,6 +357,18 @@ fn ttl() { assert_eq!(ttl, t!(stream.ttl())); } +#[test] +fn hop_limit() { + let hlim = 100; + + let addr = next_test_ip6(); + + let stream = t!(UdpSocket::bind(&addr)); + + t!(stream.set_hop_limit_v6(hlim)); + assert_eq!(hlim, t!(stream.hop_limit_v6())); +} + #[test] fn set_nonblocking() { each_ip(&mut |addr, _| { diff --git a/library/std/src/sys/net/connection/sgx.rs b/library/std/src/sys/net/connection/sgx.rs index 242df10bc3270..52aac652b7b3a 100644 --- a/library/std/src/sys/net/connection/sgx.rs +++ b/library/std/src/sys/net/connection/sgx.rs @@ -8,6 +8,7 @@ use crate::time::Duration; use crate::{error, fmt}; const DEFAULT_FAKE_TTL: u32 = 64; +const DEFAULT_FAKE_HLIM: u8 = 64; #[derive(Debug, Clone)] pub struct Socket { @@ -199,6 +200,14 @@ impl TcpStream { sgx_ineffective(DEFAULT_FAKE_TTL) } + pub fn set_hop_limit_v6(&self, _: u8) -> io::Result<()> { + sgx_ineffective(()) + } + + pub fn hop_limit_v6(&self) -> io::Result { + sgx_ineffective(DEFAULT_FAKE_HLIM) + } + pub fn take_error(&self) -> io::Result> { Ok(None) } @@ -277,6 +286,14 @@ impl TcpListener { sgx_ineffective(DEFAULT_FAKE_TTL) } + pub fn set_hop_limit_v6(&self, _: u8) -> io::Result<()> { + sgx_ineffective(()) + } + + pub fn hop_limit_v6(&self) -> io::Result { + sgx_ineffective(DEFAULT_FAKE_HLIM) + } + pub fn set_only_v6(&self, _: bool) -> io::Result<()> { sgx_ineffective(()) } @@ -416,6 +433,22 @@ impl UdpSocket { self.0 } + pub fn set_hop_limit_v6(&self, _: u8) -> io::Result<()> { + self.0 + } + + pub fn hop_limit_v6(&self) -> io::Result { + self.0 + } + + pub fn set_multicast_hop_limit_v6(&self, _: u8) -> io::Result<()> { + self.0 + } + + pub fn multicast_hop_limit_v6(&self) -> io::Result { + self.0 + } + pub fn take_error(&self) -> io::Result> { self.0 } diff --git a/library/std/src/sys/net/connection/socket.rs b/library/std/src/sys/net/connection/socket.rs index 7301bde6881a3..9af63fe5fd078 100644 --- a/library/std/src/sys/net/connection/socket.rs +++ b/library/std/src/sys/net/connection/socket.rs @@ 
-454,6 +454,15 @@ impl TcpStream { Ok(raw as u32) } + pub fn set_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + setsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_UNICAST_HOPS, limit as c_int) + } + + pub fn hop_limit_v6(&self) -> io::Result { + let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_UNICAST_HOPS)?; + Ok(raw as u8) + } + pub fn take_error(&self) -> io::Result> { self.inner.take_error() } @@ -581,6 +590,15 @@ impl TcpListener { Ok(raw as u32) } + pub fn set_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + setsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_UNICAST_HOPS, limit as c_int) + } + + pub fn hop_limit_v6(&self) -> io::Result { + let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_UNICAST_HOPS)?; + Ok(raw as u8) + } + pub fn set_only_v6(&self, only_v6: bool) -> io::Result<()> { setsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_V6ONLY, only_v6 as c_int) } @@ -786,6 +804,24 @@ impl UdpSocket { Ok(raw as u32) } + pub fn set_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + setsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_UNICAST_HOPS, limit as c_int) + } + + pub fn hop_limit_v6(&self) -> io::Result { + let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_UNICAST_HOPS)?; + Ok(raw as u8) + } + + pub fn set_multicast_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + setsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_MULTICAST_HOPS, limit as c_int) + } + + pub fn multicast_hop_limit_v6(&self) -> io::Result { + let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_MULTICAST_HOPS)?; + Ok(raw as u8) + } + pub fn take_error(&self) -> io::Result> { self.inner.take_error() } diff --git a/library/std/src/sys/net/connection/socket/windows.rs b/library/std/src/sys/net/connection/socket/windows.rs index ce975bb2289c2..8ac9837dbcb8b 100644 --- a/library/std/src/sys/net/connection/socket/windows.rs +++ b/library/std/src/sys/net/connection/socket/windows.rs @@ -30,10 +30,10 @@ pub(super) mod netc { pub use crate::sys::c::{ ADDRESS_FAMILY as sa_family_t, ADDRINFOA as addrinfo, IP_ADD_MEMBERSHIP, IP_DROP_MEMBERSHIP, IP_MULTICAST_LOOP, IP_MULTICAST_TTL, IP_TTL, IPPROTO_IP, IPPROTO_IPV6, - IPV6_ADD_MEMBERSHIP, IPV6_DROP_MEMBERSHIP, IPV6_MULTICAST_LOOP, IPV6_V6ONLY, SO_BROADCAST, - SO_RCVTIMEO, SO_SNDTIMEO, SOCK_DGRAM, SOCK_STREAM, SOCKADDR as sockaddr, - SOCKADDR_STORAGE as sockaddr_storage, SOL_SOCKET, bind, connect, freeaddrinfo, getpeername, - getsockname, getsockopt, listen, setsockopt, + IPV6_ADD_MEMBERSHIP, IPV6_DROP_MEMBERSHIP, IPV6_MULTICAST_HOPS, IPV6_MULTICAST_LOOP, + IPV6_UNICAST_HOPS, IPV6_V6ONLY, SO_BROADCAST, SO_RCVTIMEO, SO_SNDTIMEO, SOCK_DGRAM, + SOCK_STREAM, SOCKADDR as sockaddr, SOCKADDR_STORAGE as sockaddr_storage, SOL_SOCKET, bind, + connect, freeaddrinfo, getpeername, getsockname, getsockopt, listen, setsockopt, }; #[allow(non_camel_case_types)] diff --git a/library/std/src/sys/net/connection/uefi/mod.rs b/library/std/src/sys/net/connection/uefi/mod.rs index da2174396266f..437284f9c9177 100644 --- a/library/std/src/sys/net/connection/uefi/mod.rs +++ b/library/std/src/sys/net/connection/uefi/mod.rs @@ -103,6 +103,14 @@ impl TcpStream { self.0 } + pub fn set_hop_limit_v6(&self, _: u8) -> io::Result<()> { + self.0 + } + + pub fn hop_limit_v6(&self) -> io::Result { + self.0 + } + pub fn take_error(&self) -> io::Result> { self.0 } @@ -145,6 +153,14 @@ impl TcpListener { self.0 } + pub fn set_hop_limit_v6(&self, _: u8) -> io::Result<()> { + self.0 + } + + pub fn hop_limit_v6(&self) -> io::Result { + self.0 + } + pub fn 
set_only_v6(&self, _: bool) -> io::Result<()> { self.0 } @@ -271,6 +287,22 @@ impl UdpSocket { self.0 } + pub fn set_hop_limit_v6(&self, _: u8) -> io::Result<()> { + self.0 + } + + pub fn hop_limit_v6(&self) -> io::Result { + self.0 + } + + pub fn set_multicast_hop_limit_v6(&self, _: u8) -> io::Result<()> { + self.0 + } + + pub fn multicast_hop_limit_v6(&self) -> io::Result { + self.0 + } + pub fn take_error(&self) -> io::Result> { self.0 } diff --git a/library/std/src/sys/net/connection/unsupported.rs b/library/std/src/sys/net/connection/unsupported.rs index da2174396266f..73417c0d5fc88 100644 --- a/library/std/src/sys/net/connection/unsupported.rs +++ b/library/std/src/sys/net/connection/unsupported.rs @@ -103,6 +103,14 @@ impl TcpStream { self.0 } + pub fn set_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + self.0 + } + + pub fn hop_limit_v6(&self) -> io::Result { + self.0 + } + pub fn take_error(&self) -> io::Result> { self.0 } @@ -145,6 +153,14 @@ impl TcpListener { self.0 } + pub fn set_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + self.0 + } + + pub fn hop_limit_v6(&self) -> io::Result { + self.0 + } + pub fn set_only_v6(&self, _: bool) -> io::Result<()> { self.0 } @@ -271,6 +287,22 @@ impl UdpSocket { self.0 } + pub fn set_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + self.0 + } + + pub fn hop_limit_v6(&self) -> io::Result { + self.0 + } + + pub fn set_multicast_hop_limit_v6(&self, limit: u8) -> io::Result<()> { + self.0 + } + + pub fn multicast_hop_limit_v6(&self) -> io::Result { + self.0 + } + pub fn take_error(&self) -> io::Result> { self.0 } diff --git a/library/std/src/sys/net/connection/wasip1.rs b/library/std/src/sys/net/connection/wasip1.rs index 951dc65e5b47d..a62912b6407af 100644 --- a/library/std/src/sys/net/connection/wasip1.rs +++ b/library/std/src/sys/net/connection/wasip1.rs @@ -162,6 +162,14 @@ impl TcpStream { unsupported() } + pub fn set_hop_limit_v6(&self, _: u8) -> io::Result<()> { + unsupported() + } + + pub fn hop_limit_v6(&self) -> io::Result { + unsupported() + } + pub fn take_error(&self) -> io::Result> { unsupported() } @@ -245,6 +253,14 @@ impl TcpListener { unsupported() } + pub fn set_hop_limit_v6(&self, _: u8) -> io::Result<()> { + unsupported() + } + + pub fn hop_limit_v6(&self) -> io::Result { + unsupported() + } + pub fn set_only_v6(&self, _: bool) -> io::Result<()> { unsupported() } @@ -416,6 +432,22 @@ impl UdpSocket { unsupported() } + pub fn set_hop_limit_v6(&self, _: u8) -> io::Result<()> { + unsupported() + } + + pub fn hop_limit_v6(&self) -> io::Result { + unsupported() + } + + pub fn set_multicast_hop_limit_v6(&self, _: u8) -> io::Result<()> { + unsupported() + } + + pub fn multicast_hop_limit_v6(&self) -> io::Result { + unsupported() + } + pub fn take_error(&self) -> io::Result> { unsupported() } diff --git a/library/std/src/sys/net/connection/xous/tcplistener.rs b/library/std/src/sys/net/connection/xous/tcplistener.rs index 7f13ca5592040..8e9febac28c85 100644 --- a/library/std/src/sys/net/connection/xous/tcplistener.rs +++ b/library/std/src/sys/net/connection/xous/tcplistener.rs @@ -199,6 +199,14 @@ impl TcpListener { .map(|res| res[0] as _)?) 
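On unix and windows the new accessors above are thin wrappers over setsockopt/getsockopt with IPPROTO_IPV6 and IPV6_UNICAST_HOPS (IPV6_MULTICAST_HOPS for the UDP multicast variant), while the UEFI, wasip1 and `unsupported` backends stub them out. Outside of std, the same socket option can be exercised through the libc crate; a minimal Unix-only sketch, assuming a `libc` dependency and an arbitrary hop value of 64:

use std::mem::size_of;
use std::net::UdpSocket;
use std::os::fd::AsRawFd;

fn main() -> std::io::Result<()> {
    // Bind an IPv6 UDP socket; [::1] keeps the example local-only.
    let sock = UdpSocket::bind("[::1]:0")?;
    let hops: libc::c_int = 64;

    // SAFETY: the fd stays valid for the lifetime of `sock`, and the buffer
    // length passed to setsockopt matches the c_int value it points at.
    let rc = unsafe {
        libc::setsockopt(
            sock.as_raw_fd(),
            libc::IPPROTO_IPV6,
            libc::IPV6_UNICAST_HOPS,
            &hops as *const libc::c_int as *const libc::c_void,
            size_of::<libc::c_int>() as libc::socklen_t,
        )
    };
    if rc != 0 {
        return Err(std::io::Error::last_os_error());
    }
    Ok(())
}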
} + pub fn set_hop_limit_v6(&self, hlim: u8) -> io::Result<()> { + unimpl!(); + } + + pub fn hop_limit_v6(&self) -> io::Result { + unimpl!(); + } + pub fn set_only_v6(&self, _: bool) -> io::Result<()> { unimpl!(); } diff --git a/library/std/src/sys/net/connection/xous/tcpstream.rs b/library/std/src/sys/net/connection/xous/tcpstream.rs index e8aea8b706a58..e36fb079ffbf4 100644 --- a/library/std/src/sys/net/connection/xous/tcpstream.rs +++ b/library/std/src/sys/net/connection/xous/tcpstream.rs @@ -389,6 +389,14 @@ impl TcpStream { .map(|res| res[0] as _)?) } + pub fn set_hop_limit_v6(&self, hlim: u8) -> io::Result<()> { + unimpl!(); + } + + pub fn hop_limit_v6(&self) -> io::Result { + unimpl!(); + } + pub fn take_error(&self) -> io::Result> { // this call doesn't have a meaning on our platform, but we can at least not panic if it's used. Ok(None) diff --git a/library/std/src/sys/net/connection/xous/udp.rs b/library/std/src/sys/net/connection/xous/udp.rs index c112c04ce94bc..81bedba77fc73 100644 --- a/library/std/src/sys/net/connection/xous/udp.rs +++ b/library/std/src/sys/net/connection/xous/udp.rs @@ -376,6 +376,14 @@ impl UdpSocket { .map(|res| res[0] as _)?) } + pub fn set_hop_limit_v6(&self, hlim: u8) -> io::Result<()> { + unimpl!(); + } + + pub fn hop_limit_v6(&self) -> io::Result { + unimpl!(); + } + pub fn take_error(&self) -> io::Result> { // this call doesn't have a meaning on our platform, but we can at least not panic if it's used. Ok(None) @@ -411,6 +419,14 @@ impl UdpSocket { unimpl!(); } + pub fn set_multicast_hop_limit_v6(&self, _: u8) -> io::Result<()> { + unimpl!(); + } + + pub fn multicast_hop_limit_v6(&self) -> io::Result { + unimpl!(); + } + pub fn set_multicast_loop_v6(&self, _: bool) -> io::Result<()> { unimpl!(); } diff --git a/library/std/src/sys/pal/windows/c/bindings.txt b/library/std/src/sys/pal/windows/c/bindings.txt index d5fbb453c6f96..1f3cb311347d7 100644 --- a/library/std/src/sys/pal/windows/c/bindings.txt +++ b/library/std/src/sys/pal/windows/c/bindings.txt @@ -2246,7 +2246,9 @@ IPPROTO_UDP IPV6_ADD_MEMBERSHIP IPV6_DROP_MEMBERSHIP IPV6_MREQ +IPV6_MULTICAST_HOPS IPV6_MULTICAST_LOOP +IPV6_UNICAST_HOPS IPV6_V6ONLY LINGER listen diff --git a/library/std/src/sys/pal/windows/c/windows_sys.rs b/library/std/src/sys/pal/windows/c/windows_sys.rs index eb2914b864473..4d9395bbb078a 100644 --- a/library/std/src/sys/pal/windows/c/windows_sys.rs +++ b/library/std/src/sys/pal/windows/c/windows_sys.rs @@ -2875,7 +2875,9 @@ impl Default for IPV6_MREQ { unsafe { core::mem::zeroed() } } } +pub const IPV6_MULTICAST_HOPS: i32 = 10i32; pub const IPV6_MULTICAST_LOOP: i32 = 11i32; +pub const IPV6_UNICAST_HOPS: i32 = 4i32; pub const IPV6_V6ONLY: i32 = 27i32; pub const IP_ADD_MEMBERSHIP: i32 = 12i32; pub const IP_DROP_MEMBERSHIP: i32 = 13i32; diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml index 3db34ed24cc20..ba1b8f256586e 100644 --- a/src/tools/compiletest/Cargo.toml +++ b/src/tools/compiletest/Cargo.toml @@ -10,6 +10,7 @@ doctest = false # tidy-alphabetical-start anstyle-svg = "0.1.3" build_helper = { path = "../../build_helper" } +camino = "1" colored = "2" diff = "0.1.10" getopts = "0.2" diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs index 6750b5288f42a..604c5fcbddff0 100644 --- a/src/tools/compiletest/src/common.rs +++ b/src/tools/compiletest/src/common.rs @@ -1,18 +1,17 @@ use std::collections::{BTreeSet, HashMap, HashSet}; -use std::ffi::OsString; -use std::path::{Path, PathBuf}; use std::process::Command; 
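compiletest picks up `camino` here so that test paths are UTF-8 by construction (`Utf8Path`/`Utf8PathBuf`) instead of `OsStr`-backed `Path`/`PathBuf`. A short sketch of the conversions the rest of this diff leans on, assuming `camino = "1"`:

use camino::{Utf8Path, Utf8PathBuf};
use std::path::PathBuf;

fn main() {
    // Fallible conversion from a possibly non-UTF-8 std path.
    let std_path = PathBuf::from("build/host/test/ui");
    let utf8: Utf8PathBuf = Utf8PathBuf::from_path_buf(std_path).expect("non-UTF-8 path");

    // Display works directly; no `.display()` dance needed.
    println!("suite root: {}", utf8);

    // Borrow as &str, or as a std &Path when an API still wants one.
    let _s: &str = utf8.as_str();
    let _p: &std::path::Path = utf8.as_std_path();

    // Utf8Path::new mirrors Path::new for string literals.
    let file = Utf8Path::new("a.rs");
    assert_eq!(file.extension(), Some("rs"));
}

The payoff shows up throughout the later hunks: `.display()` and `.to_str().unwrap()` calls disappear because `Utf8Path` implements `Display` and exposes a str-backed API.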
use std::str::FromStr; use std::sync::OnceLock; use std::{fmt, iter}; use build_helper::git::GitConfig; +use camino::{Utf8Path, Utf8PathBuf}; use semver::Version; use serde::de::{Deserialize, Deserializer, Error as _}; pub use self::Mode::*; use crate::executor::{ColorConfig, OutputFormat}; -use crate::util::{PathBufExt, add_dylib_path}; +use crate::util::{Utf8PathBufExt, add_dylib_path}; macro_rules! string_enum { ($(#[$meta:meta])* $vis:vis enum $name:ident { $($variant:ident => $repr:expr,)* }) => { @@ -183,25 +182,25 @@ pub struct Config { pub fail_fast: bool, /// The library paths required for running the compiler. - pub compile_lib_path: PathBuf, + pub compile_lib_path: Utf8PathBuf, /// The library paths required for running compiled programs. - pub run_lib_path: PathBuf, + pub run_lib_path: Utf8PathBuf, /// The rustc executable. - pub rustc_path: PathBuf, + pub rustc_path: Utf8PathBuf, /// The cargo executable. - pub cargo_path: Option, + pub cargo_path: Option, /// Rustc executable used to compile run-make recipes. - pub stage0_rustc_path: Option, + pub stage0_rustc_path: Option, /// The rustdoc executable. - pub rustdoc_path: Option, + pub rustdoc_path: Option, /// The coverage-dump executable. - pub coverage_dump_path: Option, + pub coverage_dump_path: Option, /// The Python executable to use for LLDB and htmldocck. pub python: String, @@ -213,27 +212,27 @@ pub struct Config { pub jsondoclint_path: Option, /// The LLVM `FileCheck` binary path. - pub llvm_filecheck: Option, + pub llvm_filecheck: Option, /// Path to LLVM's bin directory. - pub llvm_bin_dir: Option, + pub llvm_bin_dir: Option, /// The path to the Clang executable to run Clang-based tests with. If /// `None` then these tests will be ignored. pub run_clang_based_tests_with: Option, /// The directory containing the sources. - pub src_root: PathBuf, + pub src_root: Utf8PathBuf, /// The directory containing the test suite sources. Must be a subdirectory of `src_root`. - pub src_test_suite_root: PathBuf, + pub src_test_suite_root: Utf8PathBuf, /// Root build directory (e.g. `build/`). - pub build_root: PathBuf, + pub build_root: Utf8PathBuf, /// Test suite specific build directory (e.g. `build/host/test/ui/`). - pub build_test_suite_root: PathBuf, + pub build_test_suite_root: Utf8PathBuf, /// The directory containing the compiler sysroot - pub sysroot_base: PathBuf, + pub sysroot_base: Utf8PathBuf, /// The number of the stage under test. pub stage: u32, @@ -301,7 +300,7 @@ pub struct Config { pub host: String, /// Path to / name of the Microsoft Console Debugger (CDB) executable - pub cdb: Option, + pub cdb: Option, /// Version of CDB pub cdb_version: Option<[u16; 4]>, @@ -322,7 +321,7 @@ pub struct Config { pub system_llvm: bool, /// Path to the android tools - pub android_cross_path: PathBuf, + pub android_cross_path: Utf8PathBuf, /// Extra parameter to run adb on arm-linux-androideabi pub adb_path: String, @@ -346,7 +345,7 @@ pub struct Config { pub color: ColorConfig, /// where to find the remote test client process, if we're using it - pub remote_test_client: Option, + pub remote_test_client: Option, /// mode describing what file the actual ui output will be compared to pub compare_mode: Option, @@ -414,7 +413,7 @@ pub struct Config { /// Path to minicore aux library, used for `no_core` tests that need `core` stubs in /// cross-compilation scenarios that do not otherwise want/need to `-Zbuild-std`. Used in e.g. /// ABI tests. 
- pub minicore_path: PathBuf, + pub minicore_path: Utf8PathBuf, } impl Config { @@ -804,8 +803,8 @@ fn serde_parse_u32<'de, D: Deserializer<'de>>(deserializer: D) -> Result, compare_mode: &Option, kind: &str, -) -> PathBuf { +) -> Utf8PathBuf { assert!(UI_EXTENSIONS.contains(&kind)); let mut parts = Vec::new(); @@ -865,7 +864,7 @@ pub const UI_COVERAGE_MAP: &str = "cov-map"; /// ``` /// /// This is created early when tests are collected to avoid race conditions. -pub fn output_relative_path(config: &Config, relative_dir: &Path) -> PathBuf { +pub fn output_relative_path(config: &Config, relative_dir: &Utf8Path) -> Utf8PathBuf { config.build_test_suite_root.join(relative_dir) } @@ -874,10 +873,10 @@ pub fn output_testname_unique( config: &Config, testpaths: &TestPaths, revision: Option<&str>, -) -> PathBuf { +) -> Utf8PathBuf { let mode = config.compare_mode.as_ref().map_or("", |m| m.to_str()); let debugger = config.debugger.as_ref().map_or("", |m| m.to_str()); - PathBuf::from(&testpaths.file.file_stem().unwrap()) + Utf8PathBuf::from(&testpaths.file.file_stem().unwrap()) .with_extra_extension(config.mode.output_dir_disambiguator()) .with_extra_extension(revision.unwrap_or("")) .with_extra_extension(mode) @@ -887,7 +886,11 @@ pub fn output_testname_unique( /// Absolute path to the directory where all output for the given /// test/revision should reside. Example: /// /path/to/build/host-tuple/test/ui/relative/testname.revision.mode/ -pub fn output_base_dir(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf { +pub fn output_base_dir( + config: &Config, + testpaths: &TestPaths, + revision: Option<&str>, +) -> Utf8PathBuf { output_relative_path(config, &testpaths.relative_dir) .join(output_testname_unique(config, testpaths, revision)) } @@ -895,12 +898,20 @@ pub fn output_base_dir(config: &Config, testpaths: &TestPaths, revision: Option< /// Absolute path to the base filename used as output for the given /// test/revision. Example: /// /path/to/build/host-tuple/test/ui/relative/testname.revision.mode/testname -pub fn output_base_name(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf { +pub fn output_base_name( + config: &Config, + testpaths: &TestPaths, + revision: Option<&str>, +) -> Utf8PathBuf { output_base_dir(config, testpaths, revision).join(testpaths.file.file_stem().unwrap()) } /// Absolute path to the directory to use for incremental compilation. Example: /// /path/to/build/host-tuple/test/ui/relative/testname.mode/testname.inc -pub fn incremental_dir(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf { +pub fn incremental_dir( + config: &Config, + testpaths: &TestPaths, + revision: Option<&str>, +) -> Utf8PathBuf { output_base_name(config, testpaths, revision).with_extension("inc") } diff --git a/src/tools/compiletest/src/compute_diff.rs b/src/tools/compiletest/src/compute_diff.rs index 4c942c51bae13..509e7e117039c 100644 --- a/src/tools/compiletest/src/compute_diff.rs +++ b/src/tools/compiletest/src/compute_diff.rs @@ -1,6 +1,7 @@ use std::collections::VecDeque; use std::fs::{File, FileType}; -use std::path::Path; + +use camino::Utf8Path; #[derive(Debug, PartialEq)] pub enum DiffLine { @@ -112,8 +113,8 @@ pub(crate) fn write_diff(expected: &str, actual: &str, context_size: usize) -> S /// Returns whether any data was actually written. 
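`output_testname_unique` above chains `with_extra_extension` from compiletest's `Utf8PathBufExt` (imported earlier from `crate::util`, whose body is not part of this diff). Purely as a rough idea of what such a helper does, a hypothetical minimal version could look like this; the real implementation may differ:

use camino::{Utf8Path, Utf8PathBuf};

// Hypothetical sketch of an "append another dotted suffix" helper.
trait Utf8PathBufExt {
    fn with_extra_extension(&self, extension: &str) -> Utf8PathBuf;
}

impl Utf8PathBufExt for Utf8Path {
    fn with_extra_extension(&self, extension: &str) -> Utf8PathBuf {
        if extension.is_empty() {
            self.to_path_buf()
        } else {
            Utf8PathBuf::from(format!("{}.{}", self, extension))
        }
    }
}

fn main() {
    let base = Utf8Path::new("testname");
    let unique = base.with_extra_extension("rev1").with_extra_extension("polonius");
    assert_eq!(unique.as_str(), "testname.rev1.polonius");
}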
pub(crate) fn write_filtered_diff( diff_filename: &str, - out_dir: &Path, - compare_dir: &Path, + out_dir: &Utf8Path, + compare_dir: &Utf8Path, verbose: bool, filter: Filter, ) -> bool @@ -123,19 +124,21 @@ where use std::io::{Read, Write}; let mut diff_output = File::create(diff_filename).unwrap(); let mut wrote_data = false; - for entry in walkdir::WalkDir::new(out_dir) { + for entry in walkdir::WalkDir::new(out_dir.as_std_path()) { let entry = entry.expect("failed to read file"); let extension = entry.path().extension().and_then(|p| p.to_str()); if filter(entry.file_type(), extension) { - let expected_path = compare_dir.join(entry.path().strip_prefix(&out_dir).unwrap()); + let expected_path = compare_dir + .as_std_path() + .join(entry.path().strip_prefix(&out_dir.as_std_path()).unwrap()); let expected = if let Ok(s) = std::fs::read(&expected_path) { s } else { continue }; let actual_path = entry.path(); let actual = std::fs::read(&actual_path).unwrap(); let diff = unified_diff::diff( &expected, - &expected_path.to_string_lossy(), + &expected_path.to_str().unwrap(), &actual, - &actual_path.to_string_lossy(), + &actual_path.to_str().unwrap(), 3, ); wrote_data |= !diff.is_empty(); diff --git a/src/tools/compiletest/src/debuggers.rs b/src/tools/compiletest/src/debuggers.rs index 5126e55aea123..c133d7fd4fbd0 100644 --- a/src/tools/compiletest/src/debuggers.rs +++ b/src/tools/compiletest/src/debuggers.rs @@ -1,9 +1,9 @@ use std::env; -use std::ffi::OsString; -use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::Arc; +use camino::{Utf8Path, Utf8PathBuf}; + use crate::common::{Config, Debugger}; pub(crate) fn configure_cdb(config: &Config) -> Option> { @@ -78,12 +78,15 @@ fn is_pc_windows_msvc_target(target: &str) -> bool { target.ends_with("-pc-windows-msvc") } -fn find_cdb(target: &str) -> Option { +fn find_cdb(target: &str) -> Option { if !(cfg!(windows) && is_pc_windows_msvc_target(target)) { return None; } - let pf86 = env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?; + let pf86 = Utf8PathBuf::from_path_buf( + env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?.into(), + ) + .unwrap(); let cdb_arch = if cfg!(target_arch = "x86") { "x86" } else if cfg!(target_arch = "x86_64") { @@ -96,8 +99,7 @@ fn find_cdb(target: &str) -> Option { return None; // No compatible CDB.exe in the Windows 10 SDK }; - let mut path = PathBuf::new(); - path.push(pf86); + let mut path = pf86; path.push(r"Windows Kits\10\Debuggers"); // We could check 8.1 etc. too? 
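`find_cdb` above converts the ProgramFiles environment variable into a `Utf8PathBuf` up front and unwraps, since a non-UTF-8 install path is not expected there. A more forgiving variant of the same conversion, shown only as a sketch (the helper name and the probed variable are illustrative):

use camino::Utf8PathBuf;
use std::env;

/// Reads an environment variable as a UTF-8 path, returning None if the
/// variable is unset or its value is not valid UTF-8.
fn utf8_path_from_env(var: &str) -> Option<Utf8PathBuf> {
    let raw = env::var_os(var)?;
    Utf8PathBuf::from_path_buf(raw.into()).ok()
}

fn main() {
    if let Some(pf) = utf8_path_from_env("ProgramFiles") {
        // push() mirrors std's PathBuf API, which the CDB lookup above relies on.
        let mut dbg = pf;
        dbg.push(r"Windows Kits\10\Debuggers");
        println!("would probe: {}", dbg);
    }
}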
path.push(cdb_arch); path.push(r"cdb.exe"); @@ -106,15 +108,15 @@ fn find_cdb(target: &str) -> Option { return None; } - Some(path.into_os_string()) + Some(path) } /// Returns Path to CDB pub(crate) fn analyze_cdb( cdb: Option, target: &str, -) -> (Option, Option<[u16; 4]>) { - let cdb = cdb.map(OsString::from).or_else(|| find_cdb(target)); +) -> (Option, Option<[u16; 4]>) { + let cdb = cdb.map(Utf8PathBuf::from).or_else(|| find_cdb(target)); let mut version = None; if let Some(cdb) = cdb.as_ref() { @@ -143,7 +145,7 @@ pub(crate) fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> { pub(crate) fn analyze_gdb( gdb: Option, target: &str, - android_cross_path: &Path, + android_cross_path: &Utf8Path, ) -> (Option, Option) { #[cfg(not(windows))] const GDB_FALLBACK: &str = "gdb"; @@ -152,10 +154,7 @@ pub(crate) fn analyze_gdb( let fallback_gdb = || { if is_android_gdb_target(target) { - let mut gdb_path = match android_cross_path.to_str() { - Some(x) => x.to_owned(), - None => panic!("cannot find android cross path"), - }; + let mut gdb_path = android_cross_path.to_string(); gdb_path.push_str("/bin/gdb"); gdb_path } else { diff --git a/src/tools/compiletest/src/errors.rs b/src/tools/compiletest/src/errors.rs index 64d68eb7f23e5..3bb98276bf525 100644 --- a/src/tools/compiletest/src/errors.rs +++ b/src/tools/compiletest/src/errors.rs @@ -2,9 +2,9 @@ use std::fmt; use std::fs::File; use std::io::BufReader; use std::io::prelude::*; -use std::path::Path; use std::sync::OnceLock; +use camino::Utf8Path; use regex::Regex; use tracing::*; @@ -102,8 +102,8 @@ impl Error { /// /// If revision is not None, then we look /// for `//[X]~` instead, where `X` is the current revision. -pub fn load_errors(testfile: &Path, revision: Option<&str>) -> Vec { - let rdr = BufReader::new(File::open(testfile).unwrap()); +pub fn load_errors(testfile: &Utf8Path, revision: Option<&str>) -> Vec { + let rdr = BufReader::new(File::open(testfile.as_std_path()).unwrap()); // `last_nonfollow_error` tracks the most recently seen // line with an error template that did not use the diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs index 3406e8749a151..33ecdec499171 100644 --- a/src/tools/compiletest/src/header.rs +++ b/src/tools/compiletest/src/header.rs @@ -3,9 +3,9 @@ use std::env; use std::fs::File; use std::io::BufReader; use std::io::prelude::*; -use std::path::{Path, PathBuf}; use std::process::Command; +use camino::{Utf8Path, Utf8PathBuf}; use semver::Version; use tracing::*; @@ -45,12 +45,12 @@ pub struct EarlyProps { } impl EarlyProps { - pub fn from_file(config: &Config, testfile: &Path) -> Self { - let file = File::open(testfile).expect("open test file to parse earlyprops"); + pub fn from_file(config: &Config, testfile: &Utf8Path) -> Self { + let file = File::open(testfile.as_std_path()).expect("open test file to parse earlyprops"); Self::from_reader(config, testfile, file) } - pub fn from_reader(config: &Config, testfile: &Path, rdr: R) -> Self { + pub fn from_reader(config: &Config, testfile: &Utf8Path, rdr: R) -> Self { let mut props = EarlyProps::default(); let mut poisoned = false; iter_header( @@ -66,7 +66,7 @@ impl EarlyProps { ); if poisoned { - eprintln!("errors encountered during EarlyProps parsing: {}", testfile.display()); + eprintln!("errors encountered during EarlyProps parsing: {}", testfile); panic!("errors encountered during EarlyProps parsing"); } @@ -88,7 +88,7 @@ pub struct TestProps { pub doc_flags: Vec, // If present, the name of a file that this 
test should match when // pretty-printed - pub pp_exact: Option, + pub pp_exact: Option, /// Auxiliary crates that should be built and made available to this test. pub(crate) aux: AuxProps, // Environment settings to use for compiling @@ -134,7 +134,7 @@ pub struct TestProps { // not set by end-users; rather it is set by the incremental // testing harness and used when generating compilation // arguments. (In particular, it propagates to the aux-builds.) - pub incremental_dir: Option, + pub incremental_dir: Option, // If `true`, this test will use incremental compilation. // // This can be set manually with the `incremental` header, or implicitly @@ -311,7 +311,12 @@ impl TestProps { } } - pub fn from_aux_file(&self, testfile: &Path, revision: Option<&str>, config: &Config) -> Self { + pub fn from_aux_file( + &self, + testfile: &Utf8Path, + revision: Option<&str>, + config: &Config, + ) -> Self { let mut props = TestProps::new(); // copy over select properties to the aux build: @@ -322,10 +327,10 @@ impl TestProps { props } - pub fn from_file(testfile: &Path, revision: Option<&str>, config: &Config) -> Self { + pub fn from_file(testfile: &Utf8Path, revision: Option<&str>, config: &Config) -> Self { let mut props = TestProps::new(); props.load_from(testfile, revision, config); - props.exec_env.push(("RUSTC".to_string(), config.rustc_path.display().to_string())); + props.exec_env.push(("RUSTC".to_string(), config.rustc_path.to_string())); match (props.pass_mode, props.fail_mode) { (None, None) if config.mode == Mode::Ui => props.fail_mode = Some(FailMode::Check), @@ -340,10 +345,10 @@ impl TestProps { /// tied to a particular revision `foo` (indicated by writing /// `//@[foo]`), then the property is ignored unless `test_revision` is /// `Some("foo")`. 
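One detail behind the `pp_exact` change above: camino's `file_name()`/`file_stem()` return `Option<&str>` rather than `Option<&OsStr>`, which is why the fallback of defaulting to the test's own file name (see `parse_pp_exact` further down) stays a plain `map(Utf8PathBuf::from)`. A small sketch with an invented test path:

use camino::{Utf8Path, Utf8PathBuf};

fn main() {
    let testfile = Utf8Path::new("tests/pretty/example.rs");

    // Components come back as &str, not &OsStr.
    assert_eq!(testfile.file_name(), Some("example.rs"));
    assert_eq!(testfile.file_stem(), Some("example"));
    assert_eq!(testfile.extension(), Some("rs"));

    // Mirrors the `pp-exact` fallback of defaulting to the test's own file name.
    let pp_exact: Option<Utf8PathBuf> = testfile.file_name().map(Utf8PathBuf::from);
    assert_eq!(pp_exact.as_deref(), Some(Utf8Path::new("example.rs")));
}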
- fn load_from(&mut self, testfile: &Path, test_revision: Option<&str>, config: &Config) { + fn load_from(&mut self, testfile: &Utf8Path, test_revision: Option<&str>, config: &Config) { let mut has_edition = false; if !testfile.is_dir() { - let file = File::open(testfile).unwrap(); + let file = File::open(testfile.as_std_path()).unwrap(); let mut poisoned = false; @@ -600,7 +605,7 @@ impl TestProps { ); if poisoned { - eprintln!("errors encountered during TestProps parsing: {}", testfile.display()); + eprintln!("errors encountered during TestProps parsing: {}", testfile); panic!("errors encountered during TestProps parsing"); } } @@ -871,7 +876,7 @@ fn iter_header( mode: Mode, _suite: &str, poisoned: &mut bool, - testfile: &Path, + testfile: &Utf8Path, rdr: impl Read, it: &mut dyn FnMut(DirectiveLine<'_>), ) { @@ -923,9 +928,7 @@ fn iter_header( eprintln!( "error: detected unknown compiletest test directive `{}` in {}:{}", - directive_line.raw_directive, - testfile.display(), - line_number, + directive_line.raw_directive, testfile, line_number, ); return; @@ -937,10 +940,7 @@ fn iter_header( eprintln!( "error: detected trailing compiletest test directive `{}` in {}:{}\n \ help: put the trailing directive in it's own line: `//@ {}`", - trailing_directive, - testfile.display(), - line_number, - trailing_directive, + trailing_directive, testfile, line_number, trailing_directive, ); return; @@ -952,7 +952,12 @@ fn iter_header( } impl Config { - fn parse_and_update_revisions(&self, testfile: &Path, line: &str, existing: &mut Vec) { + fn parse_and_update_revisions( + &self, + testfile: &Utf8Path, + line: &str, + existing: &mut Vec, + ) { const FORBIDDEN_REVISION_NAMES: [&str; 2] = [ // `//@ revisions: true false` Implying `--cfg=true` and `--cfg=false` makes it very // weird for the test, since if the test writer wants a cfg of the same revision name @@ -965,26 +970,19 @@ impl Config { if let Some(raw) = self.parse_name_value_directive(line, "revisions") { if self.mode == Mode::RunMake { - panic!("`run-make` tests do not support revisions: {}", testfile.display()); + panic!("`run-make` tests do not support revisions: {}", testfile); } let mut duplicates: HashSet<_> = existing.iter().cloned().collect(); for revision in raw.split_whitespace() { if !duplicates.insert(revision.to_string()) { - panic!( - "duplicate revision: `{}` in line `{}`: {}", - revision, - raw, - testfile.display() - ); + panic!("duplicate revision: `{}` in line `{}`: {}", revision, raw, testfile); } if FORBIDDEN_REVISION_NAMES.contains(&revision) { panic!( "revision name `{revision}` is not permitted: `{}` in line `{}`: {}", - revision, - raw, - testfile.display() + revision, raw, testfile ); } @@ -995,8 +993,7 @@ impl Config { "revision name `{revision}` is not permitted in a test suite that uses \ `FileCheck` annotations as it is confusing when used as custom `FileCheck` \ prefix: `{revision}` in line `{}`: {}", - raw, - testfile.display() + raw, testfile ); } @@ -1016,11 +1013,11 @@ impl Config { (name.to_owned(), value.to_owned()) } - fn parse_pp_exact(&self, line: &str, testfile: &Path) -> Option { + fn parse_pp_exact(&self, line: &str, testfile: &Utf8Path) -> Option { if let Some(s) = self.parse_name_value_directive(line, "pp-exact") { - Some(PathBuf::from(&s)) + Some(Utf8PathBuf::from(&s)) } else if self.parse_name_directive(line, "pp-exact") { - testfile.file_name().map(PathBuf::from) + testfile.file_name().map(Utf8PathBuf::from) } else { None } @@ -1126,20 +1123,19 @@ fn expand_variables(mut value: String, config: 
&Config) -> String { if value.contains(CWD) { let cwd = env::current_dir().unwrap(); - value = value.replace(CWD, &cwd.to_string_lossy()); + value = value.replace(CWD, &cwd.to_str().unwrap()); } if value.contains(SRC_BASE) { - value = value.replace(SRC_BASE, &config.src_test_suite_root.to_str().unwrap()); + value = value.replace(SRC_BASE, &config.src_test_suite_root.as_str()); } if value.contains(TEST_SUITE_BUILD_BASE) { - value = - value.replace(TEST_SUITE_BUILD_BASE, &config.build_test_suite_root.to_str().unwrap()); + value = value.replace(TEST_SUITE_BUILD_BASE, &config.build_test_suite_root.as_str()); } if value.contains(SYSROOT_BASE) { - value = value.replace(SYSROOT_BASE, &config.sysroot_base.to_str().unwrap()); + value = value.replace(SYSROOT_BASE, &config.sysroot_base.as_str()); } if value.contains(TARGET_LINKER) { @@ -1152,9 +1148,9 @@ fn expand_variables(mut value: String, config: &Config) -> String { if value.contains(RUST_SRC_BASE) { let src_base = config.sysroot_base.join("lib/rustlib/src/rust"); - src_base.try_exists().expect(&*format!("{} should exists", src_base.display())); - let src_base = src_base.read_link().unwrap_or(src_base); - value = value.replace(RUST_SRC_BASE, &src_base.to_string_lossy()); + src_base.try_exists().expect(&*format!("{} should exists", src_base)); + let src_base = src_base.read_link_utf8().unwrap_or(src_base); + value = value.replace(RUST_SRC_BASE, &src_base.as_str()); } value @@ -1257,14 +1253,14 @@ pub fn llvm_has_libzstd(config: &Config) -> bool { // contains a path to that static lib, and that it exists. // // See compiler/rustc_llvm/build.rs for more details and similar expectations. - fn is_zstd_in_config(llvm_bin_dir: &Path) -> Option<()> { + fn is_zstd_in_config(llvm_bin_dir: &Utf8Path) -> Option<()> { let llvm_config_path = llvm_bin_dir.join("llvm-config"); let output = Command::new(llvm_config_path).arg("--system-libs").output().ok()?; assert!(output.status.success(), "running llvm-config --system-libs failed"); let libs = String::from_utf8(output.stdout).ok()?; for lib in libs.split_whitespace() { - if lib.ends_with("libzstd.a") && Path::new(lib).exists() { + if lib.ends_with("libzstd.a") && Utf8Path::new(lib).exists() { return Some(()); } } @@ -1282,7 +1278,7 @@ pub fn llvm_has_libzstd(config: &Config) -> bool { // `lld` supports it. If not, an error will be emitted: "LLVM was not built with // LLVM_ENABLE_ZSTD or did not find zstd at build time". 
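`expand_variables` above can now splice paths into directive strings via `as_str()` with no lossy conversion. A reduced sketch of the same idea; the placeholder names here only approximate compiletest's real ones:

use camino::Utf8Path;

/// Replaces `{{...}}`-style placeholders with UTF-8 paths.
fn expand(mut value: String, build_base: &Utf8Path, src_base: &Utf8Path) -> String {
    value = value.replace("{{build-base}}", build_base.as_str());
    value = value.replace("{{src-base}}", src_base.as_str());
    value
}

fn main() {
    let line = "--out-dir {{build-base}}/aux --edition 2021".to_string();
    let expanded = expand(line, Utf8Path::new("/b/test/ui"), Utf8Path::new("/src/tests/ui"));
    assert_eq!(expanded, "--out-dir /b/test/ui/aux --edition 2021");
}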
#[cfg(unix)] - fn is_lld_built_with_zstd(llvm_bin_dir: &Path) -> Option<()> { + fn is_lld_built_with_zstd(llvm_bin_dir: &Utf8Path) -> Option<()> { let lld_path = llvm_bin_dir.join("lld"); if lld_path.exists() { // We can't call `lld` as-is, it expects to be invoked by a compiler driver using a @@ -1318,7 +1314,7 @@ pub fn llvm_has_libzstd(config: &Config) -> bool { } #[cfg(not(unix))] - fn is_lld_built_with_zstd(_llvm_bin_dir: &Path) -> Option<()> { + fn is_lld_built_with_zstd(_llvm_bin_dir: &Utf8Path) -> Option<()> { None } @@ -1385,7 +1381,7 @@ pub(crate) fn make_test_description( config: &Config, cache: &HeadersCache, name: String, - path: &Path, + path: &Utf8Path, src: R, test_revision: Option<&str>, poisoned: &mut bool, @@ -1416,7 +1412,7 @@ pub(crate) fn make_test_description( ignore_message = Some(reason.into()); } IgnoreDecision::Error { message } => { - eprintln!("error: {}:{line_number}: {message}", path.display()); + eprintln!("error: {}:{line_number}: {message}", path); *poisoned = true; return; } @@ -1446,7 +1442,7 @@ pub(crate) fn make_test_description( ); if local_poisoned { - eprintln!("errors encountered when trying to make test description: {}", path.display()); + eprintln!("errors encountered when trying to make test description: {}", path); panic!("errors encountered when trying to make test description"); } @@ -1555,7 +1551,7 @@ fn ignore_lldb(config: &Config, line: &str) -> IgnoreDecision { IgnoreDecision::Continue } -fn ignore_llvm(config: &Config, path: &Path, line: &str) -> IgnoreDecision { +fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { if let Some(needed_components) = config.parse_name_value_directive(line, "needs-llvm-components") { @@ -1567,8 +1563,7 @@ fn ignore_llvm(config: &Config, path: &Path, line: &str) -> IgnoreDecision { if env::var_os("COMPILETEST_REQUIRE_ALL_LLVM_COMPONENTS").is_some() { panic!( "missing LLVM component {}, and COMPILETEST_REQUIRE_ALL_LLVM_COMPONENTS is set: {}", - missing_component, - path.display() + missing_component, path ); } return IgnoreDecision::Ignore { diff --git a/src/tools/compiletest/src/header/tests.rs b/src/tools/compiletest/src/header/tests.rs index f3461f3c244f3..3a8c3748de99f 100644 --- a/src/tools/compiletest/src/header/tests.rs +++ b/src/tools/compiletest/src/header/tests.rs @@ -1,6 +1,6 @@ use std::io::Read; -use std::path::Path; +use camino::Utf8Path; use semver::Version; use super::{ @@ -13,7 +13,7 @@ use crate::executor::{CollectedTestDesc, ShouldPanic}; fn make_test_description( config: &Config, name: String, - path: &Path, + path: &Utf8Path, src: R, revision: Option<&str>, ) -> CollectedTestDesc { @@ -230,12 +230,12 @@ fn cfg() -> ConfigBuilder { fn parse_rs(config: &Config, contents: &str) -> EarlyProps { let bytes = contents.as_bytes(); - EarlyProps::from_reader(config, Path::new("a.rs"), bytes) + EarlyProps::from_reader(config, Utf8Path::new("a.rs"), bytes) } fn check_ignore(config: &Config, contents: &str) -> bool { let tn = String::new(); - let p = Path::new("a.rs"); + let p = Utf8Path::new("a.rs"); let d = make_test_description(&config, tn, p, std::io::Cursor::new(contents), None); d.ignore } @@ -244,7 +244,7 @@ fn check_ignore(config: &Config, contents: &str) -> bool { fn should_fail() { let config: Config = cfg().build(); let tn = String::new(); - let p = Path::new("a.rs"); + let p = Utf8Path::new("a.rs"); let d = make_test_description(&config, tn.clone(), p, std::io::Cursor::new(""), None); assert_eq!(d.should_panic, ShouldPanic::No); @@ -784,7 +784,7 @@ fn 
threads_support() { } } -fn run_path(poisoned: &mut bool, path: &Path, buf: &[u8]) { +fn run_path(poisoned: &mut bool, path: &Utf8Path, buf: &[u8]) { let rdr = std::io::Cursor::new(&buf); iter_header(Mode::Ui, "ui", poisoned, path, rdr, &mut |_| {}); } @@ -794,7 +794,7 @@ fn test_unknown_directive_check() { let mut poisoned = false; run_path( &mut poisoned, - Path::new("a.rs"), + Utf8Path::new("a.rs"), include_bytes!("./test-auxillary/unknown_directive.rs"), ); assert!(poisoned); @@ -805,7 +805,7 @@ fn test_known_directive_check_no_error() { let mut poisoned = false; run_path( &mut poisoned, - Path::new("a.rs"), + Utf8Path::new("a.rs"), include_bytes!("./test-auxillary/known_directive.rs"), ); assert!(!poisoned); @@ -816,7 +816,7 @@ fn test_error_annotation_no_error() { let mut poisoned = false; run_path( &mut poisoned, - Path::new("a.rs"), + Utf8Path::new("a.rs"), include_bytes!("./test-auxillary/error_annotation.rs"), ); assert!(!poisoned); @@ -827,7 +827,7 @@ fn test_non_rs_unknown_directive_not_checked() { let mut poisoned = false; run_path( &mut poisoned, - Path::new("a.Makefile"), + Utf8Path::new("a.Makefile"), include_bytes!("./test-auxillary/not_rs.Makefile"), ); assert!(!poisoned); @@ -836,21 +836,21 @@ fn test_non_rs_unknown_directive_not_checked() { #[test] fn test_trailing_directive() { let mut poisoned = false; - run_path(&mut poisoned, Path::new("a.rs"), b"//@ only-x86 only-arm"); + run_path(&mut poisoned, Utf8Path::new("a.rs"), b"//@ only-x86 only-arm"); assert!(poisoned); } #[test] fn test_trailing_directive_with_comment() { let mut poisoned = false; - run_path(&mut poisoned, Path::new("a.rs"), b"//@ only-x86 only-arm with comment"); + run_path(&mut poisoned, Utf8Path::new("a.rs"), b"//@ only-x86 only-arm with comment"); assert!(poisoned); } #[test] fn test_not_trailing_directive() { let mut poisoned = false; - run_path(&mut poisoned, Path::new("a.rs"), b"//@ revisions: incremental"); + run_path(&mut poisoned, Utf8Path::new("a.rs"), b"//@ revisions: incremental"); assert!(!poisoned); } diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs index 720663b30ef42..b969b22750bc0 100644 --- a/src/tools/compiletest/src/lib.rs +++ b/src/tools/compiletest/src/lib.rs @@ -22,16 +22,15 @@ pub mod util; use core::panic; use std::collections::HashSet; -use std::ffi::OsString; use std::fmt::Write; use std::io::{self, ErrorKind}; -use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::sync::{Arc, OnceLock}; use std::time::SystemTime; use std::{env, fs, vec}; use build_helper::git::{get_git_modified_files, get_git_untracked_files}; +use camino::{Utf8Path, Utf8PathBuf}; use getopts::Options; use tracing::*; use walkdir::WalkDir; @@ -230,15 +229,19 @@ pub fn parse_config(args: Vec) -> Config { panic!() } - fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf { - match m.opt_str(nm) { - Some(s) => PathBuf::from(&s), - None => panic!("no option (=path) found for {}", nm), + fn make_absolute(path: Utf8PathBuf) -> Utf8PathBuf { + if path.is_relative() { + Utf8PathBuf::try_from(env::current_dir().unwrap()).unwrap().join(path) + } else { + path } } - fn make_absolute(path: PathBuf) -> PathBuf { - if path.is_relative() { env::current_dir().unwrap().join(path) } else { path } + fn opt_path(m: &getopts::Matches, nm: &str) -> Utf8PathBuf { + match m.opt_str(nm) { + Some(s) => Utf8PathBuf::from(&s), + None => panic!("no option (=path) found for {}", nm), + } } let target = opt_str2(matches.opt_str("target")); @@ -279,12 +282,12 @@ pub fn 
parse_config(args: Vec) -> Config { .free .iter() .map(|f| { - let path = Path::new(f); + let path = Utf8Path::new(f); let mut iter = path.iter().skip(1); // We skip the test folder and check if the user passed `rmake.rs`. if iter.next().is_some_and(|s| s == "rmake.rs") && iter.next().is_none() { - path.parent().unwrap().to_str().unwrap().to_string() + path.parent().unwrap().to_string() } else { f.to_string() } @@ -316,8 +319,8 @@ pub fn parse_config(args: Vec) -> Config { assert!( src_test_suite_root.starts_with(&src_root), "`src-root` must be a parent of `src-test-suite-root`: `src-root`=`{}`, `src-test-suite-root` = `{}`", - src_root.display(), - src_test_suite_root.display() + src_root, + src_test_suite_root ); let build_root = opt_path(matches, "build-root"); @@ -332,16 +335,16 @@ pub fn parse_config(args: Vec) -> Config { compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")), run_lib_path: make_absolute(opt_path(matches, "run-lib-path")), rustc_path: opt_path(matches, "rustc-path"), - cargo_path: matches.opt_str("cargo-path").map(PathBuf::from), - stage0_rustc_path: matches.opt_str("stage0-rustc-path").map(PathBuf::from), - rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from), - coverage_dump_path: matches.opt_str("coverage-dump-path").map(PathBuf::from), + cargo_path: matches.opt_str("cargo-path").map(Utf8PathBuf::from), + stage0_rustc_path: matches.opt_str("stage0-rustc-path").map(Utf8PathBuf::from), + rustdoc_path: matches.opt_str("rustdoc-path").map(Utf8PathBuf::from), + coverage_dump_path: matches.opt_str("coverage-dump-path").map(Utf8PathBuf::from), python: matches.opt_str("python").unwrap(), jsondocck_path: matches.opt_str("jsondocck-path"), jsondoclint_path: matches.opt_str("jsondoclint-path"), run_clang_based_tests_with: matches.opt_str("run-clang-based-tests-with"), - llvm_filecheck: matches.opt_str("llvm-filecheck").map(PathBuf::from), - llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(PathBuf::from), + llvm_filecheck: matches.opt_str("llvm-filecheck").map(Utf8PathBuf::from), + llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(Utf8PathBuf::from), src_root, src_test_suite_root, @@ -407,7 +410,7 @@ pub fn parse_config(args: Vec) -> Config { }, only_modified: matches.opt_present("only-modified"), color, - remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from), + remote_test_client: matches.opt_str("remote-test-client").map(Utf8PathBuf::from), compare_mode, rustfix_coverage: matches.opt_present("rustfix-coverage"), has_html_tidy, @@ -450,19 +453,19 @@ pub fn parse_config(args: Vec) -> Config { pub fn log_config(config: &Config) { let c = config; logv(c, "configuration:".to_string()); - logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path)); - logv(c, format!("run_lib_path: {:?}", config.run_lib_path)); - logv(c, format!("rustc_path: {:?}", config.rustc_path.display())); + logv(c, format!("compile_lib_path: {}", config.compile_lib_path)); + logv(c, format!("run_lib_path: {}", config.run_lib_path)); + logv(c, format!("rustc_path: {}", config.rustc_path)); logv(c, format!("cargo_path: {:?}", config.cargo_path)); logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path)); - logv(c, format!("src_root: {}", config.src_root.display())); - logv(c, format!("src_test_suite_root: {}", config.src_test_suite_root.display())); + logv(c, format!("src_root: {}", config.src_root)); + logv(c, format!("src_test_suite_root: {}", config.src_test_suite_root)); - logv(c, format!("build_root: {}", config.build_root.display())); - 
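The filter handling above special-cases `<test-dir>/rmake.rs` arguments by walking UTF-8 path components directly. The same check, lifted into a standalone helper for illustration (the function name is mine):

use camino::Utf8Path;

/// If the argument points at `<test-dir>/rmake.rs`, return the test directory
/// name instead, mirroring the filter normalization above.
fn normalize_rmake_filter(arg: &str) -> String {
    let path = Utf8Path::new(arg);
    // Skip the test folder and check whether what remains is exactly `rmake.rs`.
    let mut iter = path.iter().skip(1);
    if iter.next().is_some_and(|s| s == "rmake.rs") && iter.next().is_none() {
        path.parent().unwrap().to_string()
    } else {
        arg.to_string()
    }
}

fn main() {
    assert_eq!(normalize_rmake_filter("foo/rmake.rs"), "foo");
    assert_eq!(normalize_rmake_filter("ui/lint/dead-code.rs"), "ui/lint/dead-code.rs");
}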
logv(c, format!("build_test_suite_root: {}", config.build_test_suite_root.display())); + logv(c, format!("build_root: {}", config.build_root)); + logv(c, format!("build_test_suite_root: {}", config.build_test_suite_root)); - logv(c, format!("sysroot_base: {}", config.sysroot_base.display())); + logv(c, format!("sysroot_base: {}", config.sysroot_base)); logv(c, format!("stage: {}", config.stage)); logv(c, format!("stage_id: {}", config.stage_id)); @@ -480,16 +483,16 @@ pub fn log_config(config: &Config) { logv(c, format!("target-rustcflags: {:?}", config.target_rustcflags)); logv(c, format!("target: {}", config.target)); logv(c, format!("host: {}", config.host)); - logv(c, format!("android-cross-path: {:?}", config.android_cross_path.display())); - logv(c, format!("adb_path: {:?}", config.adb_path)); - logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir)); + logv(c, format!("android-cross-path: {}", config.android_cross_path)); + logv(c, format!("adb_path: {}", config.adb_path)); + logv(c, format!("adb_test_dir: {}", config.adb_test_dir)); logv(c, format!("adb_device_status: {}", config.adb_device_status)); logv(c, format!("ar: {}", config.ar)); logv(c, format!("target-linker: {:?}", config.target_linker)); logv(c, format!("host-linker: {:?}", config.host_linker)); logv(c, format!("verbose: {}", config.verbose)); logv(c, format!("format: {:?}", config.format)); - logv(c, format!("minicore_path: {:?}", config.minicore_path.display())); + logv(c, format!("minicore_path: {}", config.minicore_path)); logv(c, "\n".to_string()); } @@ -517,7 +520,7 @@ pub fn run_tests(config: Arc) { coverage_file_path.push("rustfix_missing_coverage.txt"); if coverage_file_path.exists() { if let Err(e) = fs::remove_file(&coverage_file_path) { - panic!("Could not delete {} due to {}", coverage_file_path.display(), e) + panic!("Could not delete {} due to {}", coverage_file_path, e) } } } @@ -619,13 +622,13 @@ struct TestCollectorCx { config: Arc, cache: HeadersCache, common_inputs_stamp: Stamp, - modified_tests: Vec, + modified_tests: Vec, } /// Mutable state used during test collection. struct TestCollector { tests: Vec, - found_path_stems: HashSet, + found_path_stems: HashSet, poisoned: bool, } @@ -635,14 +638,13 @@ struct TestCollector { /// regardless of whether any filters/tests were specified on the command-line, /// because filtering is handled later by libtest. 
pub(crate) fn collect_and_make_tests(config: Arc) -> Vec { - debug!("making tests from {}", config.src_test_suite_root.display()); + debug!("making tests from {}", config.src_test_suite_root); let common_inputs_stamp = common_inputs_stamp(&config); let modified_tests = modified_tests(&config, &config.src_test_suite_root).unwrap_or_else(|err| { panic!( "modified_tests got error from dir: {}, error: {}", - config.src_test_suite_root.display(), - err + config.src_test_suite_root, err ) }); let cache = HeadersCache::load(&config); @@ -651,12 +653,9 @@ pub(crate) fn collect_and_make_tests(config: Arc) -> Vec let mut collector = TestCollector { tests: vec![], found_path_stems: HashSet::new(), poisoned: false }; - collect_tests_from_dir(&cx, &mut collector, &cx.config.src_test_suite_root, Path::new("")) + collect_tests_from_dir(&cx, &mut collector, &cx.config.src_test_suite_root, Utf8Path::new("")) .unwrap_or_else(|reason| { - panic!( - "Could not read tests from {}: {reason}", - cx.config.src_test_suite_root.display() - ) + panic!("Could not read tests from {}: {reason}", cx.config.src_test_suite_root) }); let TestCollector { tests, found_path_stems, poisoned } = collector; @@ -725,24 +724,29 @@ fn common_inputs_stamp(config: &Config) -> Stamp { /// the `--only-modified` flag is in use. /// /// (Might be inaccurate in some cases.) -fn modified_tests(config: &Config, dir: &Path) -> Result, String> { +fn modified_tests(config: &Config, dir: &Utf8Path) -> Result, String> { // If `--only-modified` wasn't passed, the list of modified tests won't be // used for anything, so avoid some work and just return an empty list. if !config.only_modified { return Ok(vec![]); } - let files = - get_git_modified_files(&config.git_config(), Some(dir), &vec!["rs", "stderr", "fixed"])?; + let files = get_git_modified_files( + &config.git_config(), + Some(dir.as_std_path()), + &vec!["rs", "stderr", "fixed"], + )?; // Add new test cases to the list, it will be convenient in daily development. let untracked_files = get_git_untracked_files(&config.git_config(), None)?.unwrap_or(vec![]); let all_paths = [&files[..], &untracked_files[..]].concat(); let full_paths = { - let mut full_paths: Vec = all_paths + let mut full_paths: Vec = all_paths .into_iter() - .map(|f| PathBuf::from(f).with_extension("").with_extension("rs")) - .filter_map(|f| if Path::new(&f).exists() { f.canonicalize().ok() } else { None }) + .map(|f| Utf8PathBuf::from(f).with_extension("").with_extension("rs")) + .filter_map( + |f| if Utf8Path::new(&f).exists() { f.canonicalize_utf8().ok() } else { None }, + ) .collect(); full_paths.dedup(); full_paths.sort_unstable(); @@ -756,8 +760,8 @@ fn modified_tests(config: &Config, dir: &Path) -> Result, String> { fn collect_tests_from_dir( cx: &TestCollectorCx, collector: &mut TestCollector, - dir: &Path, - relative_dir_path: &Path, + dir: &Utf8Path, + relative_dir_path: &Utf8Path, ) -> io::Result<()> { // Ignore directories that contain a file named `compiletest-ignore-dir`. if dir.join("compiletest-ignore-dir").exists() { @@ -790,16 +794,16 @@ fn collect_tests_from_dir( // subdirectories we find, except for `auxiliary` directories. // FIXME: this walks full tests tree, even if we have something to ignore // use walkdir/ignore like in tidy? - for file in fs::read_dir(dir)? { + for file in fs::read_dir(dir.as_std_path())? 
{ let file = file?; - let file_path = file.path(); - let file_name = file.file_name(); + let file_path = Utf8PathBuf::try_from(file.path()).unwrap(); + let file_name = file_path.file_name().unwrap(); - if is_test(&file_name) + if is_test(file_name) && (!cx.config.only_modified || cx.modified_tests.contains(&file_path)) { // We found a test file, so create the corresponding libtest structures. - debug!("found test file: {:?}", file_path.display()); + debug!(%file_path, "found test file"); // Record the stem of the test file, to check for overlaps later. let rel_test_path = relative_dir_path.join(file_path.file_stem().unwrap()); @@ -810,22 +814,20 @@ fn collect_tests_from_dir( make_test(cx, collector, &paths); } else if file_path.is_dir() { // Recurse to find more tests in a subdirectory. - let relative_file_path = relative_dir_path.join(file.file_name()); - if &file_name != "auxiliary" { - debug!("found directory: {:?}", file_path.display()); + let relative_file_path = relative_dir_path.join(file_name); + if file_name != "auxiliary" { + debug!(%file_path, "found directory"); collect_tests_from_dir(cx, collector, &file_path, &relative_file_path)?; } } else { - debug!("found other file/directory: {:?}", file_path.display()); + debug!(%file_path, "found other file/directory"); } } Ok(()) } /// Returns true if `file_name` looks like a proper test file name. -pub fn is_test(file_name: &OsString) -> bool { - let file_name = file_name.to_str().unwrap(); - +pub fn is_test(file_name: &str) -> bool { if !file_name.ends_with(".rs") { return false; } @@ -844,7 +846,7 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te let test_path = if cx.config.mode == Mode::RunMake { testpaths.file.join("rmake.rs") } else { - PathBuf::from(&testpaths.file) + testpaths.file.clone() }; // Scan the test file to discover its revisions, if any. @@ -899,7 +901,7 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te /// The path of the `stamp` file that gets created or updated whenever a /// particular test completes successfully. -fn stamp_file_path(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf { +fn stamp_file_path(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> Utf8PathBuf { output_base_dir(config, testpaths, revision).join("stamp") } @@ -912,7 +914,7 @@ fn files_related_to_test( testpaths: &TestPaths, props: &EarlyProps, revision: Option<&str>, -) -> Vec { +) -> Vec { let mut related = vec![]; if testpaths.file.is_dir() { @@ -920,7 +922,7 @@ fn files_related_to_test( for entry in WalkDir::new(&testpaths.file) { let path = entry.unwrap().into_path(); if path.is_file() { - related.push(path); + related.push(Utf8PathBuf::try_from(path).unwrap()); } } } else { @@ -991,7 +993,7 @@ struct Stamp { impl Stamp { /// Creates a timestamp holding the last-modified time of the specified file. - fn from_path(path: &Path) -> Self { + fn from_path(path: &Utf8Path) -> Self { let mut stamp = Stamp { time: SystemTime::UNIX_EPOCH }; stamp.add_path(path); stamp @@ -999,8 +1001,8 @@ impl Stamp { /// Updates this timestamp to the last-modified time of the specified file, /// if it is later than the currently-stored timestamp. 
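The collector above and the stamp bookkeeping just below both cross between std paths and `Utf8PathBuf`: they hand `as_std_path()` to `fs::read_dir`/walkdir on the way in and convert back with `Utf8PathBuf::try_from` on the way out. A compact sketch of that round-trip, assuming the `walkdir` crate:

use std::fs;
use std::time::SystemTime;

use camino::{Utf8Path, Utf8PathBuf};
use walkdir::WalkDir;

/// Walks `dir` and returns every file as a Utf8PathBuf, together with the
/// newest modification time seen (a Stamp-like summary).
fn utf8_files_and_newest_mtime(dir: &Utf8Path) -> (Vec<Utf8PathBuf>, SystemTime) {
    let mut files = Vec::new();
    let mut newest = SystemTime::UNIX_EPOCH;
    for entry in WalkDir::new(dir.as_std_path()) {
        let entry = entry.expect("failed to read dir entry");
        if !entry.file_type().is_file() {
            continue;
        }
        let modified = fs::metadata(entry.path())
            .and_then(|m| m.modified())
            .unwrap_or(SystemTime::UNIX_EPOCH);
        newest = newest.max(modified);
        // Test paths are expected to be UTF-8; panic otherwise, as the collector does.
        files.push(Utf8PathBuf::try_from(entry.into_path()).unwrap());
    }
    (files, newest)
}

fn main() {
    let (files, newest) = utf8_files_and_newest_mtime(Utf8Path::new("."));
    println!("{} files, newest mtime: {:?}", files.len(), newest);
}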
- fn add_path(&mut self, path: &Path) { - let modified = fs::metadata(path) + fn add_path(&mut self, path: &Utf8Path) { + let modified = fs::metadata(path.as_std_path()) .and_then(|metadata| metadata.modified()) .unwrap_or(SystemTime::UNIX_EPOCH); self.time = self.time.max(modified); @@ -1009,7 +1011,8 @@ impl Stamp { /// Updates this timestamp to the most recent last-modified time of all files /// recursively contained in the given directory, if it is later than the /// currently-stored timestamp. - fn add_dir(&mut self, path: &Path) { + fn add_dir(&mut self, path: &Utf8Path) { + let path = path.as_std_path(); for entry in WalkDir::new(path) { let entry = entry.unwrap(); if entry.file_type().is_file() { @@ -1042,7 +1045,7 @@ fn make_test_name(config: &Config, testpaths: &TestPaths, revision: Option<&str> config.mode, debugger, mode_suffix, - path.display(), + path, revision.map_or("".to_string(), |rev| format!("#{}", rev)) ) } @@ -1064,7 +1067,7 @@ fn make_test_name(config: &Config, testpaths: &TestPaths, revision: Option<&str> /// To avoid problems, we forbid test names from overlapping in this way. /// /// See for more context. -fn check_for_overlapping_test_paths(found_path_stems: &HashSet) { +fn check_for_overlapping_test_paths(found_path_stems: &HashSet) { let mut collisions = Vec::new(); for path in found_path_stems { for ancestor in path.ancestors().skip(1) { @@ -1077,7 +1080,7 @@ fn check_for_overlapping_test_paths(found_path_stems: &HashSet) { collisions.sort(); let collisions: String = collisions .into_iter() - .map(|(path, check_parent)| format!("test {path:?} clashes with {check_parent:?}\n")) + .map(|(path, check_parent)| format!("test {path} clashes with {check_parent}\n")) .collect(); panic!( "{collisions}\n\ diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index 208d32833c9cf..722ba66e95416 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -1,15 +1,15 @@ use std::borrow::Cow; use std::collections::{HashMap, HashSet}; -use std::ffi::{OsStr, OsString}; +use std::ffi::OsString; use std::fs::{self, File, create_dir_all}; use std::hash::{DefaultHasher, Hash, Hasher}; use std::io::prelude::*; use std::io::{self, BufReader}; -use std::path::{Path, PathBuf}; use std::process::{Child, Command, ExitStatus, Output, Stdio}; use std::sync::Arc; use std::{env, iter, str}; +use camino::{Utf8Path, Utf8PathBuf}; use colored::Colorize; use regex::{Captures, Regex}; use tracing::*; @@ -25,7 +25,7 @@ use crate::compute_diff::{DiffLine, make_diff, write_diff, write_filtered_diff}; use crate::errors::{self, Error, ErrorKind}; use crate::header::TestProps; use crate::read2::{Truncated, read2_abbreviated}; -use crate::util::{PathBufExt, add_dylib_path, logv, static_regex}; +use crate::util::{Utf8PathBufExt, add_dylib_path, logv, static_regex}; use crate::{ColorConfig, json, stamp_file_path}; mod debugger; @@ -131,7 +131,7 @@ pub fn run(config: Arc, testpaths: &TestPaths, revision: Option<&str>) { // We're going to be dumping a lot of info. Start on a new line. print!("\n\n"); } - debug!("running {:?}", testpaths.file.display()); + debug!("running {}", testpaths.file); let mut props = TestProps::from_file(&testpaths.file, revision, &config); // For non-incremental (i.e. 
regular UI) tests, the incremental directory @@ -144,7 +144,7 @@ pub fn run(config: Arc, testpaths: &TestPaths, revision: Option<&str>) { let cx = TestCx { config: &config, props: &props, testpaths, revision }; if let Err(e) = create_dir_all(&cx.output_base_dir()) { - panic!("failed to create output base directory {}: {e}", cx.output_base_dir().display()); + panic!("failed to create output base directory {}: {e}", cx.output_base_dir()); } if props.incremental { @@ -207,7 +207,8 @@ pub fn compute_stamp_hash(config: &Config) -> String { format!("{:x}", hash.finish()) } -fn remove_and_create_dir_all(path: &Path) { +fn remove_and_create_dir_all(path: &Utf8Path) { + let path = path.as_std_path(); let _ = fs::remove_dir_all(path); fs::create_dir_all(path).unwrap(); } @@ -423,7 +424,7 @@ impl<'test> TestCx<'test> { let aux_dir = self.aux_output_dir_name(); let input: &str = match read_from { ReadFrom::Stdin(_) => "-", - ReadFrom::Path => self.testpaths.file.to_str().unwrap(), + ReadFrom::Path => self.testpaths.file.as_str(), }; let mut rustc = Command::new(&self.config.rustc_path); @@ -590,10 +591,7 @@ impl<'test> TestCx<'test> { // FIXME(#65865) return; } else { - self.fatal(&format!( - "no error pattern specified in {:?}", - self.testpaths.file.display() - )); + self.fatal(&format!("no error pattern specified in {}", self.testpaths.file)); } } @@ -697,17 +695,17 @@ impl<'test> TestCx<'test> { } // On Windows, translate all '\' path separators to '/' - let file_name = format!("{}", self.testpaths.file.display()).replace(r"\", "/"); + let file_name = self.testpaths.file.to_string().replace(r"\", "/"); // On Windows, keep all '\' path separators to match the paths reported in the JSON output // from the compiler let diagnostic_file_name = if self.props.remap_src_base { - let mut p = PathBuf::from(FAKE_SRC_BASE); + let mut p = Utf8PathBuf::from(FAKE_SRC_BASE); p.push(&self.testpaths.relative_dir); p.push(self.testpaths.file.file_name().unwrap()); - p.display().to_string() + p.to_string() } else { - self.testpaths.file.display().to_string() + self.testpaths.file.to_string() }; let expect_help = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Help)); @@ -887,7 +885,7 @@ impl<'test> TestCx<'test> { /// `root_out_dir` and `root_testpaths` refer to the parameters of the actual test being run. /// Auxiliaries, no matter how deep, have the same root_out_dir and root_testpaths. 
- fn document(&self, root_out_dir: &Path, root_testpaths: &TestPaths) -> ProcRes { + fn document(&self, root_out_dir: &Utf8Path, root_testpaths: &TestPaths) -> ProcRes { if self.props.build_aux_docs { for rel_ab in &self.props.aux.builds { let aux_testpaths = self.compute_aux_test_paths(root_testpaths, rel_ab); @@ -916,13 +914,13 @@ impl<'test> TestCx<'test> { // actual --out-dir given to the auxiliary or test, as opposed to the root out dir for the entire // test - let out_dir: Cow<'_, Path> = if self.props.unique_doc_out_dir { + let out_dir: Cow<'_, Utf8Path> = if self.props.unique_doc_out_dir { let file_name = self.testpaths.file.file_stem().expect("file name should not be empty"); - let out_dir = PathBuf::from_iter([ + let out_dir = Utf8PathBuf::from_iter([ root_out_dir, - Path::new("docs"), - Path::new(file_name), - Path::new("doc"), + Utf8Path::new("docs"), + Utf8Path::new(file_name), + Utf8Path::new("doc"), ]); create_dir_all(&out_dir).unwrap(); Cow::Owned(out_dir) @@ -935,7 +933,7 @@ impl<'test> TestCx<'test> { rustdoc.current_dir(current_dir); rustdoc .arg("-L") - .arg(self.config.run_lib_path.to_str().unwrap()) + .arg(self.config.run_lib_path.as_path()) .arg("-L") .arg(aux_dir) .arg("-o") @@ -1073,7 +1071,7 @@ impl<'test> TestCx<'test> { let test_ab = of.file.parent().expect("test file path has no parent").join("auxiliary").join(rel_ab); if !test_ab.exists() { - self.fatal(&format!("aux-build `{}` source not found", test_ab.display())) + self.fatal(&format!("aux-build `{}` source not found", test_ab)) } TestPaths { @@ -1110,7 +1108,7 @@ impl<'test> TestCx<'test> { || !self.props.aux.proc_macros.is_empty() } - fn aux_output_dir(&self) -> PathBuf { + fn aux_output_dir(&self) -> Utf8PathBuf { let aux_dir = self.aux_output_dir_name(); if !self.props.aux.builds.is_empty() { @@ -1126,7 +1124,7 @@ impl<'test> TestCx<'test> { aux_dir } - fn build_all_auxiliary(&self, of: &TestPaths, aux_dir: &Path, rustc: &mut Command) { + fn build_all_auxiliary(&self, of: &TestPaths, aux_dir: &Utf8Path, rustc: &mut Command) { for rel_ab in &self.props.aux.builds { self.build_auxiliary(of, rel_ab, &aux_dir, None); } @@ -1146,12 +1144,7 @@ impl<'test> TestCx<'test> { |rustc: &mut Command, aux_name: &str, aux_path: &str, aux_type: AuxType| { let lib_name = get_lib_name(&path_to_crate_name(aux_path), aux_type); if let Some(lib_name) = lib_name { - rustc.arg("--extern").arg(format!( - "{}={}/{}", - aux_name, - aux_dir.display(), - lib_name - )); + rustc.arg("--extern").arg(format!("{}={}/{}", aux_name, aux_dir, lib_name)); } }; @@ -1172,7 +1165,7 @@ impl<'test> TestCx<'test> { let aux_type = self.build_auxiliary(of, aux_file, aux_dir, None); if let Some(lib_name) = get_lib_name(aux_file.trim_end_matches(".rs"), aux_type) { let lib_path = aux_dir.join(&lib_name); - rustc.arg(format!("-Zcodegen-backend={}", lib_path.display())); + rustc.arg(format!("-Zcodegen-backend={}", lib_path)); } } } @@ -1188,7 +1181,7 @@ impl<'test> TestCx<'test> { if self.props.add_core_stubs { let minicore_path = self.build_minicore(); rustc.arg("--extern"); - rustc.arg(&format!("minicore={}", minicore_path.to_str().unwrap())); + rustc.arg(&format!("minicore={}", minicore_path)); } let aux_dir = self.aux_output_dir(); @@ -1206,7 +1199,7 @@ impl<'test> TestCx<'test> { /// Builds `minicore`. Returns the path to the minicore rlib within the base test output /// directory. 
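`document` above assembles the per-test rustdoc output directory with `Utf8PathBuf::from_iter`, which collects any items that are `AsRef<Utf8Path>`. A standalone sketch of that construction (the paths are invented):

use camino::{Utf8Path, Utf8PathBuf};

fn main() {
    let root_out_dir = Utf8Path::new("build/host/test/rustdoc");
    let file_name = "my_test";

    // Each component is pushed in turn, exactly like PathBuf::from_iter.
    let out_dir = Utf8PathBuf::from_iter([
        root_out_dir,
        Utf8Path::new("docs"),
        Utf8Path::new(file_name),
        Utf8Path::new("doc"),
    ]);

    assert_eq!(out_dir.as_str(), "build/host/test/rustdoc/docs/my_test/doc");
}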
- fn build_minicore(&self) -> PathBuf { + fn build_minicore(&self) -> Utf8PathBuf { let output_file_path = self.output_base_dir().join("libminicore.rlib"); let mut rustc = self.make_compile_args( &self.config.minicore_path, @@ -1223,10 +1216,7 @@ impl<'test> TestCx<'test> { let res = self.compose_and_run(rustc, self.config.compile_lib_path.as_path(), None, None); if !res.status.success() { self.fatal_proc_rec( - &format!( - "auxiliary build of {:?} failed to compile: ", - self.config.minicore_path.display() - ), + &format!("auxiliary build of {} failed to compile: ", self.config.minicore_path), &res, ); } @@ -1241,7 +1231,7 @@ impl<'test> TestCx<'test> { &self, of: &TestPaths, source_path: &str, - aux_dir: &Path, + aux_dir: &Utf8Path, aux_type: Option, ) -> AuxType { let aux_testpaths = self.compute_aux_test_paths(of, source_path); @@ -1338,10 +1328,7 @@ impl<'test> TestCx<'test> { ); if !auxres.status.success() { self.fatal_proc_rec( - &format!( - "auxiliary build of {:?} failed to compile: ", - aux_testpaths.file.display() - ), + &format!("auxiliary build of {} failed to compile: ", aux_testpaths.file), &auxres, ); } @@ -1350,8 +1337,8 @@ impl<'test> TestCx<'test> { fn read2_abbreviated(&self, child: Child) -> (Output, Truncated) { let mut filter_paths_from_len = Vec::new(); - let mut add_path = |path: &Path| { - let path = path.display().to_string(); + let mut add_path = |path: &Utf8Path| { + let path = path.to_string(); let windows = path.replace("\\", "\\\\"); if windows != path { filter_paths_from_len.push(windows); @@ -1373,8 +1360,8 @@ impl<'test> TestCx<'test> { fn compose_and_run( &self, mut command: Command, - lib_path: &Path, - aux_path: Option<&Path>, + lib_path: &Utf8Path, + aux_path: Option<&Utf8Path>, input: Option, ) -> ProcRes { let cmdline = { @@ -1419,9 +1406,9 @@ impl<'test> TestCx<'test> { matches!(self.config.suite.as_str(), "rustdoc-ui" | "rustdoc-js" | "rustdoc-json") } - fn get_mir_dump_dir(&self) -> PathBuf { + fn get_mir_dump_dir(&self) -> Utf8PathBuf { let mut mir_dump_dir = self.config.build_test_suite_root.clone(); - debug!("input_file: {:?}", self.testpaths.file); + debug!("input_file: {}", self.testpaths.file); mir_dump_dir.push(&self.testpaths.relative_dir); mir_dump_dir.push(self.testpaths.file.file_stem().unwrap()); mir_dump_dir @@ -1429,7 +1416,7 @@ impl<'test> TestCx<'test> { fn make_compile_args( &self, - input_file: &Path, + input_file: &Utf8Path, output_file: TargetLocation, emit: Emit, allow_unused: AllowUnused, @@ -1470,7 +1457,7 @@ impl<'test> TestCx<'test> { // Similarly, vendored sources shouldn't be shown when running from a dist tarball. rustc.arg("-Z").arg(format!( "ignore-directory-in-diagnostics-source-blocks={}", - self.config.src_root.join("vendor").to_str().unwrap(), + self.config.src_root.join("vendor"), )); // Optionally prevent default --sysroot if specified in test compile-flags. 
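Several call sites above now hand a `&Utf8Path` straight to `Command::arg` instead of going through `to_str().unwrap()`; that works because `Utf8Path` implements `AsRef<OsStr>`, and the same value still formats with `{}` for logging. A minimal illustration (tool name and paths are placeholders):

use camino::Utf8Path;
use std::process::Command;

fn main() {
    let lib_path = Utf8Path::new("build/host/stage1/lib");
    let input = Utf8Path::new("tests/ui/hello.rs");

    let mut rustc = Command::new("rustc");
    // &Utf8Path: AsRef<OsStr>, so no lossy conversion is needed for arguments.
    rustc.arg("-L").arg(lib_path).arg(input);

    // ...and Display makes command logging painless.
    println!("compiling {} with libs from {}", input, lib_path);
}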
@@ -1494,7 +1481,7 @@ impl<'test> TestCx<'test> { if !is_rustdoc { if let Some(ref incremental_dir) = self.props.incremental_dir { - rustc.args(&["-C", &format!("incremental={}", incremental_dir.display())]); + rustc.args(&["-C", &format!("incremental={}", incremental_dir)]); rustc.args(&["-Z", "incremental-verify-ich"]); } @@ -1538,7 +1525,7 @@ impl<'test> TestCx<'test> { let mir_dump_dir = self.get_mir_dump_dir(); remove_and_create_dir_all(&mir_dump_dir); let mut dir_opt = "-Zdump-mir-dir=".to_string(); - dir_opt.push_str(mir_dump_dir.to_str().unwrap()); + dir_opt.push_str(mir_dump_dir.as_str()); debug!("dir_opt: {:?}", dir_opt); rustc.arg(dir_opt); }; @@ -1631,8 +1618,7 @@ impl<'test> TestCx<'test> { if self.props.remap_src_base { rustc.arg(format!( "--remap-path-prefix={}={}", - self.config.src_test_suite_root.to_str().unwrap(), - FAKE_SRC_BASE, + self.config.src_test_suite_root, FAKE_SRC_BASE, )); } @@ -1755,7 +1741,7 @@ impl<'test> TestCx<'test> { rustc } - fn make_exe_name(&self) -> PathBuf { + fn make_exe_name(&self) -> Utf8PathBuf { // Using a single letter here to keep the path length down for // Windows. Some test names get very long. rustc creates `rcgu` // files with the module name appended to it which can more than @@ -1806,7 +1792,7 @@ impl<'test> TestCx<'test> { } } - fn make_cmdline(&self, command: &Command, libpath: &Path) -> String { + fn make_cmdline(&self, command: &Command, libpath: &Utf8Path) -> String { use crate::util; // Linux and mac don't require adjusting the library search path @@ -1819,7 +1805,7 @@ impl<'test> TestCx<'test> { format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path)) } - format!("{} {:?}", lib_path_cmd_prefix(libpath.to_str().unwrap()), command) + format!("{} {:?}", lib_path_cmd_prefix(libpath.as_str()), command) } } @@ -1833,20 +1819,19 @@ impl<'test> TestCx<'test> { return; } - let path = Path::new(proc_name); + let path = Utf8Path::new(proc_name); let proc_name = if path.file_stem().is_some_and(|p| p == "rmake") { - OsString::from_iter( + String::from_iter( path.parent() .unwrap() .file_name() .into_iter() - .chain(Some(OsStr::new("/"))) + .chain(Some("/")) .chain(path.file_name()), ) } else { path.file_name().unwrap().into() }; - let proc_name = proc_name.to_string_lossy(); println!("------{proc_name} stdout------------------------------"); println!("{}", out); println!("------{proc_name} stderr------------------------------"); @@ -1856,18 +1841,18 @@ impl<'test> TestCx<'test> { fn dump_output_file(&self, out: &str, extension: &str) { let outfile = self.make_out_name(extension); - fs::write(&outfile, out).unwrap(); + fs::write(outfile.as_std_path(), out).unwrap(); } /// Creates a filename for output with the given extension. /// E.g., `/.../testname.revision.mode/testname.extension`. - fn make_out_name(&self, extension: &str) -> PathBuf { + fn make_out_name(&self, extension: &str) -> Utf8PathBuf { self.output_base_name().with_extension(extension) } /// Gets the directory where auxiliary files are written. /// E.g., `/.../testname.revision.mode/auxiliary/`. - fn aux_output_dir_name(&self) -> PathBuf { + fn aux_output_dir_name(&self) -> Utf8PathBuf { self.output_base_dir() .join("auxiliary") .with_extra_extension(self.config.mode.aux_dir_disambiguator()) @@ -1875,12 +1860,12 @@ impl<'test> TestCx<'test> { /// Gets the directory where auxiliary binaries are written. /// E.g., `/.../testname.revision.mode/auxiliary/bin`. 
- fn aux_bin_output_dir_name(&self) -> PathBuf { + fn aux_bin_output_dir_name(&self) -> Utf8PathBuf { self.aux_output_dir_name().join("bin") } /// Generates a unique name for the test, such as `testname.revision.mode`. - fn output_testname_unique(&self) -> PathBuf { + fn output_testname_unique(&self) -> Utf8PathBuf { output_testname_unique(self.config, self.testpaths, self.safe_revision()) } @@ -1893,14 +1878,14 @@ impl<'test> TestCx<'test> { /// Gets the absolute path to the directory where all output for the given /// test/revision should reside. /// E.g., `/path/to/build/host-tuple/test/ui/relative/testname.revision.mode/`. - fn output_base_dir(&self) -> PathBuf { + fn output_base_dir(&self) -> Utf8PathBuf { output_base_dir(self.config, self.testpaths, self.safe_revision()) } /// Gets the absolute path to the base filename used as output for the given /// test/revision. /// E.g., `/.../relative/testname.revision.mode/testname`. - fn output_base_name(&self) -> PathBuf { + fn output_base_name(&self) -> Utf8PathBuf { output_base_name(self.config, self.testpaths, self.safe_revision()) } @@ -1935,7 +1920,7 @@ impl<'test> TestCx<'test> { // codegen tests (using FileCheck) - fn compile_test_and_save_ir(&self) -> (ProcRes, PathBuf) { + fn compile_test_and_save_ir(&self) -> (ProcRes, Utf8PathBuf) { let output_path = self.output_base_name().with_extension("ll"); let input_file = &self.testpaths.file; let rustc = self.make_compile_args( @@ -1951,7 +1936,7 @@ impl<'test> TestCx<'test> { (proc_res, output_path) } - fn verify_with_filecheck(&self, output: &Path) -> ProcRes { + fn verify_with_filecheck(&self, output: &Utf8Path) -> ProcRes { let mut filecheck = Command::new(self.config.llvm_filecheck.as_ref().unwrap()); filecheck.arg("--input-file").arg(output).arg(&self.testpaths.file); @@ -1981,7 +1966,7 @@ impl<'test> TestCx<'test> { filecheck.args(&self.props.filecheck_flags); // FIXME(jieyouxu): don't pass an empty Path - self.compose_and_run(filecheck, Path::new(""), None, None) + self.compose_and_run(filecheck, Utf8Path::new(""), None, None) } fn charset() -> &'static str { @@ -1989,7 +1974,7 @@ impl<'test> TestCx<'test> { if cfg!(target_os = "freebsd") { "ISO-8859-1" } else { "UTF-8" } } - fn compare_to_default_rustdoc(&mut self, out_dir: &Path) { + fn compare_to_default_rustdoc(&mut self, out_dir: &Utf8Path) { if !self.config.has_html_tidy { return; } @@ -2141,12 +2126,8 @@ impl<'test> TestCx<'test> { }; } - fn get_lines>( - &self, - path: &P, - mut other_files: Option<&mut Vec>, - ) -> Vec { - let content = fs::read_to_string(&path).unwrap(); + fn get_lines(&self, path: &Utf8Path, mut other_files: Option<&mut Vec>) -> Vec { + let content = fs::read_to_string(path.as_std_path()).unwrap(); let mut ignore = false; content .lines() @@ -2192,8 +2173,8 @@ impl<'test> TestCx<'test> { for other_file in other_files { let mut path = self.testpaths.file.clone(); path.set_file_name(&format!("{}.rs", other_file)); - let path = fs::canonicalize(path).expect("failed to canonicalize"); - let normalized = path.to_str().unwrap().replace('\\', "/"); + let path = path.canonicalize_utf8().expect("failed to canonicalize"); + let normalized = path.as_str().replace('\\', "/"); files.insert(normalized, self.get_lines(&path, None)); } @@ -2377,26 +2358,24 @@ impl<'test> TestCx<'test> { let mut normalized = output.to_string(); - let mut normalize_path = |from: &Path, to: &str| { - let mut from = from.display().to_string(); - if json { - from = from.replace("\\", "\\\\"); - } - normalized = normalized.replace(&from, 
to); + let mut normalize_path = |from: &Utf8Path, to: &str| { + let from = if json { &from.as_str().replace("\\", "\\\\") } else { from.as_str() }; + + normalized = normalized.replace(from, to); }; let parent_dir = self.testpaths.file.parent().unwrap(); normalize_path(parent_dir, "$DIR"); if self.props.remap_src_base { - let mut remapped_parent_dir = PathBuf::from(FAKE_SRC_BASE); - if self.testpaths.relative_dir != Path::new("") { + let mut remapped_parent_dir = Utf8PathBuf::from(FAKE_SRC_BASE); + if self.testpaths.relative_dir != Utf8Path::new("") { remapped_parent_dir.push(&self.testpaths.relative_dir); } normalize_path(&remapped_parent_dir, "$DIR"); } - let base_dir = Path::new("/rustc/FAKE_PREFIX"); + let base_dir = Utf8Path::new("/rustc/FAKE_PREFIX"); // Fake paths into the libstd/libcore normalize_path(&base_dir.join("library"), "$SRC_DIR"); // `ui-fulldeps` tests can show paths to the compiler source when testing macros from @@ -2406,8 +2385,8 @@ impl<'test> TestCx<'test> { // Real paths into the libstd/libcore let rust_src_dir = &self.config.sysroot_base.join("lib/rustlib/src/rust"); - rust_src_dir.try_exists().expect(&*format!("{} should exists", rust_src_dir.display())); - let rust_src_dir = rust_src_dir.read_link().unwrap_or(rust_src_dir.to_path_buf()); + rust_src_dir.try_exists().expect(&*format!("{} should exists", rust_src_dir)); + let rust_src_dir = rust_src_dir.read_link_utf8().unwrap_or(rust_src_dir.to_path_buf()); normalize_path(&rust_src_dir.join("library"), "$SRC_DIR_REAL"); // eg. @@ -2547,7 +2526,7 @@ impl<'test> TestCx<'test> { .replace("\r\n", "\n") } - fn expected_output_path(&self, kind: &str) -> PathBuf { + fn expected_output_path(&self, kind: &str) -> Utf8PathBuf { let mut path = expected_output_path(&self.testpaths, self.revision, &self.config.compare_mode, kind); @@ -2576,19 +2555,18 @@ impl<'test> TestCx<'test> { } } - fn load_expected_output_from_path(&self, path: &Path) -> Result { - fs::read_to_string(path).map_err(|err| { - format!("failed to load expected output from `{}`: {}", path.display(), err) - }) + fn load_expected_output_from_path(&self, path: &Utf8Path) -> Result { + fs::read_to_string(path) + .map_err(|err| format!("failed to load expected output from `{}`: {}", path, err)) } - fn delete_file(&self, file: &Path) { + fn delete_file(&self, file: &Utf8Path) { if !file.exists() { // Deleting a nonexistent file would error. 
return; } - if let Err(e) = fs::remove_file(file) { - self.fatal(&format!("failed to delete `{}`: {}", file.display(), e,)); + if let Err(e) = fs::remove_file(file.as_std_path()) { + self.fatal(&format!("failed to delete `{}`: {}", file, e,)); } } @@ -2694,8 +2672,8 @@ impl<'test> TestCx<'test> { fn show_diff( &self, stream: &str, - expected_path: &Path, - actual_path: &Path, + expected_path: &Utf8Path, + actual_path: &Utf8Path, expected: &str, actual: &str, actual_unnormalized: &str, @@ -2834,7 +2812,7 @@ impl<'test> TestCx<'test> { fs::create_dir_all(&incremental_dir).unwrap(); if self.config.verbose { - println!("init_incremental_test: incremental_dir={}", incremental_dir.display()); + println!("init_incremental_test: incremental_dir={incremental_dir}"); } } } @@ -2892,8 +2870,8 @@ impl ProcRes { #[derive(Debug)] enum TargetLocation { - ThisFile(PathBuf), - ThisDirectory(PathBuf), + ThisFile(Utf8PathBuf), + ThisDirectory(Utf8PathBuf), } enum AllowUnused { diff --git a/src/tools/compiletest/src/runtest/assembly.rs b/src/tools/compiletest/src/runtest/assembly.rs index 89d7de58c203c..91d4f620f7194 100644 --- a/src/tools/compiletest/src/runtest/assembly.rs +++ b/src/tools/compiletest/src/runtest/assembly.rs @@ -1,4 +1,4 @@ -use std::path::PathBuf; +use camino::Utf8PathBuf; use super::{AllowUnused, Emit, LinkToAux, ProcRes, TargetLocation, TestCx}; @@ -19,7 +19,7 @@ impl TestCx<'_> { } } - fn compile_test_and_save_assembly(&self) -> (ProcRes, PathBuf) { + fn compile_test_and_save_assembly(&self) -> (ProcRes, Utf8PathBuf) { // This works with both `--emit asm` (as default output name for the assembly) // and `ptx-linker` because the latter can write output at requested location. let output_path = self.output_base_name().with_extension("s"); diff --git a/src/tools/compiletest/src/runtest/codegen_units.rs b/src/tools/compiletest/src/runtest/codegen_units.rs index 6c866cbef21ab..8dfa8d18d1a0b 100644 --- a/src/tools/compiletest/src/runtest/codegen_units.rs +++ b/src/tools/compiletest/src/runtest/codegen_units.rs @@ -26,9 +26,7 @@ impl TestCx<'_> { .stdout .lines() .filter(|line| line.starts_with(PREFIX)) - .map(|line| { - line.replace(&self.testpaths.file.display().to_string(), "TEST_PATH").to_string() - }) + .map(|line| line.replace(&self.testpaths.file.as_str(), "TEST_PATH").to_string()) .map(|line| str_to_mono_item(&line, true)) .collect(); diff --git a/src/tools/compiletest/src/runtest/coverage.rs b/src/tools/compiletest/src/runtest/coverage.rs index 56fc5baf5f248..41cfeaee35ffb 100644 --- a/src/tools/compiletest/src/runtest/coverage.rs +++ b/src/tools/compiletest/src/runtest/coverage.rs @@ -1,9 +1,9 @@ //! Code specific to the coverage test suites. 
 use std::ffi::OsStr;
-use std::path::{Path, PathBuf};
 use std::process::Command;
 
+use camino::{Utf8Path, Utf8PathBuf};
 use glob::glob;
 
 use crate::common::{UI_COVERAGE, UI_COVERAGE_MAP};
@@ -11,7 +11,7 @@ use crate::runtest::{Emit, ProcRes, TestCx, WillExecute};
 use crate::util::static_regex;
 
 impl<'test> TestCx<'test> {
-    fn coverage_dump_path(&self) -> &Path {
+    fn coverage_dump_path(&self) -> &Utf8Path {
         self.config
             .coverage_dump_path
             .as_deref()
@@ -79,10 +79,8 @@ impl<'test> TestCx<'test> {
             std::fs::remove_file(&profdata_path).unwrap();
         }
 
-        let proc_res = self.exec_compiled_test_general(
-            &[("LLVM_PROFILE_FILE", &profraw_path.to_str().unwrap())],
-            false,
-        );
+        let proc_res =
+            self.exec_compiled_test_general(&[("LLVM_PROFILE_FILE", profraw_path.as_str())], false);
         if self.props.failure_status.is_some() {
             self.check_correct_failure_status(&proc_res);
         } else if !proc_res.status.success() {
@@ -158,8 +156,8 @@ impl<'test> TestCx<'test> {
     /// `.profraw` files and doctest executables to the given vectors.
     fn run_doctests_for_coverage(
         &self,
-        profraw_paths: &mut Vec<PathBuf>,
-        bin_paths: &mut Vec<PathBuf>,
+        profraw_paths: &mut Vec<Utf8PathBuf>,
+        bin_paths: &mut Vec<Utf8PathBuf>,
     ) {
         // Put .profraw files and doctest executables in dedicated directories,
         // to make it easier to glob them all later.
@@ -204,10 +202,9 @@ impl<'test> TestCx<'test> {
         self.fatal_proc_rec("rustdoc --test failed!", &proc_res)
     }
 
-    fn glob_iter(path: impl AsRef<Path>) -> impl Iterator<Item = PathBuf> {
-        let path_str = path.as_ref().to_str().unwrap();
-        let iter = glob(path_str).unwrap();
-        iter.map(Result::unwrap)
+    fn glob_iter(path: impl AsRef<Utf8Path>) -> impl Iterator<Item = Utf8PathBuf> {
+        let iter = glob(path.as_ref().as_str()).unwrap();
+        iter.map(Result::unwrap).map(Utf8PathBuf::try_from).map(Result::unwrap)
     }
 
     // Find all profraw files in the profraw directory.
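The rewritten `glob_iter` above keeps using the `glob` crate, which yields `std::path::PathBuf`s, and converts each match to `Utf8PathBuf`, panicking on non-UTF-8 paths. A standalone sketch of that pattern; the `glob_utf8` helper and the `profraw_dir` path are made up for illustration:

```rust
use camino::{Utf8Path, Utf8PathBuf};
use glob::glob;

/// Globs `pattern` under `dir` and yields UTF-8 paths, panicking if the
/// pattern is invalid or a matched path is not valid UTF-8.
fn glob_utf8(dir: &Utf8Path, pattern: &str) -> impl Iterator<Item = Utf8PathBuf> {
    let pattern = dir.join(pattern);
    glob(pattern.as_str())
        .unwrap()
        .map(Result::unwrap)
        .map(Utf8PathBuf::try_from)
        .map(Result::unwrap)
}

fn main() {
    for profraw in glob_utf8(Utf8Path::new("profraw_dir"), "*.profraw") {
        println!("found {profraw}");
    }
}
```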
diff --git a/src/tools/compiletest/src/runtest/debugger.rs b/src/tools/compiletest/src/runtest/debugger.rs index d9e5c3fa0d8fa..a4103c5b4a9a4 100644 --- a/src/tools/compiletest/src/runtest/debugger.rs +++ b/src/tools/compiletest/src/runtest/debugger.rs @@ -1,7 +1,8 @@ use std::fmt::Write; use std::fs::File; use std::io::{BufRead, BufReader}; -use std::path::{Path, PathBuf}; + +use camino::{Utf8Path, Utf8PathBuf}; use crate::common::Config; use crate::runtest::ProcRes; @@ -15,11 +16,15 @@ pub(super) struct DebuggerCommands { /// Contains the source line number to check and the line itself check_lines: Vec<(usize, String)>, /// Source file name - file: PathBuf, + file: Utf8PathBuf, } impl DebuggerCommands { - pub fn parse_from(file: &Path, config: &Config, debugger_prefix: &str) -> Result { + pub fn parse_from( + file: &Utf8Path, + config: &Config, + debugger_prefix: &str, + ) -> Result { let command_directive = format!("{debugger_prefix}-command"); let check_directive = format!("{debugger_prefix}-check"); @@ -27,7 +32,7 @@ impl DebuggerCommands { let mut commands = vec![]; let mut check_lines = vec![]; let mut counter = 0; - let reader = BufReader::new(File::open(file).unwrap()); + let reader = BufReader::new(File::open(file.as_std_path()).unwrap()); for (line_no, line) in reader.lines().enumerate() { counter += 1; let line = line.map_err(|e| format!("Error while parsing debugger commands: {}", e))?; @@ -50,7 +55,7 @@ impl DebuggerCommands { } } - Ok(Self { commands, breakpoint_lines, check_lines, file: file.to_owned() }) + Ok(Self { commands, breakpoint_lines, check_lines, file: file.to_path_buf() }) } /// Given debugger output and lines to check, ensure that every line is @@ -81,10 +86,10 @@ impl DebuggerCommands { if missing.is_empty() { Ok(()) } else { - let fname = self.file.file_name().unwrap().to_string_lossy(); + let fname = self.file.file_name().unwrap(); let mut msg = format!( "check directive(s) from `{}` not found in debugger output. 
errors:", - self.file.display() + self.file ); for (src_lineno, err_line) in missing { diff --git a/src/tools/compiletest/src/runtest/debuginfo.rs b/src/tools/compiletest/src/runtest/debuginfo.rs index 50e733cd31b6b..31240dff9a196 100644 --- a/src/tools/compiletest/src/runtest/debuginfo.rs +++ b/src/tools/compiletest/src/runtest/debuginfo.rs @@ -1,9 +1,9 @@ use std::ffi::{OsStr, OsString}; use std::fs::File; use std::io::{BufRead, BufReader, Read}; -use std::path::Path; use std::process::{Command, Output, Stdio}; +use camino::Utf8Path; use tracing::debug; use super::debugger::DebuggerCommands; @@ -73,11 +73,11 @@ impl TestCx<'_> { let mut js_extension = self.testpaths.file.clone(); js_extension.set_extension("cdb.js"); if js_extension.exists() { - script_str.push_str(&format!(".scriptload \"{}\"\n", js_extension.to_string_lossy())); + script_str.push_str(&format!(".scriptload \"{}\"\n", js_extension)); } // Set breakpoints on every line that contains the string "#break" - let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy(); + let source_file_name = self.testpaths.file.file_name().unwrap(); for line in &dbg_cmds.breakpoint_lines { script_str.push_str(&format!("bp `{}:{}`\n", source_file_name, line)); } @@ -151,16 +151,11 @@ impl TestCx<'_> { if is_android_gdb_target(&self.config.target) { cmds = cmds.replace("run", "continue"); - let tool_path = match self.config.android_cross_path.to_str() { - Some(x) => x.to_owned(), - None => self.fatal("cannot find android cross path"), - }; - // write debugger script let mut script_str = String::with_capacity(2048); script_str.push_str(&format!("set charset {}\n", Self::charset())); - script_str.push_str(&format!("set sysroot {}\n", tool_path)); - script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap())); + script_str.push_str(&format!("set sysroot {}\n", &self.config.android_cross_path)); + script_str.push_str(&format!("file {}\n", exe_file)); script_str.push_str("target remote :5039\n"); script_str.push_str(&format!( "set solib-search-path \ @@ -169,12 +164,8 @@ impl TestCx<'_> { )); for line in &dbg_cmds.breakpoint_lines { script_str.push_str( - format!( - "break {:?}:{}\n", - self.testpaths.file.file_name().unwrap().to_string_lossy(), - *line - ) - .as_str(), + format!("break {}:{}\n", self.testpaths.file.file_name().unwrap(), *line) + .as_str(), ); } script_str.push_str(&cmds); @@ -203,7 +194,7 @@ impl TestCx<'_> { self.config.adb_test_dir.clone(), if self.config.target.contains("aarch64") { "64" } else { "" }, self.config.adb_test_dir.clone(), - exe_file.file_name().unwrap().to_str().unwrap() + exe_file.file_name().unwrap() ); debug!("adb arg: {}", adb_arg); @@ -242,7 +233,7 @@ impl TestCx<'_> { let mut gdb = Command::new(&format!("{}-gdb", self.config.target)); gdb.args(debugger_opts); // FIXME(jieyouxu): don't pass an empty Path - let cmdline = self.make_cmdline(&gdb, Path::new("")); + let cmdline = self.make_cmdline(&gdb, Utf8Path::new("")); logv(self.config, format!("executing {}", cmdline)); cmdline }; @@ -259,7 +250,6 @@ impl TestCx<'_> { } } else { let rust_pp_module_abs_path = self.config.src_root.join("src").join("etc"); - let rust_pp_module_abs_path = rust_pp_module_abs_path.to_str().unwrap(); // write debugger script let mut script_str = String::with_capacity(2048); script_str.push_str(&format!("set charset {}\n", Self::charset())); @@ -274,17 +264,15 @@ impl TestCx<'_> { // GDB's script auto loading safe path script_str.push_str(&format!( "add-auto-load-safe-path {}\n", - 
rust_pp_module_abs_path.replace(r"\", r"\\") + rust_pp_module_abs_path.as_str().replace(r"\", r"\\") )); - let output_base_dir = self.output_base_dir().to_str().unwrap().to_owned(); - // Add the directory containing the output binary to // include embedded pretty printers to GDB's script // auto loading safe path script_str.push_str(&format!( "add-auto-load-safe-path {}\n", - output_base_dir.replace(r"\", r"\\") + self.output_base_dir().as_str().replace(r"\", r"\\") )); } } @@ -301,12 +289,13 @@ impl TestCx<'_> { script_str.push_str("set print pretty off\n"); // Add the pretty printer directory to GDB's source-file search path - script_str - .push_str(&format!("directory {}\n", rust_pp_module_abs_path.replace(r"\", r"\\"))); + script_str.push_str(&format!( + "directory {}\n", + rust_pp_module_abs_path.as_str().replace(r"\", r"\\") + )); // Load the target executable - script_str - .push_str(&format!("file {}\n", exe_file.to_str().unwrap().replace(r"\", r"\\"))); + script_str.push_str(&format!("file {}\n", exe_file.as_str().replace(r"\", r"\\"))); // Force GDB to print values in the Rust format. script_str.push_str("set language rust\n"); @@ -315,7 +304,7 @@ impl TestCx<'_> { for line in &dbg_cmds.breakpoint_lines { script_str.push_str(&format!( "break '{}':{}\n", - self.testpaths.file.file_name().unwrap().to_string_lossy(), + self.testpaths.file.file_name().unwrap(), *line )); } @@ -410,14 +399,14 @@ impl TestCx<'_> { script_str.push_str(&format!( "command script import {}/lldb_lookup.py\n", - rust_pp_module_abs_path.to_str().unwrap() + rust_pp_module_abs_path )); File::open(rust_pp_module_abs_path.join("lldb_commands")) .and_then(|mut file| file.read_to_string(&mut script_str)) .expect("Failed to read lldb_commands"); // Set breakpoints on every line that contains the string "#break" - let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy(); + let source_file_name = self.testpaths.file.file_name().unwrap(); for line in &dbg_cmds.breakpoint_lines { script_str.push_str(&format!( "breakpoint set --file '{}' --line {}\n", @@ -451,7 +440,7 @@ impl TestCx<'_> { } } - fn run_lldb(&self, test_executable: &Path, debugger_script: &Path) -> ProcRes { + fn run_lldb(&self, test_executable: &Utf8Path, debugger_script: &Utf8Path) -> ProcRes { // Prepare the lldb_batchmode which executes the debugger script let lldb_script_path = self.config.src_root.join("src/etc/lldb_batchmode.py"); let pythonpath = if let Ok(pp) = std::env::var("PYTHONPATH") { diff --git a/src/tools/compiletest/src/runtest/js_doc.rs b/src/tools/compiletest/src/runtest/js_doc.rs index d630affbec104..fd53f01ca1746 100644 --- a/src/tools/compiletest/src/runtest/js_doc.rs +++ b/src/tools/compiletest/src/runtest/js_doc.rs @@ -9,8 +9,7 @@ impl TestCx<'_> { self.document(&out_dir, &self.testpaths); - let file_stem = - self.testpaths.file.file_stem().and_then(|f| f.to_str()).expect("no file stem"); + let file_stem = self.testpaths.file.file_stem().expect("no file stem"); let res = self.run_command_to_procres( Command::new(&nodejs) .arg(self.config.src_root.join("src/tools/rustdoc-js/tester.js")) diff --git a/src/tools/compiletest/src/runtest/mir_opt.rs b/src/tools/compiletest/src/runtest/mir_opt.rs index d1ec00357449d..ded6a68fe5876 100644 --- a/src/tools/compiletest/src/runtest/mir_opt.rs +++ b/src/tools/compiletest/src/runtest/mir_opt.rs @@ -1,6 +1,6 @@ use std::fs; -use std::path::{Path, PathBuf}; +use camino::{Utf8Path, Utf8PathBuf}; use glob::glob; use miropt_test_tools::{MiroptTest, MiroptTestFile, 
files_for_miropt_test}; use tracing::debug; @@ -14,7 +14,7 @@ impl TestCx<'_> { let should_run = self.should_run(pm); let mut test_info = files_for_miropt_test( - &self.testpaths.file, + &self.testpaths.file.as_std_path(), self.config.get_pointer_width(), self.config.target_cfg().panic.for_miropt_test_tools(), ); @@ -38,20 +38,15 @@ impl TestCx<'_> { fn check_mir_dump(&self, test_info: MiroptTest) { let test_dir = self.testpaths.file.parent().unwrap(); - let test_crate = - self.testpaths.file.file_stem().unwrap().to_str().unwrap().replace('-', "_"); + let test_crate = self.testpaths.file.file_stem().unwrap().replace('-', "_"); let MiroptTest { run_filecheck, suffix, files, passes: _ } = test_info; if self.config.bless { - for e in - glob(&format!("{}/{}.*{}.mir", test_dir.display(), test_crate, suffix)).unwrap() - { + for e in glob(&format!("{}/{}.*{}.mir", test_dir, test_crate, suffix)).unwrap() { fs::remove_file(e.unwrap()).unwrap(); } - for e in - glob(&format!("{}/{}.*{}.diff", test_dir.display(), test_crate, suffix)).unwrap() - { + for e in glob(&format!("{}/{}.*{}.diff", test_dir, test_crate, suffix)).unwrap() { fs::remove_file(e.unwrap()).unwrap(); } } @@ -60,19 +55,15 @@ impl TestCx<'_> { let dumped_string = if let Some(after) = to_file { self.diff_mir_files(from_file.into(), after.into()) } else { - let mut output_file = PathBuf::new(); + let mut output_file = Utf8PathBuf::new(); output_file.push(self.get_mir_dump_dir()); output_file.push(&from_file); - debug!( - "comparing the contents of: {} with {}", - output_file.display(), - expected_file.display() - ); + debug!("comparing the contents of: {} with {:?}", output_file, expected_file); if !output_file.exists() { panic!( "Output file `{}` from test does not exist, available files are in `{}`", - output_file.display(), - output_file.parent().unwrap().display() + output_file, + output_file.parent().unwrap() ); } self.check_mir_test_timestamp(&from_file, &output_file); @@ -107,21 +98,20 @@ impl TestCx<'_> { } } - fn diff_mir_files(&self, before: PathBuf, after: PathBuf) -> String { - let to_full_path = |path: PathBuf| { + fn diff_mir_files(&self, before: Utf8PathBuf, after: Utf8PathBuf) -> String { + let to_full_path = |path: Utf8PathBuf| { let full = self.get_mir_dump_dir().join(&path); if !full.exists() { panic!( "the mir dump file for {} does not exist (requested in {})", - path.display(), - self.testpaths.file.display(), + path, self.testpaths.file, ); } full }; let before = to_full_path(before); let after = to_full_path(after); - debug!("comparing the contents of: {} with {}", before.display(), after.display()); + debug!("comparing the contents of: {} with {}", before, after); let before = fs::read_to_string(before).unwrap(); let after = fs::read_to_string(after).unwrap(); let before = self.normalize_output(&before, &[]); @@ -138,8 +128,8 @@ impl TestCx<'_> { dumped_string } - fn check_mir_test_timestamp(&self, test_name: &str, output_file: &Path) { - let t = |file| fs::metadata(file).unwrap().modified().unwrap(); + fn check_mir_test_timestamp(&self, test_name: &str, output_file: &Utf8Path) { + let t = |file: &Utf8Path| fs::metadata(file.as_std_path()).unwrap().modified().unwrap(); let source_file = &self.testpaths.file; let output_time = t(output_file); let source_time = t(source_file); @@ -147,8 +137,7 @@ impl TestCx<'_> { debug!("source file time: {:?} output file time: {:?}", source_time, output_time); panic!( "test source file `{}` is newer than potentially stale output file `{}`.", - source_file.display(), - test_name + 
source_file, test_name ); } } diff --git a/src/tools/compiletest/src/runtest/run_make.rs b/src/tools/compiletest/src/runtest/run_make.rs index 073116933bdb6..a5ce929f9b8e4 100644 --- a/src/tools/compiletest/src/runtest/run_make.rs +++ b/src/tools/compiletest/src/runtest/run_make.rs @@ -1,8 +1,8 @@ -use std::path::Path; use std::process::{Command, Output, Stdio}; use std::{env, fs}; use build_helper::fs::{ignore_not_found, recursive_remove}; +use camino::{Utf8Path, Utf8PathBuf}; use super::{ProcRes, TestCx, disable_error_reporting}; use crate::util::{copy_dir_all, dylib_env_var}; @@ -39,14 +39,16 @@ impl TestCx<'_> { // Copy all input files (apart from rmake.rs) to the temporary directory, // so that the input directory structure from `tests/run-make/` is mirrored // to the `rmake_out` directory. - for path in walkdir::WalkDir::new(&self.testpaths.file).min_depth(1) { - let path = path.unwrap().path().to_path_buf(); + for entry in walkdir::WalkDir::new(&self.testpaths.file).min_depth(1) { + let entry = entry.unwrap(); + let path = entry.path(); + let path = <&Utf8Path>::try_from(path).unwrap(); if path.file_name().is_some_and(|s| s != "rmake.rs") { let target = rmake_out_dir.join(path.strip_prefix(&self.testpaths.file).unwrap()); if path.is_dir() { - copy_dir_all(&path, target).unwrap(); + copy_dir_all(&path, &target).unwrap(); } else { - fs::copy(&path, target).unwrap(); + fs::copy(path.as_std_path(), target).unwrap(); } } } @@ -83,8 +85,10 @@ impl TestCx<'_> { // on some linux distros. // 2. Specific library paths in `self.config.compile_lib_path` needed for running rustc. - let base_dylib_search_paths = - Vec::from_iter(env::split_paths(&env::var(dylib_env_var()).unwrap())); + let base_dylib_search_paths = Vec::from_iter( + env::split_paths(&env::var(dylib_env_var()).unwrap()) + .map(|p| Utf8PathBuf::try_from(p).expect("dylib env var contains non-UTF8 paths")), + ); // Calculate the paths of the recipe binary. As previously discussed, this is placed at // `/` with `bin_name` being `rmake` or `rmake.exe` depending on @@ -113,13 +117,13 @@ impl TestCx<'_> { .arg("-o") .arg(&recipe_bin) // Specify library search paths for `run_make_support`. - .arg(format!("-Ldependency={}", &support_lib_path.parent().unwrap().to_string_lossy())) - .arg(format!("-Ldependency={}", &support_lib_deps.to_string_lossy())) - .arg(format!("-Ldependency={}", &support_lib_deps_deps.to_string_lossy())) + .arg(format!("-Ldependency={}", &support_lib_path.parent().unwrap())) + .arg(format!("-Ldependency={}", &support_lib_deps)) + .arg(format!("-Ldependency={}", &support_lib_deps_deps)) // Provide `run_make_support` as extern prelude, so test writers don't need to write // `extern run_make_support;`. .arg("--extern") - .arg(format!("run_make_support={}", &support_lib_path.to_string_lossy())) + .arg(format!("run_make_support={}", &support_lib_path)) .arg("--edition=2021") .arg(&self.testpaths.file.join("rmake.rs")) .arg("-Cprefer-dynamic"); @@ -240,7 +244,7 @@ impl TestCx<'_> { if self.config.target.contains("msvc") && !self.config.cc.is_empty() { // We need to pass a path to `lib.exe`, so assume that `cc` is `cl.exe` // and that `lib.exe` lives next to it. 
- let lib = Path::new(&self.config.cc).parent().unwrap().join("lib.exe"); + let lib = Utf8Path::new(&self.config.cc).parent().unwrap().join("lib.exe"); // MSYS doesn't like passing flags of the form `/foo` as it thinks it's // a path and instead passes `C:\msys64\foo`, so convert all @@ -262,8 +266,8 @@ impl TestCx<'_> { cmd.env("IS_MSVC", "1") .env("IS_WINDOWS", "1") - .env("MSVC_LIB", format!("'{}' -nologo", lib.display())) - .env("MSVC_LIB_PATH", format!("{}", lib.display())) + .env("MSVC_LIB", format!("'{}' -nologo", lib)) + .env("MSVC_LIB_PATH", &lib) // Note: we diverge from legacy run_make and don't lump `CC` the compiler and // default flags together. .env("CC_DEFAULT_FLAGS", &cflags) diff --git a/src/tools/compiletest/src/runtest/ui.rs b/src/tools/compiletest/src/runtest/ui.rs index 974e5170465ec..e87b037cd289d 100644 --- a/src/tools/compiletest/src/runtest/ui.rs +++ b/src/tools/compiletest/src/runtest/ui.rs @@ -68,7 +68,7 @@ impl TestCx<'_> { { let mut coverage_file_path = self.config.build_test_suite_root.clone(); coverage_file_path.push("rustfix_missing_coverage.txt"); - debug!("coverage_file_path: {}", coverage_file_path.display()); + debug!("coverage_file_path: {}", coverage_file_path); let mut file = OpenOptions::new() .create(true) @@ -76,8 +76,8 @@ impl TestCx<'_> { .open(coverage_file_path.as_path()) .expect("could not create or open file"); - if let Err(e) = writeln!(file, "{}", self.testpaths.file.display()) { - panic!("couldn't write to {}: {e:?}", coverage_file_path.display()); + if let Err(e) = writeln!(file, "{}", self.testpaths.file) { + panic!("couldn't write to {}: {e:?}", coverage_file_path); } } } else if self.props.run_rustfix { @@ -119,7 +119,7 @@ impl TestCx<'_> { self.testpaths.relative_dir.join(self.testpaths.file.file_name().unwrap()); println!( "To only update this specific test, also pass `--test-args {}`", - relative_path_to_file.display(), + relative_path_to_file, ); self.fatal_proc_rec( &format!("{} errors occurred comparing output.", errors), @@ -211,8 +211,6 @@ impl TestCx<'_> { let crate_name = self.testpaths.file.file_stem().expect("test must have a file stem"); // crate name must be alphanumeric or `_`. - let crate_name = - crate_name.to_str().expect("crate name implies file name must be valid UTF-8"); // replace `a.foo` -> `a__foo` for crate name purposes. 
// replace `revision-name-with-dashes` -> `revision_name_with_underscore` let crate_name = crate_name.replace('.', "__"); diff --git a/src/tools/compiletest/src/tests.rs b/src/tools/compiletest/src/tests.rs index 43c6dc0a67e89..e3e4a81755d09 100644 --- a/src/tools/compiletest/src/tests.rs +++ b/src/tools/compiletest/src/tests.rs @@ -1,5 +1,3 @@ -use std::ffi::OsString; - use crate::debuggers::{extract_gdb_version, extract_lldb_version}; use crate::is_test; @@ -60,11 +58,11 @@ fn test_extract_lldb_version() { #[test] fn is_test_test() { - assert!(is_test(&OsString::from("a_test.rs"))); - assert!(!is_test(&OsString::from(".a_test.rs"))); - assert!(!is_test(&OsString::from("a_cat.gif"))); - assert!(!is_test(&OsString::from("#a_dog_gif"))); - assert!(!is_test(&OsString::from("~a_temp_file"))); + assert!(is_test("a_test.rs")); + assert!(!is_test(".a_test.rs")); + assert!(!is_test("a_cat.gif")); + assert!(!is_test("#a_dog_gif")); + assert!(!is_test("~a_temp_file")); } #[test] diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs index bff02f1db9f02..81f5679aead77 100644 --- a/src/tools/compiletest/src/util.rs +++ b/src/tools/compiletest/src/util.rs @@ -1,8 +1,7 @@ use std::env; -use std::ffi::OsStr; -use std::path::{Path, PathBuf}; use std::process::Command; +use camino::{Utf8Path, Utf8PathBuf}; use tracing::*; use crate::common::Config; @@ -34,21 +33,21 @@ pub fn logv(config: &Config, s: String) { } } -pub trait PathBufExt { +pub trait Utf8PathBufExt { /// Append an extension to the path, even if it already has one. - fn with_extra_extension>(&self, extension: S) -> PathBuf; + fn with_extra_extension(&self, extension: &str) -> Utf8PathBuf; } -impl PathBufExt for PathBuf { - fn with_extra_extension>(&self, extension: S) -> PathBuf { - if extension.as_ref().is_empty() { +impl Utf8PathBufExt for Utf8PathBuf { + fn with_extra_extension(&self, extension: &str) -> Utf8PathBuf { + if extension.is_empty() { self.clone() } else { - let mut fname = self.file_name().unwrap().to_os_string(); - if !extension.as_ref().to_str().unwrap().starts_with('.') { - fname.push("."); + let mut fname = self.file_name().unwrap().to_string(); + if !extension.starts_with('.') { + fname.push_str("."); } - fname.push(extension); + fname.push_str(extension); self.with_file_name(fname) } } @@ -71,22 +70,27 @@ pub fn dylib_env_var() -> &'static str { /// Adds a list of lookup paths to `cmd`'s dynamic library lookup path. /// If the dylib_path_var is already set for this cmd, the old value will be overwritten! -pub fn add_dylib_path(cmd: &mut Command, paths: impl Iterator>) { +pub fn add_dylib_path( + cmd: &mut Command, + paths: impl Iterator>, +) { let path_env = env::var_os(dylib_env_var()); let old_paths = path_env.as_ref().map(env::split_paths); let new_paths = paths.map(Into::into).chain(old_paths.into_iter().flatten()); cmd.env(dylib_env_var(), env::join_paths(new_paths).unwrap()); } -pub fn copy_dir_all(src: impl AsRef, dst: impl AsRef) -> std::io::Result<()> { - std::fs::create_dir_all(&dst)?; - for entry in std::fs::read_dir(src)? { +pub fn copy_dir_all(src: &Utf8Path, dst: &Utf8Path) -> std::io::Result<()> { + std::fs::create_dir_all(dst.as_std_path())?; + for entry in std::fs::read_dir(src.as_std_path())? 
{ let entry = entry?; + let path = Utf8PathBuf::try_from(entry.path()).unwrap(); + let file_name = path.file_name().unwrap(); let ty = entry.file_type()?; if ty.is_dir() { - copy_dir_all(entry.path(), dst.as_ref().join(entry.file_name()))?; + copy_dir_all(&path, &dst.join(file_name))?; } else { - std::fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?; + std::fs::copy(path.as_std_path(), dst.join(file_name).as_std_path())?; } } Ok(()) diff --git a/src/tools/compiletest/src/util/tests.rs b/src/tools/compiletest/src/util/tests.rs index b09a183b14e6a..5bcae0dcee146 100644 --- a/src/tools/compiletest/src/util/tests.rs +++ b/src/tools/compiletest/src/util/tests.rs @@ -3,12 +3,12 @@ use super::*; #[test] fn path_buf_with_extra_extension_test() { assert_eq!( - PathBuf::from("foo.rs.stderr"), - PathBuf::from("foo.rs").with_extra_extension("stderr") + Utf8PathBuf::from("foo.rs.stderr"), + Utf8PathBuf::from("foo.rs").with_extra_extension("stderr") ); assert_eq!( - PathBuf::from("foo.rs.stderr"), - PathBuf::from("foo.rs").with_extra_extension(".stderr") + Utf8PathBuf::from("foo.rs.stderr"), + Utf8PathBuf::from("foo.rs").with_extra_extension(".stderr") ); - assert_eq!(PathBuf::from("foo.rs"), PathBuf::from("foo.rs").with_extra_extension("")); + assert_eq!(Utf8PathBuf::from("foo.rs"), Utf8PathBuf::from("foo.rs").with_extra_extension("")); } diff --git a/src/tools/rustdoc-gui-test/Cargo.toml b/src/tools/rustdoc-gui-test/Cargo.toml index f7384a98f8565..8d958ac94f30e 100644 --- a/src/tools/rustdoc-gui-test/Cargo.toml +++ b/src/tools/rustdoc-gui-test/Cargo.toml @@ -5,6 +5,7 @@ edition = "2021" [dependencies] build_helper = { path = "../../build_helper" } +camino = "1" compiletest = { path = "../compiletest" } getopts = "0.2" walkdir = "2" diff --git a/src/tools/rustdoc-gui-test/src/main.rs b/src/tools/rustdoc-gui-test/src/main.rs index f1c6e13d3ae89..addb0af4a541c 100644 --- a/src/tools/rustdoc-gui-test/src/main.rs +++ b/src/tools/rustdoc-gui-test/src/main.rs @@ -118,7 +118,11 @@ If you want to install the `browser-ui-test` dependency, run `npm install browse ..Default::default() }; - let test_props = TestProps::from_file(&librs, None, &compiletest_c); + let test_props = TestProps::from_file( + &camino::Utf8PathBuf::try_from(librs).unwrap(), + None, + &compiletest_c, + ); if !test_props.compile_flags.is_empty() { cargo.env("RUSTDOCFLAGS", test_props.compile_flags.join(" ")); diff --git a/tests/ui/asm/naked-functions-rustic-abi.rs b/tests/ui/asm/naked-functions-rustic-abi.rs new file mode 100644 index 0000000000000..b654d38ccc1a6 --- /dev/null +++ b/tests/ui/asm/naked-functions-rustic-abi.rs @@ -0,0 +1,27 @@ +//@ revisions: x86_64 aarch64 +// +//@[aarch64] only-aarch64 +//@[x86_64] only-x86_64 +// +//@ build-pass +//@ needs-asm-support + +#![feature(naked_functions, naked_functions_rustic_abi, rust_cold_cc)] +#![crate_type = "lib"] + +use std::arch::{asm, naked_asm}; + +#[naked] +pub unsafe fn rust_implicit() { + naked_asm!("ret"); +} + +#[naked] +pub unsafe extern "Rust" fn rust_explicit() { + naked_asm!("ret"); +} + +#[naked] +pub unsafe extern "rust-cold" fn rust_cold() { + naked_asm!("ret"); +} diff --git a/tests/ui/asm/naked-functions-testattrs.rs b/tests/ui/asm/naked-functions-testattrs.rs index 7e373270e9fc6..ad31876a77a59 100644 --- a/tests/ui/asm/naked-functions-testattrs.rs +++ b/tests/ui/asm/naked-functions-testattrs.rs @@ -1,7 +1,6 @@ //@ needs-asm-support //@ compile-flags: --test -#![allow(undefined_naked_function_abi)] #![feature(naked_functions)] #![feature(test)] #![crate_type = 
"lib"] @@ -11,7 +10,7 @@ use std::arch::naked_asm; #[test] #[naked] //~^ ERROR [E0736] -fn test_naked() { +extern "C" fn test_naked() { unsafe { naked_asm!("") }; } @@ -19,7 +18,7 @@ fn test_naked() { #[test] #[naked] //~^ ERROR [E0736] -fn test_naked_should_panic() { +extern "C" fn test_naked_should_panic() { unsafe { naked_asm!("") }; } @@ -27,13 +26,13 @@ fn test_naked_should_panic() { #[test] #[naked] //~^ ERROR [E0736] -fn test_naked_ignore() { +extern "C" fn test_naked_ignore() { unsafe { naked_asm!("") }; } #[bench] #[naked] //~^ ERROR [E0736] -fn bench_naked() { +extern "C" fn bench_naked() { unsafe { naked_asm!("") }; } diff --git a/tests/ui/asm/naked-functions-testattrs.stderr b/tests/ui/asm/naked-functions-testattrs.stderr index 4dabe41964a57..0f0bb91b95413 100644 --- a/tests/ui/asm/naked-functions-testattrs.stderr +++ b/tests/ui/asm/naked-functions-testattrs.stderr @@ -1,5 +1,5 @@ error[E0736]: cannot use `#[naked]` with testing attributes - --> $DIR/naked-functions-testattrs.rs:12:1 + --> $DIR/naked-functions-testattrs.rs:11:1 | LL | #[test] | ------- function marked with testing attribute here @@ -7,7 +7,7 @@ LL | #[naked] | ^^^^^^^^ `#[naked]` is incompatible with testing attributes error[E0736]: cannot use `#[naked]` with testing attributes - --> $DIR/naked-functions-testattrs.rs:20:1 + --> $DIR/naked-functions-testattrs.rs:19:1 | LL | #[test] | ------- function marked with testing attribute here @@ -15,7 +15,7 @@ LL | #[naked] | ^^^^^^^^ `#[naked]` is incompatible with testing attributes error[E0736]: cannot use `#[naked]` with testing attributes - --> $DIR/naked-functions-testattrs.rs:28:1 + --> $DIR/naked-functions-testattrs.rs:27:1 | LL | #[test] | ------- function marked with testing attribute here @@ -23,7 +23,7 @@ LL | #[naked] | ^^^^^^^^ `#[naked]` is incompatible with testing attributes error[E0736]: cannot use `#[naked]` with testing attributes - --> $DIR/naked-functions-testattrs.rs:35:1 + --> $DIR/naked-functions-testattrs.rs:34:1 | LL | #[bench] | -------- function marked with testing attribute here diff --git a/tests/ui/asm/naked-functions.rs b/tests/ui/asm/naked-functions.rs index 3d4d414539c16..5bf2e2a3abd06 100644 --- a/tests/ui/asm/naked-functions.rs +++ b/tests/ui/asm/naked-functions.rs @@ -122,18 +122,6 @@ unsafe extern "C" fn invalid_may_unwind() { //~^ ERROR the `may_unwind` option cannot be used with `naked_asm!` } -#[naked] -pub unsafe fn default_abi() { - //~^ WARN Rust ABI is unsupported in naked functions - naked_asm!(""); -} - -#[naked] -pub unsafe fn rust_abi() { - //~^ WARN Rust ABI is unsupported in naked functions - naked_asm!(""); -} - #[naked] pub extern "C" fn valid_a() -> T { unsafe { diff --git a/tests/ui/asm/naked-functions.stderr b/tests/ui/asm/naked-functions.stderr index 0898f3620f24f..0a55bb9cd8370 100644 --- a/tests/ui/asm/naked-functions.stderr +++ b/tests/ui/asm/naked-functions.stderr @@ -53,19 +53,19 @@ LL | naked_asm!("", options(may_unwind)); | ^^^^^^^^^^ the `may_unwind` option is not meaningful for global-scoped inline assembly error: this is a user specified error - --> $DIR/naked-functions.rs:169:5 + --> $DIR/naked-functions.rs:157:5 | LL | compile_error!("this is a user specified error") | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this is a user specified error - --> $DIR/naked-functions.rs:175:5 + --> $DIR/naked-functions.rs:163:5 | LL | compile_error!("this is a user specified error"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: asm template must be a string literal - --> 
$DIR/naked-functions.rs:182:16 + --> $DIR/naked-functions.rs:170:16 | LL | naked_asm!(invalid_syntax) | ^^^^^^^^^^^^^^ @@ -175,20 +175,6 @@ LL | LL | *&y | --- not allowed in naked functions -warning: Rust ABI is unsupported in naked functions - --> $DIR/naked-functions.rs:126:1 - | -LL | pub unsafe fn default_abi() { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: `#[warn(undefined_naked_function_abi)]` on by default - -warning: Rust ABI is unsupported in naked functions - --> $DIR/naked-functions.rs:132:1 - | -LL | pub unsafe fn rust_abi() { - | ^^^^^^^^^^^^^^^^^^^^^^^^ - -error: aborting due to 25 previous errors; 2 warnings emitted +error: aborting due to 25 previous errors For more information about this error, try `rustc --explain E0787`. diff --git a/tests/ui/cast/cast-array-issue-138836.rs b/tests/ui/cast/cast-array-issue-138836.rs new file mode 100644 index 0000000000000..3f8098e76fd29 --- /dev/null +++ b/tests/ui/cast/cast-array-issue-138836.rs @@ -0,0 +1,5 @@ +fn main() { + let a: [u8; 3] = [1,2,3]; + let b = &a; + let c = b as *const [u32; 3]; //~ ERROR casting `&[u8; 3]` as `*const [u32; 3]` is invalid +} diff --git a/tests/ui/cast/cast-array-issue-138836.stderr b/tests/ui/cast/cast-array-issue-138836.stderr new file mode 100644 index 0000000000000..309474c29f933 --- /dev/null +++ b/tests/ui/cast/cast-array-issue-138836.stderr @@ -0,0 +1,9 @@ +error[E0606]: casting `&[u8; 3]` as `*const [u32; 3]` is invalid + --> $DIR/cast-array-issue-138836.rs:4:13 + | +LL | let c = b as *const [u32; 3]; + | ^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0606`. diff --git a/tests/ui/consts/const-cast-wrong-type.rs b/tests/ui/consts/const-cast-wrong-type.rs index 6e055a2bcd340..9936a660936b0 100644 --- a/tests/ui/consts/const-cast-wrong-type.rs +++ b/tests/ui/consts/const-cast-wrong-type.rs @@ -1,5 +1,5 @@ const a: [u8; 3] = ['h' as u8, 'i' as u8, 0 as u8]; -const b: *const i8 = &a as *const i8; //~ ERROR mismatched types +const b: *const i8 = &a as *const i8; //~ ERROR casting `&[u8; 3]` as `*const i8` is invalid fn main() { } diff --git a/tests/ui/consts/const-cast-wrong-type.stderr b/tests/ui/consts/const-cast-wrong-type.stderr index 44361f15d8a98..0730bac22354d 100644 --- a/tests/ui/consts/const-cast-wrong-type.stderr +++ b/tests/ui/consts/const-cast-wrong-type.stderr @@ -1,9 +1,9 @@ -error[E0308]: mismatched types +error[E0606]: casting `&[u8; 3]` as `*const i8` is invalid --> $DIR/const-cast-wrong-type.rs:2:22 | LL | const b: *const i8 = &a as *const i8; - | ^^^^^^^^^^^^^^^ expected `u8`, found `i8` + | ^^^^^^^^^^^^^^^ error: aborting due to 1 previous error -For more information about this error, try `rustc --explain E0308`. +For more information about this error, try `rustc --explain E0606`. 
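The two cast tests above pin down the E0606 behaviour: a reference may be cast directly only to a raw pointer with a compatible pointee, so `&[u8; 3] as *const i8` is rejected in a single step, while casting to `*const [u8; 3]` first and then to another pointer type is accepted. A small sketch of both sides (not part of this patch):

```rust
fn main() {
    let a: [u8; 3] = [1, 2, 3];

    // Accepted: reference -> pointer with the same pointee, then ptr -> ptr.
    let p: *const [u8; 3] = &a as *const [u8; 3];
    let q: *const i8 = p as *const i8;

    // Rejected with E0606 if uncommented: the pointee changes in a single
    // reference-to-pointer cast.
    // let r = &a as *const i8;

    unsafe { assert_eq!(*q, 1) };
}
```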
diff --git a/tests/crashes/137287.rs b/tests/ui/drop/drop_elaboration_with_errors2.rs similarity index 68% rename from tests/crashes/137287.rs rename to tests/ui/drop/drop_elaboration_with_errors2.rs index 59fdf568d3680..946c253179cb8 100644 --- a/tests/crashes/137287.rs +++ b/tests/ui/drop/drop_elaboration_with_errors2.rs @@ -1,11 +1,14 @@ -//@ known-bug: #137287 +// Regression test for #137287 mod defining_scope { use super::*; pub type Alias = impl Sized; + //~^ ERROR unconstrained opaque type + //~| ERROR `impl Trait` in type aliases is unstable pub fn cast(x: Container, T>) -> Container { x + //~^ ERROR mismatched types } } @@ -21,6 +24,7 @@ impl Trait for T { type Assoc = Box; } impl Trait for defining_scope::Alias { + //~^ ERROR conflicting implementations of trait `Trait<_>` type Assoc = usize; } diff --git a/tests/ui/drop/drop_elaboration_with_errors2.stderr b/tests/ui/drop/drop_elaboration_with_errors2.stderr new file mode 100644 index 0000000000000..15fe3f6ecc1f4 --- /dev/null +++ b/tests/ui/drop/drop_elaboration_with_errors2.stderr @@ -0,0 +1,47 @@ +error[E0658]: `impl Trait` in type aliases is unstable + --> $DIR/drop_elaboration_with_errors2.rs:5:25 + | +LL | pub type Alias = impl Sized; + | ^^^^^^^^^^ + | + = note: see issue #63063 for more information + = help: add `#![feature(type_alias_impl_trait)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0119]: conflicting implementations of trait `Trait<_>` + --> $DIR/drop_elaboration_with_errors2.rs:26:1 + | +LL | impl Trait for T { + | ---------------------- first implementation here +... +LL | impl Trait for defining_scope::Alias { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation + +error: unconstrained opaque type + --> $DIR/drop_elaboration_with_errors2.rs:5:25 + | +LL | pub type Alias = impl Sized; + | ^^^^^^^^^^ + | + = note: `Alias` must be used in combination with a concrete type within the same crate + +error[E0308]: mismatched types + --> $DIR/drop_elaboration_with_errors2.rs:10:9 + | +LL | pub type Alias = impl Sized; + | ---------- the found opaque type +... +LL | pub fn cast(x: Container, T>) -> Container { + | - expected this type parameter --------------- expected `Container` because of return type +LL | x + | ^ expected `Container`, found `Container, T>` + | + = note: expected struct `Container` + found struct `Container, _>` + = help: type parameters must be constrained to match other types + = note: for more information, visit https://doc.rust-lang.org/book/ch10-02-traits.html#traits-as-parameters + +error: aborting due to 4 previous errors + +Some errors have detailed explanations: E0119, E0308, E0658. +For more information about an error, try `rustc --explain E0119`. 
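Relanding the crash test above as a ui test means replacing `//@ known-bug:` with explicit `//~` annotations that have to match the emitted diagnostics. As a reminder of that convention, here is a hypothetical minimal ui test (not part of this patch): `//~^` points one line up, and the message text is assumed to come from the `overflowing_literals` lint.

```rust
fn main() {
    let _x: u8 = 300;
    //~^ ERROR literal out of range for `u8`
}
```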
diff --git a/tests/crashes/135668.rs b/tests/ui/drop/drop_elaboration_with_errors3.rs similarity index 87% rename from tests/crashes/135668.rs rename to tests/ui/drop/drop_elaboration_with_errors3.rs index 00d7b5db0c679..c5ed63eb7ac2b 100644 --- a/tests/crashes/135668.rs +++ b/tests/ui/drop/drop_elaboration_with_errors3.rs @@ -1,5 +1,6 @@ -//@ known-bug: #135668 +// Regression test for #135668 //@ edition: 2021 + use std::future::Future; pub async fn foo() { @@ -11,7 +12,8 @@ async fn create_task() -> impl Sized { } async fn documentation() { - include_str!("nonexistent"); + compile_error!("bonjour"); + //~^ ERROR bonjour } fn bind(_filter: F) -> impl Sized @@ -36,3 +38,5 @@ where { type Assoc = F; } + +fn main() {} diff --git a/tests/ui/drop/drop_elaboration_with_errors3.stderr b/tests/ui/drop/drop_elaboration_with_errors3.stderr new file mode 100644 index 0000000000000..2d44e7c662597 --- /dev/null +++ b/tests/ui/drop/drop_elaboration_with_errors3.stderr @@ -0,0 +1,8 @@ +error: bonjour + --> $DIR/drop_elaboration_with_errors3.rs:15:5 + | +LL | compile_error!("bonjour"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 1 previous error + diff --git a/tests/ui/feature-gates/feature-gate-naked_functions_rustic_abi.rs b/tests/ui/feature-gates/feature-gate-naked_functions_rustic_abi.rs new file mode 100644 index 0000000000000..c91d833994414 --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-naked_functions_rustic_abi.rs @@ -0,0 +1,26 @@ +//@ needs-asm-support +//@ only-x86_64 + +#![feature(naked_functions, rust_cold_cc)] + +use std::arch::naked_asm; + +#[naked] +pub unsafe fn rust_implicit() { + //~^ ERROR `#[naked]` is currently unstable on `extern "Rust"` functions + naked_asm!("ret"); +} + +#[naked] +pub unsafe extern "Rust" fn rust_explicit() { + //~^ ERROR `#[naked]` is currently unstable on `extern "Rust"` functions + naked_asm!("ret"); +} + +#[naked] +pub unsafe extern "rust-cold" fn rust_cold() { + //~^ ERROR `#[naked]` is currently unstable on `extern "rust-cold"` functions + naked_asm!("ret"); +} + +fn main() {} diff --git a/tests/ui/feature-gates/feature-gate-naked_functions_rustic_abi.stderr b/tests/ui/feature-gates/feature-gate-naked_functions_rustic_abi.stderr new file mode 100644 index 0000000000000..ba45e15ec86b6 --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-naked_functions_rustic_abi.stderr @@ -0,0 +1,33 @@ +error[E0658]: `#[naked]` is currently unstable on `extern "Rust"` functions + --> $DIR/feature-gate-naked_functions_rustic_abi.rs:9:1 + | +LL | pub unsafe fn rust_implicit() { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #138997 for more information + = help: add `#![feature(naked_functions_rustic_abi)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: `#[naked]` is currently unstable on `extern "Rust"` functions + --> $DIR/feature-gate-naked_functions_rustic_abi.rs:15:1 + | +LL | pub unsafe extern "Rust" fn rust_explicit() { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #138997 for more information + = help: add `#![feature(naked_functions_rustic_abi)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: `#[naked]` is currently unstable on `extern "rust-cold"` functions + --> $DIR/feature-gate-naked_functions_rustic_abi.rs:21:1 + | +LL | pub unsafe extern "rust-cold" fn rust_cold() { + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #138997 for more information + = help: add `#![feature(naked_functions_rustic_abi)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0658`. diff --git a/tests/ui/lint/dead-code/self-assign.rs b/tests/ui/lint/dead-code/self-assign.rs index 072a899e1bdb1..357846baf2212 100644 --- a/tests/ui/lint/dead-code/self-assign.rs +++ b/tests/ui/lint/dead-code/self-assign.rs @@ -1,19 +1,29 @@ -// Test that dead code warnings are issued for superfluous assignments of -// fields or variables to themselves (issue #75356). - -//@ ignore-test FIXME(81658, 83171) +//! Test that dead code warnings are issued for superfluous assignments of fields or variables to +//! themselves (issue #75356). +//! +//! # History of this test (to aid relanding of a fixed version of #81473) +//! +//! - Original lint request was about self-assignments not triggering sth like `dead_code`. +//! - `dead_code` lint expansion for self-assignments was implemented in #87129. +//! - Unfortunately implementation components of #87129 had to be disabled as part of reverts +//! #86212, #83171 (to revert #81473) to address regressions #81626 and #81658. +//! - Consequently, none of the following warnings are emitted. //@ check-pass + +// Implementation of self-assignment `dead_code` lint expansions disabled due to reverts. +//@ known-bug: #75356 + #![allow(unused_assignments)] #![warn(dead_code)] fn main() { let mut x = 0; x = x; - //~^ WARNING: useless assignment of variable of type `i32` to itself + // FIXME ~^ WARNING: useless assignment of variable of type `i32` to itself x = (x); - //~^ WARNING: useless assignment of variable of type `i32` to itself + // FIXME ~^ WARNING: useless assignment of variable of type `i32` to itself x = {x}; // block expressions don't count as self-assignments @@ -22,10 +32,10 @@ fn main() { struct S<'a> { f: &'a str } let mut s = S { f: "abc" }; s = s; - //~^ WARNING: useless assignment of variable of type `S` to itself + // FIXME ~^ WARNING: useless assignment of variable of type `S` to itself s.f = s.f; - //~^ WARNING: useless assignment of field of type `&str` to itself + // FIXME ~^ WARNING: useless assignment of field of type `&str` to itself struct N0 { x: Box } @@ -34,11 +44,11 @@ fn main() { struct N3 { n: N2 }; let mut n3 = N3 { n: N2(N1 { n: N0 { x: Box::new(42) } }) }; n3.n.0.n.x = n3.n.0.n.x; - //~^ WARNING: useless assignment of field of type `Box` to itself + // FIXME ~^ WARNING: useless assignment of field of type `Box` to itself let mut t = (1, ((2, 3, (4, 5)),)); t.1.0.2.1 = t.1.0.2.1; - //~^ WARNING: useless assignment of field of type `i32` to itself + // FIXME ~^ WARNING: useless assignment of field of type `i32` to itself let mut y = 0; diff --git a/tests/ui/lint/dead-code/self-assign.stderr b/tests/ui/lint/dead-code/self-assign.stderr deleted file mode 100644 index bb79c0ec72a34..0000000000000 --- a/tests/ui/lint/dead-code/self-assign.stderr +++ /dev/null @@ -1,44 +0,0 @@ -warning: useless assignment of variable of type `i32` to itself - --> $DIR/self-assign.rs:10:5 - | -LL | x = x; - | ^^^^^ - | -note: the lint level is defined here - --> $DIR/self-assign.rs:6:9 - | -LL | #![warn(dead_code)] - | ^^^^^^^^^ - -warning: useless assignment of variable of type `i32` to itself - --> $DIR/self-assign.rs:13:5 - | -LL | x = (x); - | ^^^^^^^ - 
-warning: useless assignment of variable of type `S` to itself - --> $DIR/self-assign.rs:22:5 - | -LL | s = s; - | ^^^^^ - -warning: useless assignment of field of type `&str` to itself - --> $DIR/self-assign.rs:25:5 - | -LL | s.f = s.f; - | ^^^^^^^^^ - -warning: useless assignment of field of type `Box` to itself - --> $DIR/self-assign.rs:34:5 - | -LL | n3.n.0.n.x = n3.n.0.n.x; - | ^^^^^^^^^^^^^^^^^^^^^^^ - -warning: useless assignment of field of type `i32` to itself - --> $DIR/self-assign.rs:38:5 - | -LL | t.1.0.2.1 = t.1.0.2.1; - | ^^^^^^^^^^^^^^^^^^^^^ - -warning: 6 warnings emitted - diff --git a/tests/ui/lint/removed-lints/undefined_naked_function_abi.rs b/tests/ui/lint/removed-lints/undefined_naked_function_abi.rs new file mode 100644 index 0000000000000..cf3ac66ac86d4 --- /dev/null +++ b/tests/ui/lint/removed-lints/undefined_naked_function_abi.rs @@ -0,0 +1,5 @@ +//@ check-pass + +#![deny(undefined_naked_function_abi)] +//~^ WARN lint `undefined_naked_function_abi` has been removed +fn main() {} diff --git a/tests/ui/lint/removed-lints/undefined_naked_function_abi.stderr b/tests/ui/lint/removed-lints/undefined_naked_function_abi.stderr new file mode 100644 index 0000000000000..5a546688beb5c --- /dev/null +++ b/tests/ui/lint/removed-lints/undefined_naked_function_abi.stderr @@ -0,0 +1,10 @@ +warning: lint `undefined_naked_function_abi` has been removed: converted into hard error, see PR #139001 for more information + --> $DIR/undefined_naked_function_abi.rs:3:9 + | +LL | #![deny(undefined_naked_function_abi)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(renamed_and_removed_lints)]` on by default + +warning: 1 warning emitted + diff --git a/tests/ui/macros/remove-repetition-issue-139480.rs b/tests/ui/macros/remove-repetition-issue-139480.rs new file mode 100644 index 0000000000000..1efb4306763e4 --- /dev/null +++ b/tests/ui/macros/remove-repetition-issue-139480.rs @@ -0,0 +1,28 @@ +macro_rules! ciallo { + ($($v: vis)? $name: ident) => { + //~^ error: repetition matches empty token tree + }; +} + +macro_rules! meow { + ($name: ident $($v: vis)?) => { + //~^ error: repetition matches empty token tree + }; +} + +macro_rules! gbc { + ($name: ident $/* + this comment gets removed by the suggestion + */ + ($v: vis)?) => { + //~^ error: repetition matches empty token tree + }; +} + +ciallo!(hello); + +meow!(miaow, pub); + +gbc!(mygo,); + +fn main() {} diff --git a/tests/ui/macros/remove-repetition-issue-139480.stderr b/tests/ui/macros/remove-repetition-issue-139480.stderr new file mode 100644 index 0000000000000..c2475589ee9ad --- /dev/null +++ b/tests/ui/macros/remove-repetition-issue-139480.stderr @@ -0,0 +1,44 @@ +error: repetition matches empty token tree + --> $DIR/remove-repetition-issue-139480.rs:2:7 + | +LL | ($($v: vis)? $name: ident) => { + | ^^^^^^^^^ + | + = note: a `vis` fragment can already be empty +help: remove the `$(` and `)?` + | +LL - ($($v: vis)? $name: ident) => { +LL + ($v: vis $name: ident) => { + | + +error: repetition matches empty token tree + --> $DIR/remove-repetition-issue-139480.rs:8:20 + | +LL | ($name: ident $($v: vis)?) => { + | ^^^^^^^^^ + | + = note: a `vis` fragment can already be empty +help: remove the `$(` and `)?` + | +LL - ($name: ident $($v: vis)?) => { +LL + ($name: ident $v: vis) => { + | + +error: repetition matches empty token tree + --> $DIR/remove-repetition-issue-139480.rs:17:9 + | +LL | ($v: vis)?) 
=> { + | ^^^^^^^^^ + | + = note: a `vis` fragment can already be empty +help: remove the `$(` and `)?` + | +LL - ($name: ident $/* +LL - this comment gets removed by the suggestion +LL - */ +LL - ($v: vis)?) => { +LL + ($name: ident $v: vis) => { + | + +error: aborting due to 3 previous errors +
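The suggestions in the stderr above drop the `$( ... )?` wrapper because a `vis` fragment already matches an empty visibility on its own. A standalone sketch of a macro fixed in that spirit (not part of this patch):

```rust
macro_rules! ciallo {
    ($v:vis $name:ident) => {
        // The `vis` fragment matches either nothing or a visibility like `pub`.
        $v fn $name() {}
    };
}

ciallo!(hello); // expands to `fn hello() {}`
ciallo!(pub world); // expands to `pub fn world() {}`

fn main() {
    hello();
    world();
}
```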