@@ -4,17 +4,19 @@ use rustc_attr as attr;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::Applicability;
 use rustc_hir as hir;
-use rustc_hir::{is_range_literal, ExprKind, Node};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{is_range_literal, Expr, ExprKind, Node};
 use rustc_middle::ty::layout::{IntegerExt, SizeSkeleton};
 use rustc_middle::ty::subst::SubstsRef;
-use rustc_middle::ty::{self, AdtKind, Ty, TyCtxt, TypeFoldable};
+use rustc_middle::ty::{self, AdtKind, DefIdTree, Ty, TyCtxt, TypeFoldable};
 use rustc_span::source_map;
 use rustc_span::symbol::sym;
-use rustc_span::{Span, DUMMY_SP};
+use rustc_span::{Span, Symbol, DUMMY_SP};
 use rustc_target::abi::Abi;
 use rustc_target::abi::{Integer, LayoutOf, TagEncoding, Variants};
 use rustc_target::spec::abi::Abi as SpecAbi;

+use if_chain::if_chain;
 use std::cmp;
 use std::iter;
 use std::ops::ControlFlow;
@@ -1379,3 +1381,234 @@ impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
         }
     }
 }
+
+declare_lint! {
+    /// The `invalid_atomic_ordering` lint detects passing an `Ordering`
+    /// to an atomic operation that does not support that ordering.
+    ///
+    /// ### Example
+    ///
+    /// ```rust,compile_fail
+    /// # use core::sync::atomic::{AtomicU8, Ordering};
+    /// let atom = AtomicU8::new(0);
+    /// let value = atom.load(Ordering::Release);
+    /// # let _ = value;
+    /// ```
+    ///
+    /// {{produces}}
+    ///
+    /// ### Explanation
+    ///
+    /// Some atomic operations are only supported for a subset of the
+    /// `atomic::Ordering` variants. Passing an unsupported variant will cause
+    /// an unconditional panic at runtime, which is detected by this lint.
+    ///
+    /// This lint will trigger in the following cases (where `AtomicType` is an
+    /// atomic type from `core::sync::atomic`, such as `AtomicBool`,
+    /// `AtomicPtr`, `AtomicUsize`, or any of the other integer atomics):
+    ///
+    /// - Passing `Ordering::Acquire` or `Ordering::AcqRel` to
+    ///   `AtomicType::store`.
+    ///
+    /// - Passing `Ordering::Release` or `Ordering::AcqRel` to
+    ///   `AtomicType::load`.
+    ///
+    /// - Passing `Ordering::Relaxed` to `core::sync::atomic::fence` or
+    ///   `core::sync::atomic::compiler_fence`.
+    ///
+    /// - Passing `Ordering::Release` or `Ordering::AcqRel` as the failure
+    ///   ordering for any of `AtomicType::compare_exchange`,
+    ///   `AtomicType::compare_exchange_weak`, or `AtomicType::fetch_update`.
+    ///
+    /// - Passing in a pair of orderings to `AtomicType::compare_exchange`,
+    ///   `AtomicType::compare_exchange_weak`, or `AtomicType::fetch_update`
+    ///   where the failure ordering is stronger than the success ordering.
+    INVALID_ATOMIC_ORDERING,
+    Deny,
+    "usage of invalid atomic ordering in atomic operations and memory fences"
+}
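The doc example above only shows the `load` case. As an illustration of the last two bullet points, here is a minimal, hypothetical snippet (not part of this commit; the function name `sketch` is made up) that the deny-by-default lint rejects, using only `std`'s atomic API:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

fn sketch(counter: &AtomicUsize) {
    // `AcqRel` is never a valid failure ordering: the lint rejects this call
    // at compile time, and without the lint it panics at runtime.
    let _ = counter.compare_exchange(0, 1, Ordering::AcqRel, Ordering::AcqRel);
}
```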
+
+declare_lint_pass!(InvalidAtomicOrdering => [INVALID_ATOMIC_ORDERING]);
+
+impl InvalidAtomicOrdering {
+    fn inherent_atomic_method_call<'hir>(
+        cx: &LateContext<'_>,
+        expr: &Expr<'hir>,
+    ) -> Option<(Symbol, &'hir [Expr<'hir>])> {
+        const ATOMIC_TYPES: &[Symbol] = &[
+            sym::AtomicBool,
+            sym::AtomicPtr,
+            sym::AtomicUsize,
+            sym::AtomicU8,
+            sym::AtomicU16,
+            sym::AtomicU32,
+            sym::AtomicU64,
+            sym::AtomicU128,
+            sym::AtomicIsize,
+            sym::AtomicI8,
+            sym::AtomicI16,
+            sym::AtomicI32,
+            sym::AtomicI64,
+            sym::AtomicI128,
+        ];
+        if_chain! {
+            if let ExprKind::MethodCall(ref method_path, _, args, _) = &expr.kind;
+            if let Some(m_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+            if let Some(impl_did) = cx.tcx.impl_of_method(m_def_id);
+            if let Some(adt) = cx.tcx.type_of(impl_did).ty_adt_def();
+            // skip extension traits, only lint functions from the standard library
+            if cx.tcx.trait_id_of_impl(impl_did).is_none();
+
+            if let Some(parent) = cx.tcx.parent(adt.did);
+            if cx.tcx.is_diagnostic_item(sym::atomic_mod, parent);
+            if ATOMIC_TYPES.contains(&cx.tcx.item_name(adt.did));
+            then {
+                return Some((method_path.ident.name, args));
+            }
+        }
+        None
+    }
+
+    fn matches_ordering(cx: &LateContext<'_>, did: DefId, orderings: &[Symbol]) -> bool {
+        let tcx = cx.tcx;
+        let atomic_ordering = tcx.get_diagnostic_item(sym::Ordering);
+        orderings.iter().any(|ordering| {
+            tcx.item_name(did) == *ordering && {
+                let parent = tcx.parent(did);
+                parent == atomic_ordering
+                    // needed in case this is a ctor, not a variant
+                    || parent.map_or(false, |parent| tcx.parent(parent) == atomic_ordering)
+            }
+        })
+    }
+
+    fn opt_ordering_defid(cx: &LateContext<'_>, ord_arg: &Expr<'_>) -> Option<DefId> {
+        if let ExprKind::Path(ref ord_qpath) = ord_arg.kind {
+            cx.qpath_res(ord_qpath, ord_arg.hir_id).opt_def_id()
+        } else {
+            None
+        }
+    }
+
+    fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
+        use rustc_hir::def::{DefKind, Res};
+        use rustc_hir::QPath;
+        if_chain! {
+            if let Some((method, args)) = Self::inherent_atomic_method_call(cx, expr);
+            if let Some((ordering_arg, invalid_ordering)) = match method {
+                sym::load => Some((&args[1], sym::Release)),
+                sym::store => Some((&args[2], sym::Acquire)),
+                _ => None,
+            };
+
+            if let ExprKind::Path(QPath::Resolved(_, path)) = ordering_arg.kind;
+            if let Res::Def(DefKind::Ctor(..), ctor_id) = path.res;
+            if Self::matches_ordering(cx, ctor_id, &[invalid_ordering, sym::AcqRel]);
+            then {
+                cx.struct_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, |diag| {
+                    if method == sym::load {
+                        diag.build("atomic loads cannot have `Release` or `AcqRel` ordering")
+                            .help("consider using ordering modes `Acquire`, `SeqCst` or `Relaxed`")
+                            .emit()
+                    } else {
+                        debug_assert_eq!(method, sym::store);
+                        diag.build("atomic stores cannot have `Acquire` or `AcqRel` ordering")
+                            .help("consider using ordering modes `Release`, `SeqCst` or `Relaxed`")
+                            .emit();
+                    }
+                });
+            }
+        }
+    }
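As a quick illustration of what `check_atomic_load_store` flags, consider the hypothetical snippet below (not part of this commit). Note that the argument slice returned by `inherent_atomic_method_call` still includes the receiver at index 0, which is why the `Ordering` argument is `args[1]` for `load` and `args[2]` for `store`.

```rust
use std::sync::atomic::{AtomicBool, Ordering};

fn sketch(flag: &AtomicBool) {
    // Both calls are flagged: loads cannot use `Release`/`AcqRel`,
    // and stores cannot use `Acquire`/`AcqRel`.
    let _ = flag.load(Ordering::Release);
    flag.store(true, Ordering::Acquire);
}
```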
+
+    fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
+        if_chain! {
+            if let ExprKind::Call(ref func, ref args) = expr.kind;
+            if let ExprKind::Path(ref func_qpath) = func.kind;
+            if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
+            if cx.tcx.is_diagnostic_item(sym::fence, def_id) ||
+                cx.tcx.is_diagnostic_item(sym::compiler_fence, def_id);
+            if let ExprKind::Path(ref ordering_qpath) = &args[0].kind;
+            if let Some(ordering_def_id) = cx.qpath_res(ordering_qpath, args[0].hir_id).opt_def_id();
+            if Self::matches_ordering(cx, ordering_def_id, &[sym::Relaxed]);
+            then {
+                cx.struct_span_lint(INVALID_ATOMIC_ORDERING, args[0].span, |diag| {
+                    diag.build("memory fences cannot have `Relaxed` ordering")
+                        .help("consider using ordering modes `Acquire`, `Release`, `AcqRel` or `SeqCst`")
+                        .emit();
+                });
+            }
+        }
+    }
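`fence` and `compiler_fence` are matched through their diagnostic items, so the check is independent of how they are imported. A hypothetical snippet this function flags (not part of this commit):

```rust
use std::sync::atomic::{compiler_fence, fence, Ordering};

fn sketch() {
    // Both calls are flagged: a `Relaxed` fence panics at runtime.
    fence(Ordering::Relaxed);
    compiler_fence(Ordering::Relaxed);
}
```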
+
+    fn check_atomic_compare_exchange(cx: &LateContext<'_>, expr: &Expr<'_>) {
+        if_chain! {
+            if let Some((method, args)) = Self::inherent_atomic_method_call(cx, expr);
+            if let Some((success_order_arg, failure_order_arg)) = match method {
+                sym::fetch_update => Some((&args[1], &args[2])),
+                sym::compare_exchange | sym::compare_exchange_weak => Some((&args[3], &args[4])),
+                _ => None,
+            };
+
+            if let Some(fail_ordering_def_id) = Self::opt_ordering_defid(cx, failure_order_arg);
+            then {
+                // Helper type holding on to some checking and error reporting data. Has
+                // - the success ordering,
+                // - the list of failure orderings forbidden by that success ordering,
+                // - the suggestion message.
+                type OrdLintInfo = (Symbol, &'static [Symbol], &'static str);
+                const RELAXED: OrdLintInfo = (sym::Relaxed, &[sym::SeqCst, sym::Acquire], "ordering mode `Relaxed`");
+                const ACQUIRE: OrdLintInfo = (sym::Acquire, &[sym::SeqCst], "ordering modes `Acquire` or `Relaxed`");
+                const SEQ_CST: OrdLintInfo = (sym::SeqCst, &[], "ordering modes `Acquire`, `SeqCst` or `Relaxed`");
+                const RELEASE: OrdLintInfo = (sym::Release, RELAXED.1, RELAXED.2);
+                const ACQREL: OrdLintInfo = (sym::AcqRel, ACQUIRE.1, ACQUIRE.2);
+                const SEARCH: [OrdLintInfo; 5] = [RELAXED, ACQUIRE, SEQ_CST, RELEASE, ACQREL];
+
+                let success_lint_info = Self::opt_ordering_defid(cx, success_order_arg)
+                    .and_then(|success_ord_def_id| -> Option<OrdLintInfo> {
+                        SEARCH
+                            .iter()
+                            .copied()
+                            .find(|(ordering, ..)| {
+                                Self::matches_ordering(cx, success_ord_def_id, &[*ordering])
+                            })
+                    });
+                if Self::matches_ordering(cx, fail_ordering_def_id, &[sym::Release, sym::AcqRel]) {
+                    // If we don't know what the success order is, use what we'd suggest
+                    // if it were maximally permissive.
+                    let suggested = success_lint_info.unwrap_or(SEQ_CST).2;
+                    cx.struct_span_lint(INVALID_ATOMIC_ORDERING, failure_order_arg.span, |diag| {
+                        let msg = format!(
+                            "{}'s failure ordering may not be `Release` or `AcqRel`",
+                            method,
+                        );
+                        diag.build(&msg)
+                            .help(&format!("consider using {} instead", suggested))
+                            .emit();
+                    });
+                } else if let Some((success_ord, bad_ords_given_success, suggested)) = success_lint_info {
+                    if Self::matches_ordering(cx, fail_ordering_def_id, bad_ords_given_success) {
+                        cx.struct_span_lint(INVALID_ATOMIC_ORDERING, failure_order_arg.span, |diag| {
+                            let msg = format!(
+                                "{}'s failure ordering may not be stronger than the success ordering of `{}`",
+                                method,
+                                success_ord,
+                            );
+                            diag.build(&msg)
+                                .help(&format!("consider using {} instead", suggested))
+                                .emit();
+                        });
+                    }
+                }
+            }
+        }
+    }
+}
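The `else if` branch above handles the case where the failure ordering is not outright invalid but merely stronger than the success ordering. A hypothetical snippet that reaches it (not part of this commit):

```rust
use std::sync::atomic::{AtomicU32, Ordering};

fn sketch(x: &AtomicU32) {
    // Success ordering `Relaxed`, failure ordering `Acquire`: the failure
    // ordering is stronger, so the lint fires and suggests ordering mode
    // `Relaxed` for the failure argument.
    let _ = x.compare_exchange_weak(0, 1, Ordering::Relaxed, Ordering::Acquire);
}
```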
1607
+
1608
+ impl < ' tcx > LateLintPass < ' tcx > for InvalidAtomicOrdering {
1609
+ fn check_expr ( & mut self , cx : & LateContext < ' tcx > , expr : & ' tcx Expr < ' _ > ) {
1610
+ Self :: check_atomic_load_store ( cx, expr) ;
1611
+ Self :: check_memory_fence ( cx, expr) ;
1612
+ Self :: check_atomic_compare_exchange ( cx, expr) ;
1613
+ }
1614
+ }