@@ -4,18 +4,20 @@ use rustc_attr as attr;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::Applicability;
 use rustc_hir as hir;
-use rustc_hir::{is_range_literal, ExprKind, Node};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{is_range_literal, Expr, ExprKind, Node};
 use rustc_index::vec::Idx;
 use rustc_middle::ty::layout::{IntegerExt, SizeSkeleton};
 use rustc_middle::ty::subst::SubstsRef;
 use rustc_middle::ty::{self, AdtKind, Ty, TyCtxt, TypeFoldable};
 use rustc_span::source_map;
 use rustc_span::symbol::sym;
-use rustc_span::{Span, DUMMY_SP};
+use rustc_span::{Span, Symbol, DUMMY_SP};
 use rustc_target::abi::Abi;
 use rustc_target::abi::{Integer, LayoutOf, TagEncoding, VariantIdx, Variants};
 use rustc_target::spec::abi::Abi as SpecAbi;
 
+use if_chain::if_chain;
 use std::cmp;
 use std::ops::ControlFlow;
 use tracing::debug;
@@ -1358,3 +1360,216 @@ impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
         }
     }
 }
+
+declare_lint! {
+    /// The `invalid_atomic_ordering` lint detects passing an `Ordering`
+    /// to an atomic operation that does not support that ordering.
+    ///
+    /// ### Example
+    ///
+    /// ```rust
+    /// # use core::sync::atomic::{AtomicU8, Ordering};
+    /// let atom = AtomicU8::new(0);
+    /// let value = atom.load(Ordering::Release);
+    /// # let _ = value;
+    /// ```
+    ///
+    /// {{produces}}
+    ///
+    /// ### Explanation
+    ///
+    /// Some atomic operations are only supported for a subset of the
+    /// `atomic::Ordering` variants. Passing an unsupported variant will cause
+    /// an unconditional panic at runtime, which is detected by this lint.
+    ///
+    /// This lint will trigger in the following cases (where `AtomicType` is an
+    /// atomic type from `core::sync::atomic`, such as `AtomicBool`,
+    /// `AtomicPtr`, `AtomicUsize`, or any of the other integer atomics):
+    ///
+    /// - Passing `Ordering::Acquire` or `Ordering::AcqRel` to
+    ///   `AtomicType::store`.
+    ///
+    /// - Passing `Ordering::Release` or `Ordering::AcqRel` to
+    ///   `AtomicType::load`.
+    ///
+    /// - Passing `Ordering::Relaxed` to `core::sync::atomic::fence` or
+    ///   `core::sync::atomic::compiler_fence`.
+    ///
+    /// - Passing `Ordering::Release` or `Ordering::AcqRel` as the failure
+    ///   ordering for any of `AtomicType::compare_exchange`,
+    ///   `AtomicType::compare_exchange_weak`, or `AtomicType::fetch_update`.
+    ///
+    /// - Passing in a pair of orderings to `AtomicType::compare_exchange`,
+    ///   `AtomicType::compare_exchange_weak`, or `AtomicType::fetch_update`
+    ///   where the failure ordering is stronger than the success ordering.
+    INVALID_ATOMIC_ORDERING,
+    Deny,
+    "usage of invalid atomic ordering in atomic operations and memory fences"
+}
+
+declare_lint_pass!(InvalidAtomicOrdering => [INVALID_ATOMIC_ORDERING]);
+
+impl InvalidAtomicOrdering {
+    fn type_is_atomic(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+        const ATOMIC_TYPES: &[Symbol] = &[
+            sym::atomic_bool_type,
+            sym::atomic_ptr_type,
+            sym::atomic_usize_type,
+            sym::atomic_u8_type,
+            sym::atomic_u16_type,
+            sym::atomic_u32_type,
+            sym::atomic_u64_type,
+            sym::atomic_u128_type,
+            sym::atomic_isize_type,
+            sym::atomic_i8_type,
+            sym::atomic_i16_type,
+            sym::atomic_i32_type,
+            sym::atomic_i64_type,
+            sym::atomic_i128_type,
+        ];
+        if let ty::Adt(&ty::AdtDef { did, .. }, _) = cx.typeck_results().expr_ty(expr).kind() {
+            ATOMIC_TYPES.iter().any(|sym| cx.tcx.is_diagnostic_item(*sym, did))
+        } else {
+            false
+        }
+    }
+
+    fn matches_ordering_def_path(cx: &LateContext<'_>, did: DefId, orderings: &[Symbol]) -> bool {
+        orderings.iter().any(|ordering| {
+            cx.match_def_path(did, &[sym::core, sym::sync, sym::atomic, sym::Ordering, *ordering])
+        })
+    }
+
+    fn opt_ordering_defid(cx: &LateContext<'_>, ord_arg: &Expr<'_>) -> Option<DefId> {
+        if let ExprKind::Path(ref ord_qpath) = ord_arg.kind {
+            cx.qpath_res(ord_qpath, ord_arg.hir_id).opt_def_id()
+        } else {
+            None
+        }
+    }
+
+    fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
+        if_chain! {
+            if let ExprKind::MethodCall(ref method_path, _, args, _) = &expr.kind;
+            let method = method_path.ident.name;
+            if Self::type_is_atomic(cx, &args[0]);
+            if method == sym::load || method == sym::store;
+            let ordering_arg = if method == sym::load { &args[1] } else { &args[2] };
+            if let ExprKind::Path(ref ordering_qpath) = ordering_arg.kind;
+            if let Some(ordering_def_id) = cx.qpath_res(ordering_qpath, ordering_arg.hir_id).opt_def_id();
+            then {
+                if method == sym::load &&
+                    Self::matches_ordering_def_path(cx, ordering_def_id, &[sym::Release, sym::AcqRel]) {
+                    cx.struct_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, |diag| {
+                        diag.build("atomic loads cannot have `Release` or `AcqRel` ordering")
+                            .help("consider using ordering modes `Acquire`, `SeqCst` or `Relaxed`")
+                            .emit();
+                    });
+                } else if method == sym::store &&
+                    Self::matches_ordering_def_path(cx, ordering_def_id, &[sym::Acquire, sym::AcqRel]) {
+                    cx.struct_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, |diag| {
+                        diag.build("atomic stores cannot have `Acquire` or `AcqRel` ordering")
+                            .help("consider using ordering modes `Release`, `SeqCst` or `Relaxed`")
+                            .emit();
+                    });
+                }
+            }
+        }
+    }
+
+    fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
+        if_chain! {
+            if let ExprKind::Call(ref func, ref args) = expr.kind;
+            if let ExprKind::Path(ref func_qpath) = func.kind;
+            if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
+            if cx.tcx.is_diagnostic_item(sym::fence, def_id) ||
+                cx.tcx.is_diagnostic_item(sym::compiler_fence, def_id);
+            if let ExprKind::Path(ref ordering_qpath) = &args[0].kind;
+            if let Some(ordering_def_id) = cx.qpath_res(ordering_qpath, args[0].hir_id).opt_def_id();
+            if Self::matches_ordering_def_path(cx, ordering_def_id, &[sym::Relaxed]);
+            then {
+                cx.struct_span_lint(INVALID_ATOMIC_ORDERING, args[0].span, |diag| {
+                    diag.build("memory fences cannot have `Relaxed` ordering")
+                        .help("consider using ordering modes `Acquire`, `Release`, `AcqRel` or `SeqCst`")
+                        .emit();
+                });
+            }
+        }
+    }
+
+    fn check_atomic_compare_exchange(cx: &LateContext<'_>, expr: &Expr<'_>) {
+        if_chain! {
+            if let ExprKind::MethodCall(ref method_path, _, args, _) = &expr.kind;
+            let method = method_path.ident.name;
+            if Self::type_is_atomic(cx, &args[0]);
+            if method == sym::compare_exchange || method == sym::compare_exchange_weak || method == sym::fetch_update;
+            let (success_order_arg, failure_order_arg) = if method == sym::fetch_update {
+                (&args[1], &args[2])
+            } else {
+                (&args[3], &args[4])
+            };
+            if let Some(fail_ordering_def_id) = Self::opt_ordering_defid(cx, failure_order_arg);
+            then {
+                // Helper type holding the data used for checking and error reporting:
+                // (success ordering,
+                //  failure orderings forbidden by that success ordering,
+                //  suggestion message).
+                type OrdLintInfo = (Symbol, &'static [Symbol], &'static str);
+                let relaxed: OrdLintInfo = (sym::Relaxed, &[sym::SeqCst, sym::Acquire], "ordering mode `Relaxed`");
+                let acquire: OrdLintInfo = (sym::Acquire, &[sym::SeqCst], "ordering modes `Acquire` or `Relaxed`");
+                let seq_cst: OrdLintInfo = (sym::SeqCst, &[], "ordering modes `Acquire`, `SeqCst` or `Relaxed`");
+                let release = (sym::Release, relaxed.1, relaxed.2);
+                let acqrel = (sym::AcqRel, acquire.1, acquire.2);
+                let search = [relaxed, acquire, seq_cst, release, acqrel];
+
+                let success_lint_info = Self::opt_ordering_defid(cx, success_order_arg)
+                    .and_then(|success_ord_def_id| -> Option<OrdLintInfo> {
+                        search
+                            .iter()
+                            .find(|(ordering, ..)| {
+                                cx.match_def_path(
+                                    success_ord_def_id,
+                                    &[sym::core, sym::sync, sym::atomic, sym::Ordering, *ordering],
+                                )
+                            })
+                            .copied()
+                    });
+                if Self::matches_ordering_def_path(cx, fail_ordering_def_id, &[sym::Release, sym::AcqRel]) {
+                    // If we don't know what the success ordering is, use the suggestion
+                    // we'd give if it were maximally permissive (`SeqCst`).
+                    let suggested = success_lint_info.unwrap_or(seq_cst).2;
+                    cx.struct_span_lint(INVALID_ATOMIC_ORDERING, failure_order_arg.span, |diag| {
+                        let msg = format!(
+                            "{}'s failure ordering may not be `Release` or `AcqRel`",
+                            method,
+                        );
+                        diag.build(&msg)
+                            .help(&format!("consider using {} instead", suggested))
+                            .emit();
+                    });
+                } else if let Some((success_ord, bad_ords_given_success, suggested)) = success_lint_info {
+                    if Self::matches_ordering_def_path(cx, fail_ordering_def_id, bad_ords_given_success) {
+                        cx.struct_span_lint(INVALID_ATOMIC_ORDERING, failure_order_arg.span, |diag| {
+                            let msg = format!(
+                                "{}'s failure ordering may not be stronger than the success ordering of `{}`",
+                                method,
+                                success_ord,
+                            );
+                            diag.build(&msg)
+                                .help(&format!("consider using {} instead", suggested))
+                                .emit();
+                        });
+                    }
+                }
+            }
+        }
+    }
+}
+
+impl<'tcx> LateLintPass<'tcx> for InvalidAtomicOrdering {
+    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+        Self::check_atomic_load_store(cx, expr);
+        Self::check_memory_fence(cx, expr);
+        Self::check_atomic_compare_exchange(cx, expr);
+    }
+}
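
For reference, the snippet below is a short standalone sketch (not part of the diff) of the misuse patterns the lint documentation above describes; the atomic type, variable names, and orderings are chosen purely for illustration. Under this change, each call should be reported by the deny-by-default `invalid_atomic_ordering` lint.

```rust
// Illustrative only: every `Ordering` below is one the operation does not
// support, matching the cases listed in the lint documentation above.
use std::sync::atomic::{fence, AtomicUsize, Ordering};

fn main() {
    let counter = AtomicUsize::new(0);

    // Atomic loads may not use `Release` or `AcqRel`.
    let _value = counter.load(Ordering::Release);

    // Atomic stores may not use `Acquire` or `AcqRel`.
    counter.store(1, Ordering::Acquire);

    // The failure (last) ordering may not be `Release` or `AcqRel`, and the
    // lint also rejects a failure ordering stronger than the success ordering
    // (`SeqCst` failure with a `Relaxed` success here).
    let _ = counter.compare_exchange(1, 2, Ordering::Relaxed, Ordering::SeqCst);

    // Memory fences may not use `Relaxed`.
    fence(Ordering::Relaxed);
}
```

As the lint documentation notes, these unsupported orderings otherwise cause unconditional panics at runtime, so the lint moves the failure to compile time.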