@@ -2,7 +2,6 @@
 // Copyright 2015 Andrew Gallant, bluss and Nicolas Koch
 
 use crate::cmp;
-use crate::intrinsics;
 use crate::mem;
 
 const LO_USIZE: usize = usize::repeat_u8(0x01);
@@ -17,53 +16,51 @@ const USIZE_BYTES: usize = mem::size_of::<usize>();
 /// bytes where the borrow propagated all the way to the most significant
 /// bit."
 #[inline]
-fn contains_zero_byte(x: usize) -> bool {
+const fn contains_zero_byte(x: usize) -> bool {
     x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0
 }
 
 #[cfg(target_pointer_width = "16")]
 #[inline]
-fn repeat_byte(b: u8) -> usize {
+const fn repeat_byte(b: u8) -> usize {
     (b as usize) << 8 | b as usize
 }
 
 #[cfg(not(target_pointer_width = "16"))]
 #[inline]
-fn repeat_byte(b: u8) -> usize {
+const fn repeat_byte(b: u8) -> usize {
     (b as usize) * (usize::MAX / 255)
 }
 
 /// Returns the first index matching the byte `x` in `text`.
 #[must_use]
 #[inline]
 pub const fn memchr(x: u8, text: &[u8]) -> Option<usize> {
-    #[inline]
-    fn rt_impl(x: u8, text: &[u8]) -> Option<usize> {
-        // Fast path for small slices
-        if text.len() < 2 * USIZE_BYTES {
-            return text.iter().position(|elt| *elt == x);
-        }
-
-        memchr_general_case(x, text)
+    // Fast path for small slices.
+    if text.len() < 2 * USIZE_BYTES {
+        return memchr_naive(x, text);
     }
 
-    const fn const_impl(x: u8, bytes: &[u8]) -> Option<usize> {
-        let mut i = 0;
-        while i < bytes.len() {
-            if bytes[i] == x {
-                return Some(i);
-            }
-            i += 1;
+    memchr_aligned(x, text)
+}
+
+#[inline]
+const fn memchr_naive(x: u8, text: &[u8]) -> Option<usize> {
+    let mut i = 0;
+
+    // FIXME(const-hack): Replace with `text.iter().pos(|c| *c == x)`.
+    while i < text.len() {
+        if text[i] == x {
+            return Some(i);
         }
 
-        None
+        i += 1;
     }
 
-    // SAFETY: The const and runtime versions have identical behavior
-    unsafe { intrinsics::const_eval_select((x, text), const_impl, rt_impl) }
+    None
 }
 
-fn memchr_general_case(x: u8, text: &[u8]) -> Option<usize> {
+const fn memchr_aligned(x: u8, text: &[u8]) -> Option<usize> {
     // Scan for a single byte value by reading two `usize` words at a time.
     //
     // Split `text` in three parts
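For review context, here is a minimal standalone sketch (not part of the patch) of the SWAR trick that `repeat_byte` and `contains_zero_byte` implement and that the unchanged body of `memchr_aligned` relies on when it scans two `usize` words per iteration: broadcast the needle byte across a word, XOR it against a word of haystack, and a match shows up as a zero byte. The helpers are restated self-contained; the `main` and its test values are invented for illustration.

// Standalone sketch (not part of the patch; `main` assumes a 64-bit target).

// Broadcast a byte into every byte of a usize, e.g. 0x2f -> 0x2f2f2f2f2f2f2f2f.
const fn repeat_byte(b: u8) -> usize {
    (b as usize) * (usize::MAX / 255)
}

// True if any byte of `x` is 0x00, using the "borrow propagates to the most
// significant bit" trick quoted in the doc comment above.
const fn contains_zero_byte(x: usize) -> bool {
    x.wrapping_sub(repeat_byte(0x01)) & !x & repeat_byte(0x80) != 0
}

fn main() {
    // Load one word of haystack (8 bytes, hence the 64-bit assumption).
    let word = usize::from_ne_bytes(*b"abcdefgh");
    // XOR zeroes out exactly the bytes that equal the needle.
    assert!(contains_zero_byte(word ^ repeat_byte(b'e'))); // 'e' is in the word
    assert!(!contains_zero_byte(word ^ repeat_byte(b'z'))); // 'z' is not
}

Since the trick uses only wrapping subtraction and bitwise operations, marking these helpers `const fn`, as the patch does, requires no other changes to their bodies.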
@@ -78,7 +75,7 @@ fn memchr_general_case(x: u8, text: &[u8]) -> Option<usize> {
 
     if offset > 0 {
         offset = cmp::min(offset, len);
-        if let Some(index) = text[..offset].iter().position(|elt| *elt == x) {
+        if let Some(index) = memchr_naive(x, &text[..offset]) {
             return Some(index);
         }
     }
@@ -103,7 +100,8 @@ fn memchr_general_case(x: u8, text: &[u8]) -> Option<usize> {
     }
 
     // Find the byte after the point the body loop stopped.
-    text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i)
+    // FIXME(const-hack): Use `?` instead.
+    if let Some(i) = memchr_naive(x, &text[offset..]) { Some(offset + i) } else { None }
 }
 
 /// Returns the last index matching the byte `x` in `text`.
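The FIXME(const-hack) comments above mark places where the natural spellings (iterator adapters, `Option::map`, the `?` operator) are not usable because they go through trait calls, which cannot be made inside a `const fn`, so the patch writes them out as index loops and explicit `if let`. A small sketch of that pattern with hypothetical helpers and values, mirroring the shape of `memchr_naive` and the tail scan above:

// Hypothetical helpers, written in the same const-compatible style as the patch.

// Stand-in for `bytes.iter().position(|b| *b == b' ')`, which cannot be
// called in a `const fn` because iterator adapters are trait calls.
const fn find_space(bytes: &[u8]) -> Option<usize> {
    let mut i = 0;
    while i < bytes.len() {
        if bytes[i] == b' ' {
            return Some(i);
        }
        i += 1;
    }
    None
}

// Stand-in for `find_space(bytes).map(|i| skip + i)`: both `?` and the closure
// passed to `map` desugar to trait calls, so the explicit `if let ... else { None }`
// form is used, just like the tail scan in `memchr_aligned`.
const fn find_space_after(bytes: &[u8], skip: usize) -> Option<usize> {
    if let Some(i) = find_space(bytes) { Some(skip + i) } else { None }
}

// Because everything is `const fn`, the search can run at compile time.
const HIT: Option<usize> = find_space_after(b"hello world", 3);

fn main() {
    assert_eq!(HIT, Some(8)); // the space is at index 5, plus the skip of 3
}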