@@ -567,26 +567,28 @@ unsafe fn find_identifier_end_avx2(input: &str, mut offset: usize) -> usize {
     use core::mem::size_of;
     use std::arch::x86_64::*;

-    unsafe fn range_mask(x: __m256i, range: RangeInclusive<u8>) -> __m256i {
+    type Chunk = __m256i;
+
+    unsafe fn range_mask(x: Chunk, range: RangeInclusive<u8>) -> Chunk {
         unsafe {
             let lower = _mm256_cmpgt_epi8(_mm256_set1_epi8(*range.end() as i8 + 1), x);
             let upper = _mm256_cmpgt_epi8(x, _mm256_set1_epi8(*range.start() as i8 - 1));
             _mm256_and_si256(upper, lower)
         }
     }

-    unsafe fn any_non_ascii(chunk: std::arch::x86_64::__m256i) -> bool {
+    unsafe fn any_non_ascii(chunk: Chunk) -> bool {
         unsafe { _mm256_testz_si256(_mm256_set1_epi8(i8::MIN), chunk) == 0 }
     }

-    while (offset + size_of::<__m256>()) <= input.len() {
+    while (offset + size_of::<Chunk>()) <= input.len() {
         // SAFETY: requires that a 32-byte load from `input.as_ptr() + offset` does not touch uninitialised memory.
         // The above length check guarantees this.
         let ident_mask = unsafe {
             let chunk = _mm256_loadu_si256(
                 // the `loadu` variant of this intrinsic doesn't require aligned addresses
                 #[allow(clippy::cast_ptr_alignment)]
-                input.as_ptr().add(offset).cast(),
+                input.as_ptr().add(offset).cast::<Chunk>(),
             );
             if any_non_ascii(chunk) {
                 break;
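
The `range_mask` helper above keeps the existing trick of expressing an inclusive byte-range test, start <= x <= end, as two signed greater-than comparisons (x > start - 1 and end + 1 > x), since AVX2 only offers a signed greater-than compare for 8-bit lanes. A minimal standalone sketch of the same pattern, not part of this patch (the function name and the digit range are purely illustrative):

use std::arch::x86_64::*;

/// Hypothetical helper: one bit per byte of `bytes`, set where the byte is an ASCII digit.
#[target_feature(enable = "avx2")]
unsafe fn ascii_digit_mask(bytes: &[u8; 32]) -> u32 {
    unsafe {
        // Unaligned 32-byte load; the fixed-size array keeps the load in bounds.
        let chunk = _mm256_loadu_si256(bytes.as_ptr().cast());
        // x >= b'0'  <=>  x > b'0' - 1 (bytes >= 0x80 are negative as i8 and correctly fail)
        let ge_start = _mm256_cmpgt_epi8(chunk, _mm256_set1_epi8(b'0' as i8 - 1));
        // x <= b'9'  <=>  b'9' + 1 > x
        let le_end = _mm256_cmpgt_epi8(_mm256_set1_epi8(b'9' as i8 + 1), chunk);
        // Collapse the per-lane 0x00/0xFF result into one bit per byte.
        _mm256_movemask_epi8(_mm256_and_si256(ge_start, le_end)) as u32
    }
}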
@@ -609,7 +611,7 @@ unsafe fn find_identifier_end_avx2(input: &str, mut offset: usize) -> usize {
             offset += ident_mask.trailing_ones() as usize;
             return offset;
         }
-        offset += size_of::<__m256>();
+        offset += size_of::<Chunk>();
     }

     find_identifier_end_generic(input, offset)
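
For the second hunk's context: the `offset += ident_mask.trailing_ones()` path advances past the run of matching bytes at the start of the chunk and returns, while the `offset += size_of::<Chunk>()` path moves on to the next 32-byte chunk when no early return was taken. Assuming `ident_mask` is the `u32` produced by `_mm256_movemask_epi8` elsewhere in the function (that part is elided from this diff), the bit counting works like this small, purely illustrative check:

fn main() {
    // If only the low three bytes of a chunk matched (bits 0..=2 set),
    // the scan advances by exactly 3 bytes before returning.
    let ident_mask: u32 = 0b0111;
    assert_eq!(ident_mask.trailing_ones(), 3);

    // An all-ones mask means the whole 32-byte chunk matched; the loop then
    // steps forward by size_of::<Chunk>() (32 bytes) and keeps scanning.
    assert_eq!(u32::MAX.trailing_ones(), 32);
}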