@@ -497,12 +497,14 @@ impl<'a> Tokenizer<'a> {
         Ok(tokens)
     }
 
+    // Tokenize the identifier or keyword in `ch`
     fn tokenize_identifier_or_keyword(
         &self,
-        ch: String,
+        ch: impl IntoIterator<Item = char>,
         chars: &mut State,
     ) -> Result<Option<Token>, TokenizerError> {
         chars.next(); // consume the first char
+        let ch: String = ch.into_iter().collect();
         let word = self.tokenize_word(ch, chars);
 
         // TODO: implement parsing of exponent here
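The key change in this hunk is widening `ch` from `String` to `impl IntoIterator<Item = char>`, so call sites can pass a fixed-size char array instead of building a temporary `String`. A minimal standalone sketch of the same pattern (`collect_prefix` is a hypothetical stand-in for the parameter handling, not sqlparser-rs API):

```rust
// Sketch of accepting `impl IntoIterator<Item = char>` instead of `String`.
// `collect_prefix` is hypothetical; it mirrors the new
// `let ch: String = ch.into_iter().collect();` line in the diff above.
fn collect_prefix(ch: impl IntoIterator<Item = char>) -> String {
    ch.into_iter().collect()
}

fn main() {
    // Fixed-size arrays work with no intermediate String allocation at the
    // call site, matching the new `[ch, *sch]`-style callers later in the diff.
    assert_eq!(collect_prefix(['x']), "x");
    assert_eq!(collect_prefix(['c', '@', 'd']), "c@d");
    // An existing String can still be passed via `.chars()`.
    assert_eq!(collect_prefix("abc".chars()), "abc");
}
```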
@@ -550,7 +552,7 @@ impl<'a> Tokenizer<'a> {
                     }
                     _ => {
                         // regular identifier starting with an "b" or "B"
-                        let s = self.tokenize_word(b.to_string(), chars);
+                        let s = self.tokenize_word(b, chars);
                         Ok(Some(Token::make_word(&s, None)))
                     }
                 }
@@ -569,7 +571,7 @@ impl<'a> Tokenizer<'a> {
                     }
                     _ => {
                         // regular identifier starting with an "r" or "R"
-                        let s = self.tokenize_word(b.to_string(), chars);
+                        let s = self.tokenize_word(b, chars);
                         Ok(Some(Token::make_word(&s, None)))
                     }
                 }
@@ -585,7 +587,7 @@ impl<'a> Tokenizer<'a> {
                     }
                     _ => {
                         // regular identifier starting with an "N"
-                        let s = self.tokenize_word(n.to_string(), chars);
+                        let s = self.tokenize_word(n, chars);
                         Ok(Some(Token::make_word(&s, None)))
                     }
                 }
@@ -602,7 +604,7 @@ impl<'a> Tokenizer<'a> {
                     }
                     _ => {
                         // regular identifier starting with an "E" or "e"
-                        let s = self.tokenize_word(x.to_string(), chars);
+                        let s = self.tokenize_word(x, chars);
                         Ok(Some(Token::make_word(&s, None)))
                     }
                 }
@@ -619,7 +621,7 @@ impl<'a> Tokenizer<'a> {
                     }
                     _ => {
                         // regular identifier starting with an "X"
-                        let s = self.tokenize_word(x.to_string(), chars);
+                        let s = self.tokenize_word(x, chars);
                         Ok(Some(Token::make_word(&s, None)))
                     }
                 }
@@ -794,9 +796,7 @@ impl<'a> Tokenizer<'a> {
                 match chars.peek() {
                     Some(' ') => self.consume_and_return(chars, Token::Mod),
                     Some(sch) if self.dialect.is_identifier_start('%') => {
-                        let mut s = ch.to_string();
-                        s.push_str(&sch.to_string());
-                        self.tokenize_identifier_or_keyword(s, chars)
+                        self.tokenize_identifier_or_keyword([ch, *sch], chars)
                     }
                     _ => self.consume_and_return(chars, Token::Mod),
                 }
@@ -917,9 +917,7 @@ impl<'a> Tokenizer<'a> {
                     }
                     Some(' ') => Ok(Some(Token::Sharp)),
                     Some(sch) if self.dialect.is_identifier_start('#') => {
-                        let mut s = ch.to_string();
-                        s.push_str(&sch.to_string());
-                        self.tokenize_identifier_or_keyword(s, chars)
+                        self.tokenize_identifier_or_keyword([ch, *sch], chars)
                     }
                     _ => Ok(Some(Token::Sharp)),
                 }
@@ -934,19 +932,14 @@ impl<'a> Tokenizer<'a> {
                         match chars.peek() {
                             Some(' ') => Ok(Some(Token::AtAt)),
                             Some(tch) if self.dialect.is_identifier_start('@') => {
-                                let mut s = ch.to_string();
-                                s.push('@');
-                                s.push_str(&tch.to_string());
-                                self.tokenize_identifier_or_keyword(s, chars)
+                                self.tokenize_identifier_or_keyword([ch, '@', *tch], chars)
                             }
                             _ => Ok(Some(Token::AtAt)),
                         }
                     }
                     Some(' ') => Ok(Some(Token::AtSign)),
                     Some(sch) if self.dialect.is_identifier_start('@') => {
-                        let mut s = ch.to_string();
-                        s.push_str(&sch.to_string());
-                        self.tokenize_identifier_or_keyword(s, chars)
+                        self.tokenize_identifier_or_keyword([ch, *sch], chars)
                     }
                     _ => Ok(Some(Token::AtSign)),
                 }
@@ -959,7 +952,7 @@ impl<'a> Tokenizer<'a> {
 
             // identifier or keyword
             ch if self.dialect.is_identifier_start(ch) => {
-                self.tokenize_identifier_or_keyword(ch.to_string(), chars)
+                self.tokenize_identifier_or_keyword([ch], chars)
             }
             '$' => Ok(Some(self.tokenize_dollar_preceded_value(chars)?)),
 
@@ -1086,8 +1079,8 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Tokenize an identifier or keyword, after the first char is already consumed.
-    fn tokenize_word(&self, first_chars: String, chars: &mut State) -> String {
-        let mut s = first_chars;
+    fn tokenize_word(&self, first_chars: impl Into<String>, chars: &mut State) -> String {
+        let mut s = first_chars.into();
         s.push_str(&peeking_take_while(chars, |ch| {
             self.dialect.is_identifier_part(ch)
         }));
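Similarly, `tokenize_word` now takes `impl Into<String>`, which accepts both a bare `char` (via the standard library's `impl From<char> for String`) and an already-built `String`. A small sketch of why the single-char call sites such as `self.tokenize_word(b, chars)` compile after this change (`start_word` is a hypothetical helper mirroring the new parameter type, not crate API):

```rust
// Hypothetical helper with the same parameter type as the new `tokenize_word`.
fn start_word(first_chars: impl Into<String>) -> String {
    first_chars.into()
}

fn main() {
    // `String: From<char>` in the standard library, which is what lets the
    // call sites in the earlier hunks drop their `.to_string()` calls.
    assert_eq!(start_word('b'), "b");
    // An owned String still works unchanged.
    assert_eq!(start_word(String::from("xy")), "xy");
}
```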