8
8
// option. This file may not be copied, modified, or distributed
9
9
// except according to those terms.
10
10
11
+ use convert:: TryFrom ;
11
12
use mem;
12
13
use ops:: { self , Add , Sub } ;
13
14
use usize;
@@ -21,7 +22,7 @@ use super::{FusedIterator, TrustedLen};
21
22
#[ unstable( feature = "step_trait" ,
22
23
reason = "likely to be replaced by finer-grained traits" ,
23
24
issue = "42168" ) ]
24
- pub trait Step : PartialOrd + Sized {
25
+ pub trait Step : Clone + PartialOrd + Sized {
25
26
/// Returns the number of steps between two step objects. The count is
26
27
/// inclusive of `start` and exclusive of `end`.
27
28
///
@@ -40,6 +41,9 @@ pub trait Step: PartialOrd + Sized {
40
41
41
42
/// Subtracts one to this step, returning the result
42
43
fn sub_one ( & self ) -> Self ;
44
+
45
+ /// Add an usize, returning None on overflow
46
+ fn add_usize ( & self , n : usize ) -> Option < Self > ;
43
47
}
44
48
45
49
// These are still macro-generated because the integer literals resolve to different types.
@@ -84,12 +88,20 @@ macro_rules! step_impl_unsigned {
84
88
}
85
89
}
86
90
91
#[inline]
fn add_usize(&self, n: usize) -> Option<Self> {
    // Add `n` to `self`, returning `None` on overflow.
    //
    // Convert `n` into the unsigned step type `$t` first: if `n` does
    // not fit in `$t` (only possible when `$t` is narrower than
    // `usize`), the sum cannot be representable either, so that is
    // already an overflow.
    match <$t>::try_from(n) {
        // `checked_add` reports arithmetic overflow as `None`.
        Ok(n_as_t) => self.checked_add(n_as_t),
        Err(_) => None,
    }
}
98
+
87
99
step_identical_methods!( ) ;
88
100
}
89
101
) * )
90
102
}
91
103
macro_rules! step_impl_signed {
92
- ( $( $t: ty) * ) => ( $(
104
+ ( $( [ $t: ty : $unsigned : ty ] ) * ) => ( $(
93
105
#[ unstable( feature = "step_trait" ,
94
106
reason = "likely to be replaced by finer-grained traits" ,
95
107
issue = "42168" ) ]
@@ -107,6 +119,24 @@ macro_rules! step_impl_signed {
107
119
}
108
120
}
109
121
122
#[inline]
fn add_usize(&self, n: usize) -> Option<Self> {
    // Add `n` to this signed value, returning `None` on overflow.
    //
    // The addition is routed through the same-width unsigned type
    // `$unsigned` so that values of `n` larger than `$t::MAX` can
    // still yield an in-range result when `self` is negative.
    match <$unsigned>::try_from(n) {
        Ok(n_as_unsigned) => {
            // Wrapping in unsigned space handles cases like
            // `-120_i8.add_usize(200) == Some(80_i8)`,
            // even though 200_usize is out of range for i8.
            let wrapped = (*self as $unsigned).wrapping_add(n_as_unsigned) as $t;
            // Adding a non-negative amount can never make the result
            // smaller; if it did, the true sum wrapped past `$t::MAX`.
            if wrapped >= *self {
                Some(wrapped)
            } else {
                None // Addition overflowed
            }
        }
        // `n` does not even fit in the unsigned counterpart of `$t`,
        // so the sum certainly overflows.
        Err(_) => None,
    }
}
139
+
110
140
step_identical_methods!( ) ;
111
141
}
112
142
) * )
@@ -123,17 +153,22 @@ macro_rules! step_impl_no_between {
123
153
None
124
154
}
125
155
156
#[inline]
fn add_usize(&self, n: usize) -> Option<Self> {
    // Add `n` to `self`, returning `None` on overflow.
    //
    // This macro is only instantiated for types at least as wide as
    // `usize` (see the pointer-width comment near the invocations), so
    // the `as $t` cast is presumed lossless here — `checked_add` then
    // handles the actual overflow check.
    self.checked_add(n as $t)
}
160
+
126
161
step_identical_methods!( ) ;
127
162
}
128
163
) * )
129
164
}
130
165
131
166
step_impl_unsigned ! ( usize u8 u16 u32 ) ;
132
- step_impl_signed ! ( isize i8 i16 i32 ) ;
167
+ step_impl_signed ! ( [ isize : usize ] [ i8 : u8 ] [ i16 : u16 ] [ i32 : u32 ] ) ;
133
168
#[ cfg( target_pointer_width = "64" ) ]
134
169
step_impl_unsigned ! ( u64 ) ;
135
170
#[ cfg( target_pointer_width = "64" ) ]
136
- step_impl_signed ! ( i64 ) ;
171
+ step_impl_signed ! ( [ i64 : u64 ] ) ;
137
172
// If the target pointer width is not 64-bits, we
138
173
// assume here that it is less than 64-bits.
139
174
#[ cfg( not( target_pointer_width = "64" ) ) ]
@@ -194,6 +229,19 @@ impl<A: Step> Iterator for ops::Range<A> {
194
229
None => ( 0 , None )
195
230
}
196
231
}
232
+
233
+ #[ inline]
234
+ fn nth ( & mut self , n : usize ) -> Option < A > {
235
+ if let Some ( plus_n) = self . start . add_usize ( n) {
236
+ if plus_n < self . end {
237
+ self . start = plus_n. add_one ( ) ;
238
+ return Some ( plus_n)
239
+ }
240
+ }
241
+
242
+ self . start = self . end . clone ( ) ;
243
+ None
244
+ }
197
245
}
198
246
199
247
// These macros generate `ExactSizeIterator` impls for various range types.
@@ -211,7 +259,7 @@ range_trusted_len_impl!(usize isize u8 i8 u16 i16 u32 i32 i64 u64);
211
259
range_incl_trusted_len_impl ! ( usize isize u8 i8 u16 i16 u32 i32 i64 u64 ) ;
212
260
213
261
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
214
- impl < A : Step + Clone > DoubleEndedIterator for ops:: Range < A > {
262
+ impl < A : Step > DoubleEndedIterator for ops:: Range < A > {
215
263
#[ inline]
216
264
fn next_back ( & mut self ) -> Option < A > {
217
265
if self . start < self . end {
@@ -241,6 +289,13 @@ impl<A: Step> Iterator for ops::RangeFrom<A> {
241
289
fn size_hint ( & self ) -> ( usize , Option < usize > ) {
242
290
( usize:: MAX , None )
243
291
}
292
+
293
#[inline]
fn nth(&mut self, n: usize) -> Option<A> {
    // `RangeFrom` has no upper bound, so the only way `nth` can fail
    // is arithmetic overflow in `start + n` — in that case we panic
    // rather than silently wrap.
    let plus_n = self.start.add_usize(n).expect("overflow in RangeFrom::nth");
    // Advance past the yielded element.
    self.start = plus_n.add_one();
    Some(plus_n)
}
244
299
}
245
300
246
301
#[ unstable( feature = "fused" , issue = "35602" ) ]
@@ -279,6 +334,30 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
279
334
None => ( 0 , None ) ,
280
335
}
281
336
}
337
+
338
#[inline]
fn nth(&mut self, n: usize) -> Option<A> {
    // Jump straight to `start + n`; `add_usize` returns `None` on
    // overflow, which means the target is unreachable.
    if let Some(plus_n) = self.start.add_usize(n) {
        use cmp::Ordering::*;

        match plus_n.partial_cmp(&self.end) {
            // Strictly inside the inclusive range: yield and advance.
            Some(Less) => {
                self.start = plus_n.add_one();
                return Some(plus_n)
            }
            // Landed exactly on `end`: yield it, then put the range
            // into its exhausted state (presumably `start = 1`,
            // `end = 0`, i.e. start > end — NOTE(review): confirm
            // against `replace_one`/`replace_zero` semantics).
            Some(Equal) => {
                self.start.replace_one();
                self.end.replace_zero();
                return Some(plus_n)
            }
            // Past the end (or incomparable): fall through to the
            // exhaustion path below.
            _ => {}
        }
    }

    // Overflow or target beyond `end`: exhaust the range so later
    // calls return `None`.
    self.start.replace_one();
    self.end.replace_zero();
    None
}
282
361
}
283
362
284
363
#[ unstable( feature = "inclusive_range" , reason = "recently added, follows RFC" , issue = "28237" ) ]
0 commit comments