@@ -271,28 +271,10 @@ def process_tokens(self, tokens):
271
271
# tokens[index][2] is the actual position and also is
272
272
# reported by IronPython.
273
273
self ._elifs .extend ([tokens [index ][2 ], tokens [index + 1 ][2 ]])
274
- elif six .PY3 and token .exact_type == tokenize .COMMA :
275
- self ._check_one_element_trailing_comma_tuple (tokens , token , index )
276
-
277
- def _check_one_element_trailing_comma_tuple (self , tokens , token , index ):
278
- left_tokens = itertools .islice (tokens , index + 1 , None )
279
- same_line_remaining_tokens = list (itertools .takewhile (
280
- lambda other_token , _token = token : other_token .start [0 ] == _token .start [0 ],
281
- left_tokens
282
- ))
283
- is_last_element = all (
284
- other_token .type in (tokenize .NEWLINE , tokenize .COMMENT )
285
- for other_token in same_line_remaining_tokens
286
- )
287
-
288
- if not same_line_remaining_tokens or not is_last_element :
289
- return
290
-
291
- assign_token = tokens [index - 2 :index - 1 ]
292
- if assign_token and '=' in assign_token [0 ].string :
293
- if self .linter .is_message_enabled ('trailing-comma-tuple' ):
294
- self .add_message ('trailing-comma-tuple' ,
295
- line = token .start [0 ])
274
+ elif six .PY3 and is_trailing_comma (tokens , index ):
275
+ if self .linter .is_message_enabled ('trailing-comma-tuple' ):
276
+ self .add_message ('trailing-comma-tuple' ,
277
+ line = token .start [0 ])
296
278
297
279
def leave_module (self , _ ):
298
280
self ._init ()
@@ -834,6 +816,46 @@ def visit_compare(self, node):
834
816
self .add_message ('len-as-condition' , node = node )
835
817
836
818
819
+ def is_trailing_comma (tokens , index ):
820
+ """Check if the given token is a trailing comma
821
+
822
+ :param tokens: Sequence of modules tokens
823
+ :type tokens: list[tokenize.TokenInfo]
824
+ :param int index: Index of token under check in tokens
825
+ :returns: True if the token is a comma which trails an expression
826
+ :rtype: bool
827
+ """
828
+ token = tokens [index ]
829
+ if token .exact_type != tokenize .COMMA :
830
+ return False
831
+ # Must have remaining tokens on the same line such as NEWLINE
832
+ left_tokens = itertools .islice (tokens , index + 1 , None )
833
+ same_line_remaining_tokens = list (itertools .takewhile (
834
+ lambda other_token , _token = token : other_token .start [0 ] == _token .start [0 ],
835
+ left_tokens
836
+ ))
837
+ # Note: If the newline is tokenize.NEWLINE and not tokenize.NL
838
+ # then the newline denotes the end of expression
839
+ is_last_element = all (
840
+ other_token .type in (tokenize .NEWLINE , tokenize .COMMENT )
841
+ for other_token in same_line_remaining_tokens
842
+ )
843
+ if not same_line_remaining_tokens or not is_last_element :
844
+ return False
845
+ def get_curline_index_start ():
846
+ """Get the index denoting the start of the current line"""
847
+ for subindex , token in enumerate (reversed (tokens [:index ])):
848
+ # See Lib/tokenize.py and Lib/token.py in cpython for more info
849
+ if token .type in (tokenize .NEWLINE , tokenize .NL ):
850
+ return index - subindex
851
+ return 0
852
+ curline_start = get_curline_index_start ()
853
+ for prevtoken in tokens [curline_start :index ]:
854
+ if '=' in prevtoken .string :
855
+ return True
856
+ return False
857
+
858
+
837
859
def register(linter):
    """Required method to auto register this checker."""
    checker = RefactoringChecker(linter)
    linter.register_checker(checker)
0 commit comments