@@ -958,22 +958,25 @@ def compound_statements(logical_line):
958958 line = logical_line
959959 last_char = len (line ) - 1
960960 found = line .find (':' )
961+ prev_found = 0
962+ counts = dict ((char , 0 ) for char in '{}[]()' )
961963 while - 1 < found < last_char :
962- before = line [:found ]
963- if ((before . count ( '{' ) <= before . count ( '}' ) and # {'a': 1} (dict)
964- before . count ( '[' ) <= before . count ( ']' ) and # [1:2] (slice)
965- before . count ( '(' ) <= before . count ( ')' ) )): # (annotation)
966- lambda_kw = LAMBDA_REGEX .search (before )
964+ update_counts ( line [prev_found :found ], counts )
965+ if ((counts [ '{' ] <= counts [ '}' ] and # {'a': 1} (dict)
966+ counts [ '[' ] <= counts [ ']' ] and # [1:2] (slice)
967+ counts [ '(' ] <= counts [ ')' ] )): # (annotation)
968+ lambda_kw = LAMBDA_REGEX .search (line , 0 , found )
967969 if lambda_kw :
968970 before = line [:lambda_kw .start ()].rstrip ()
969971 if before [- 1 :] == '=' and isidentifier (before [:- 1 ].strip ()):
970972 yield 0 , ("E731 do not assign a lambda expression, use a "
971973 "def" )
972974 break
973- if before .startswith ('def ' ):
975+ if line .startswith ('def ' ):
974976 yield 0 , "E704 multiple statements on one line (def)"
975977 else :
976978 yield found , "E701 multiple statements on one line (colon)"
979+ prev_found = found
977980 found = line .find (':' , found + 1 )
978981 found = line .find (';' )
979982 while - 1 < found :
@@ -1333,8 +1336,18 @@ def filename_match(filename, patterns, default=True):
13331336 return any (fnmatch (filename , pattern ) for pattern in patterns )
13341337
13351338
def update_counts(s, counts):
    """Add one to the count of each character of *s* present in *counts*.

    *counts* is a dict mapping characters to running integer tallies and
    is mutated in place; characters of *s* that are not already keys of
    *counts* are ignored.  Used to track bracket balance incrementally
    instead of re-scanning the whole line prefix on every colon.
    """
    for char in s:
        if char in counts:
            counts[char] += 1
1345+
1346+
def _is_eol_token(token):
    """Return True if *token* ends its physical line.

    A token ends the line when it is a NEWLINE/NL token, or when the
    only text following it on its line is a backslash continuation.
    """
    if token[0] in NEWLINE:
        return True
    # token[4] is the physical line, token[3][1] the token's end column;
    # check whether the remainder of the line is just "\<newline>".
    rest_of_line = token[4][token[3][1]:]
    return rest_of_line.lstrip() == '\\\n'
1349+
1350+
13381351if COMMENT_WITH_NL :
13391352 def _is_eol_token (token , _eol_token = _is_eol_token ):
13401353 return _eol_token (token ) or (token [0 ] == tokenize .COMMENT and
0 commit comments