@@ -218,7 +218,14 @@ def maximum_line_length(physical_line, max_line_length, multiline):
     if length > max_line_length and not noqa(line):
         # Sometimes, long lines in docstrings are hard to avoid -- like,
         # a long URL that can't be wrapped because it has no whitespace.
-        if multiline and re.match(r'^\s*\S+$', line):
+        # And a similar case exists with URLs in comments.
+        #
+        # A check is added to still report the error when the first 72
+        # chars are whitespace (79 - 7).
+        chunks = line.split()
+        if ((len(chunks) == 1 and multiline) or
+            (len(chunks) == 2 and chunks[0] == '#')) and \
+                len(line) - len(chunks[-1]) < max_line_length - 7:
             return
         if hasattr(line, 'decode'):   # Python 2
             # The line could contain multi-byte characters
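Note (not part of the commit): a minimal sketch of what the new condition exempts, reusing the names from the hunk above; the helper `is_exempt` and the sample lines are hypothetical, added only to illustrate the rule.

```python
def is_exempt(line, multiline=False, max_line_length=79):
    # Mirrors the new special case: a single long token inside a
    # multiline string, or a lone URL after '#' in a comment, is not
    # reported as long as the text before that final token fits in
    # max_line_length - 7 columns.
    chunks = line.split()
    return (((len(chunks) == 1 and multiline) or
             (len(chunks) == 2 and chunks[0] == '#')) and
            len(line) - len(chunks[-1]) < max_line_length - 7)

url = 'https://example.com/' + 'x' * 80
print(is_exempt('# ' + url))                     # True: bare URL in a comment
print(is_exempt('# see ' + url))                 # False: extra word, still reported
print(is_exempt('    ' + url, multiline=True))   # True: lone token in a docstring
```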
@@ -1351,8 +1358,8 @@ def generate_tokens(self):
         tokengen = tokenize.generate_tokens(self.readline)
         try:
             for token in tokengen:
-                yield token
                 self.maybe_check_physical(token)
+                yield token
         except (SyntaxError, tokenize.TokenError):
             self.report_invalid_syntax()

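In other words, the physical-line checks for a token now run before the token is handed to the caller, so their reports come out ahead of whatever the consumer does with that token. A hedged sketch of the resulting control flow (the function name below is illustrative, not the real Checker API):

```python
def tokens_with_checks(tokengen, maybe_check_physical):
    # After the change: run the physical-line check first, then yield,
    # so physical reports precede the consumer's handling of the token.
    for token in tokengen:
        maybe_check_physical(token)
        yield token
```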
@@ -1365,7 +1372,7 @@ def maybe_check_physical(self, token):
         if token[0] in (tokenize.NEWLINE, tokenize.NL):
             # Obviously, a newline token ends a single physical line.
             self.check_physical(token[4])
-        elif token[0] == tokenize.STRING and token[1].count('\n'):
+        elif token[0] == tokenize.STRING and '\n' in token[1]:
             # Less obviously, a string that contains newlines is a
             # multiline string, either triple-quoted or with internal
             # newlines backslash-escaped. Check every physical line in the
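The membership test is a more direct way to ask "does this STRING token span several physical lines?" than checking whether the newline count is non-zero. A small standalone illustration (not part of the commit), using the standard tokenize module:

```python
import io
import tokenize

src = 's = """first\nsecond"""\n'
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
    if tok[0] == tokenize.STRING and '\n' in tok[1]:
        # tok[1] is the full quoted source of the string, so a newline
        # inside it means the literal spans multiple physical lines.
        print('multiline string starting at line', tok[2][0])
```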