11#!/usr/bin/env python
22#
3- # Copyright (C) 2012-2022 Steven Myint
3+ # Copyright (C) 2012-2023 Steven Myint
44#
55# Permission is hereby granted, free of charge, to any person obtaining
66# a copy of this software and associated documentation files (the
@@ -229,13 +229,13 @@ def _do_format_code(self, source):
229229 The text from the source file.
230230 """
231231 try :
232- original_newline = self .encodor .do_find_newline (
232+ _original_newline = self .encodor .do_find_newline (
233233 source .splitlines (True )
234234 )
235- code = self ._format_code (source )
235+ _code = self ._format_code (source )
236236
237237 return _strings .normalize_line_endings (
238- code .splitlines (True ), original_newline
238+ _code .splitlines (True ), _original_newline
239239 )
240240 except (tokenize .TokenError , IndentationError ):
241241 return source
@@ -310,29 +310,16 @@ def _format_code(
310310 only_comments_so_far = False
311311
312312 previous_token_type = token_type
313+ modified_tokens .append (
314+ (token_type , token_string , start , end , line )
315+ )
313316
314- # If the current token is a newline, the previous token was a
315- # newline or a comment, and these two sequential newlines
316- # follow a function or method definition, ignore the blank
317- # line before the docstring.
318- if (
319- len (modified_tokens ) <= 2
320- or token_type not in {tokenize .NL , tokenize .NEWLINE }
321- or modified_tokens [- 1 ][0 ]
322- not in {tokenize .NL , tokenize .NEWLINE }
323- or modified_tokens [- 2 ][1 ] != ":"
324- and modified_tokens [- 2 ][0 ] != tokenize .COMMENT
325- or not modified_tokens [- 2 ][4 ].lstrip ().startswith (("def" ))
326- ):
327- modified_tokens .append (
328- (token_type , token_string , start , end , line )
329- )
330- modified_tokens = self ._do_remove_blank_lines_after_method (
331- modified_tokens
332- )
333- modified_tokens = self ._do_remove_blank_lines_before_class (
334- modified_tokens
335- )
317+ modified_tokens = self ._do_remove_blank_lines_after_definitions (
318+ modified_tokens
319+ )
320+ modified_tokens = self ._do_remove_blank_lines_after_docstring (
321+ modified_tokens
322+ )
336323
337324 return untokenize .untokenize (modified_tokens )
338325 except tokenize .TokenError :
@@ -521,8 +508,11 @@ def _do_format_multiline_docstring(
521508 '''
522509
523510 @staticmethod
524- def _do_remove_blank_lines_after_method (modified_tokens ):
525- """Remove blank lines after method docstring.
511+ def _do_remove_blank_lines_after_definitions (modified_tokens ):
512+ """Remove blank lines between definitions and docstrings.
513+
514+ Blank lines between class, method, function, and variable
515+ definitions and the docstring will be removed.
526516
527517 Parameters
528518 ----------
@@ -532,23 +522,37 @@ def _do_remove_blank_lines_after_method(modified_tokens):
532522 Returns
533523 -------
534524 modified_tokens: list
535- The list of tokens with any blank lines following a method
536- docstring removed.
525+ The list of tokens with any blank lines following a variable
526+ definition removed.
537527 """
538- with contextlib .suppress (IndexError ):
539- if (
540- modified_tokens [- 1 ][4 ] == "\n "
541- and modified_tokens [- 2 ][4 ].lstrip ().startswith ('"""' )
542- and modified_tokens [- 5 ][4 ].lstrip ().startswith ("def" )
543- ):
544- modified_tokens .pop (- 1 )
528+ for _idx , _token in enumerate (modified_tokens ):
529+ if _token [0 ] == 3 :
530+ # Remove newline between variable definition and docstring.
531+ j = 1
532+ while modified_tokens [_idx - j ][
533+ 4
534+ ] == "\n " and not modified_tokens [_idx - j - 1 ][
535+ 4
536+ ].strip ().endswith (
537+ '"""'
538+ ):
539+ modified_tokens .pop (_idx - j )
540+ j += 1
541+
542+ # Remove newline between class, method, and function
543+ # definitions and docstring.
544+ j = 2
545+ while modified_tokens [_idx - j ][4 ] == "\n " and modified_tokens [
546+ _idx - j - 2
547+ ][4 ].strip ().startswith (("def" , "class" )):
548+ modified_tokens .pop (_idx - j )
549+ j += 1
550+
545551 return modified_tokens
546552
547553 @staticmethod
548- def _do_remove_blank_lines_before_class (modified_tokens ):
549- """Remove blank lines before class docstring.
550-
551- If there is no class docstring, leave any blank lines as is.
554+ def _do_remove_blank_lines_after_docstring (modified_tokens ):
555+ """Remove blank lines between docstring and first Python statement.
552556
553557 Parameters
554558 ----------
@@ -558,16 +562,30 @@ def _do_remove_blank_lines_before_class(modified_tokens):
558562 Returns
559563 -------
560564 modified_tokens: list
561- The list of tokens with any blank lines following a method
562- docstring removed.
565+ The list of tokens with any blank lines following a docstring
566+ removed.
563567 """
564- with contextlib .suppress (IndexError ):
565- if (
566- modified_tokens [- 3 ][4 ] == "\n "
567- and modified_tokens [- 2 ][4 ].lstrip ().startswith ('"""' )
568- and modified_tokens [- 6 ][4 ].lstrip ().startswith ("class" )
569- ):
570- modified_tokens .pop (- 3 )
568+ # Remove all newlines between docstring and first Python
569+ # statement as long as it's not a stub function.
570+ for _idx , _token in enumerate (modified_tokens ):
571+ with contextlib .suppress (IndexError ):
572+ if (
573+ _token [0 ] == 1
574+ and not _token [4 ]
575+ .lstrip ()
576+ .startswith (("class " , "def " , "@" ))
577+ and not modified_tokens [_idx - 2 ][4 ]
578+ .strip ()
579+ .endswith ('"""' )
580+ and modified_tokens [_idx - 6 ][4 ]
581+ .lstrip ()
582+ .startswith (("class " , "def " , "@" ))
583+ ):
584+ j = 1
585+ while modified_tokens [_idx - j ][4 ] == "\n " :
586+ modified_tokens .pop (_idx - j )
587+ j += 1
588+
571589 return modified_tokens
572590
573591 def _do_strip_docstring (self , docstring : str ) -> Tuple [str , str ]: