Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Jun 30, 2024
1 parent 4b4ea5c commit c07134e
Showing 2 changed files with 38 additions and 25 deletions.
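For orientation before the diff: the feature being reformatted here is pylint's check-fixme-in-docstring option, which extends the fixme (W0511) check from comments into docstrings. A hypothetical module it would flag once the option is enabled (contents invented for illustration; the behavior is inferred from the tests further down):

    """Sketch of a module the new check targets.

    FIXME: flagged when check-fixme-in-docstring is enabled
    TODO also flagged; the tag must be followed by ':', whitespace, or end of line
    """


    def helper() -> None:
        """
        XXX notes in function docstrings are reported too
        """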
28 changes: 20 additions & 8 deletions pylint/checkers/misc.py
@@ -96,20 +96,24 @@ class EncodingChecker(BaseTokenChecker, BaseRawFileChecker):
                 "type": "yn",
                 "metavar": "<y or n>",
                 "default": False,
-                "help": "Whether or not to search for fixme's in docstrings."
-            }
-        )
+                "help": "Whether or not to search for fixme's in docstrings.",
+            },
+        ),
+    )

     def open(self) -> None:
         super().open()

         notes = "|".join(re.escape(note) for note in self.linter.config.notes)
         if self.linter.config.notes_rgx:
-            comment_regex = rf"#\s*({notes}|{self.linter.config.notes_rgx})(?=(:|\s|\Z))"
+            comment_regex = (
+                rf"#\s*({notes}|{self.linter.config.notes_rgx})(?=(:|\s|\Z))"
+            )
             self._comment_fixme_pattern = re.compile(comment_regex, re.I)
             if self.linter.config.check_fixme_in_docstring:
-                docstring_regex = rf"\"\"\"\s*({notes}|{self.linter.config.notes_rgx})(?=(:|\s|\Z))"
+                docstring_regex = (
+                    rf"\"\"\"\s*({notes}|{self.linter.config.notes_rgx})(?=(:|\s|\Z))"
+                )
                 self._docstring_fixme_pattern = re.compile(docstring_regex, re.I)
         else:
             comment_regex = rf"#\s*({notes})(?=(:|\s|\Z))"
@@ -149,15 +153,20 @@ def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
             return
         for token_info in tokens:
             if token_info.type == tokenize.COMMENT:
-                comment_text = token_info.string[1:].lstrip()  # trim '#' and white-spaces
+                comment_text = token_info.string[
+                    1:
+                ].lstrip()  # trim '#' and white-spaces
                 if self._comment_fixme_pattern.search("#" + comment_text.lower()):
                     self.add_message(
                         "fixme",
                         col_offset=token_info.start[1] + 1,
                         args=comment_text,
                         line=token_info.start[0],
                     )
-            elif self.linter.config.check_fixme_in_docstring and self._is_docstring_comment(token_info):
+            elif (
+                self.linter.config.check_fixme_in_docstring
+                and self._is_docstring_comment(token_info)
+            ):
                 docstring_lines = token_info.string.split("\n")
                 for line_no, line in enumerate(docstring_lines):
                     if line.startswith('"""'):
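A side note on the two branches above: tokenize delivers a comment with the leading '#' still attached, while a docstring arrives as a single multi-line STRING token, which is why the comment branch slices string[1:] and the docstring branch splits on newlines. A minimal stdlib-only sketch, not part of the diff:

    import io
    import tokenize

    src = '# TODO: tidy up\n"""\nFIXME resolve this\n"""\n'
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        if tok.type == tokenize.COMMENT:
            print(repr(tok.string[1:].lstrip()))  # 'TODO: tidy up' -- '#' trimmed
        elif tok.type == tokenize.STRING:
            print(tok.string.split("\n"))  # ['"""', 'FIXME resolve this', '"""']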
@@ -174,7 +183,10 @@ def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
                     )

     def _is_docstring_comment(self, token_info: tokenize.TokenInfo) -> bool:
-        return token_info.type == tokenize.STRING and token_info.line.lstrip().startswith('"""')
+        return (
+            token_info.type == tokenize.STRING
+            and token_info.line.lstrip().startswith('"""')
+        )


 def register(linter: PyLinter) -> None:
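For reference, a standalone sketch of the patterns open() compiles, with pylint's default note tags (FIXME, XXX, TODO) inlined in place of the notes option; the notes-rgx alternation shown in the diff is omitted here:

    import re

    notes = "|".join(re.escape(note) for note in ("FIXME", "XXX", "TODO"))
    comment_pattern = re.compile(rf"#\s*({notes})(?=(:|\s|\Z))", re.I)
    docstring_pattern = re.compile(rf"\"\"\"\s*({notes})(?=(:|\s|\Z))", re.I)

    print(bool(comment_pattern.search("# todo: later")))    # True -- matching is case-insensitive
    print(bool(docstring_pattern.search('"""FIXME soon')))  # True
    print(bool(docstring_pattern.search('"""FIXMES')))      # False -- lookahead wants ':', whitespace, or end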
35 changes: 18 additions & 17 deletions tests/checkers/unittest_misc.py
@@ -124,7 +124,6 @@ def test_docstring_with_message(self) -> None:
         """
         with self.assertAddsMessages(
             MessageTest(msg_id="fixme", line=2, args="FIXME message", col_offset=9)
-
         ):
             self.checker.process_tokens(_tokenize_str(code))

@@ -150,7 +149,7 @@ def test_docstring_with_nl_message_multi(self) -> None:
         """
         with self.assertAddsMessages(
             MessageTest(msg_id="fixme", line=3, args="FIXME this", col_offset=9),
-            MessageTest(msg_id="fixme", line=4, args="TODO: that", col_offset=9)
+            MessageTest(msg_id="fixme", line=4, args="TODO: that", col_offset=9),
         ):
             self.checker.process_tokens(_tokenize_str(code))

@@ -166,7 +165,7 @@ def test_docstring_with_comment(self) -> None:
         with self.assertAddsMessages(
             MessageTest(msg_id="fixme", line=2, args="XXX message1", col_offset=9),
             MessageTest(msg_id="fixme", line=4, args="FIXME message2", col_offset=9),
-            MessageTest(msg_id="fixme", line=5, args="TODO message3", col_offset=9)
+            MessageTest(msg_id="fixme", line=5, args="TODO message3", col_offset=9),
         ):
             self.checker.process_tokens(_tokenize_str(code))

@@ -206,29 +205,27 @@ def test_docstring_todo_mult(self) -> None:
         \"\"\"
         """
         with self.assertAddsMessages(
-            MessageTest(msg_id="fixme", line=3, args="FIXME this TODO that", col_offset=9),
+            MessageTest(
+                msg_id="fixme", line=3, args="FIXME this TODO that", col_offset=9
+            ),
         ):
             self.checker.process_tokens(_tokenize_str(code))

-    @set_config(
-        check_fixme_in_docstring=True,
-        notes=["CODETAG"]
-    )
-
+    @set_config(check_fixme_in_docstring=True, notes=["CODETAG"])
     def test_docstring_custom_note(self) -> None:
         code = """
         \"\"\"
         CODETAG implement this
         \"\"\"
         """
         with self.assertAddsMessages(
-            MessageTest(msg_id="fixme", line=3, args="CODETAG implement this", col_offset=9),
+            MessageTest(
+                msg_id="fixme", line=3, args="CODETAG implement this", col_offset=9
+            ),
         ):
             self.checker.process_tokens(_tokenize_str(code))

-    @set_config(
-        check_fixme_in_docstring=True,
-        notes_rgx="FIX.*"
-    )
-
+    @set_config(check_fixme_in_docstring=True, notes_rgx="FIX.*")
     def test_docstring_custom_rgx(self) -> None:
         code = """
         \"\"\"
@@ -237,7 +234,11 @@ def test_docstring_custom_rgx(self) -> None:
         \"\"\"
         """
         with self.assertAddsMessages(
-            MessageTest(msg_id="fixme", line=3, args="FIXME implement this", col_offset=9),
-            MessageTest(msg_id="fixme", line=4, args="FIXTHIS also implement this", col_offset=9),
+            MessageTest(
+                msg_id="fixme", line=3, args="FIXME implement this", col_offset=9
+            ),
+            MessageTest(
+                msg_id="fixme", line=4, args="FIXTHIS also implement this", col_offset=9
+            ),
         ):
             self.checker.process_tokens(_tokenize_str(code))
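To make the custom-regex case above concrete: with notes_rgx="FIX.*", both FIXME and FIXTHIS match, which is exactly what test_docstring_custom_rgx asserts. A minimal sketch of just the regex half (the pattern the checker actually compiles also ORs in the plain notes tags):

    import re

    pattern = re.compile(r"\"\"\"\s*(FIX.*)(?=(:|\s|\Z))", re.I)
    for line in ('"""FIXME implement this', '"""FIXTHIS also implement this'):
        print(bool(pattern.search(line)))  # True, True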
