Restrict noqa comment to only apply to physical line with comment instead of logical line

Fixes #5

plinss committed Jun 13, 2022
1 parent c5f951c commit 999b128
Showing 2 changed files with 8 additions and 6 deletions.
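Note (not part of the commit): the change replaces a start_line value captured from the first token of the logical line with a property computed from the comment token itself. Below is a minimal sketch, using only the standard tokenize module, of why those two rows differ for a multi-line statement; the source string and names in it are illustrative, not taken from the repository.

import io
import tokenize

# Assumed example: a call spanning three physical lines, with the noqa
# comment on the last one.
source = "x = some_call(\n    argument_one,\n    argument_two)  # noqa: E501\n"

tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))
comment = next(tok for tok in tokens if tok.type == tokenize.COMMENT)

print(tokens[0].start[0])  # 1: start row of the logical line's first token (old start_line)
print(comment.start[0])    # 3: row of the physical line holding the comment (new start_line)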
flake8_noqa/noqa_checker.py: 2 changes (1 addition & 1 deletion)

@@ -95,7 +95,7 @@ def __iter__(self) -> Iterator[Tuple[Tuple[int, int], str]]:
 						sep_name='colon' if (':' in file_comment.sep) else 'equals',
 						noqa=file_comment.noqa)
 
-			inline_comment = InlineComment.match(token, self.tokens[0])
+			inline_comment = InlineComment.match(token)
 			if (inline_comment):
 				noqa_filter.InlineComment.add_comment(self.filename, inline_comment)
 				if (not inline_comment.valid):
flake8_noqa/noqa_comment.py: 12 changes (7 additions & 5 deletions)

@@ -50,16 +50,15 @@ class InlineComment:
 	codes: str
 	valid: bool
 	flake8_codes: str
-	start_line: int
 	token: tokenize.TokenInfo
 
 	@classmethod
-	def match(cls, token: tokenize.TokenInfo, line_start_token: tokenize.TokenInfo) -> Optional['InlineComment']:
+	def match(cls, token: tokenize.TokenInfo) -> Optional['InlineComment']:
 		"""Create an InlineComment if it matches the token."""
 		match = NOQA_INLINE.match(token.string)
 		if (not match):
 			return None
-		return InlineComment(match, token, line_start_token)
+		return InlineComment(match, token)
 
 	@classmethod
 	def add_comment(cls, filename: str, comment: 'InlineComment') -> None:

@@ -75,7 +74,7 @@ def start_line(comment: InlineComment) -> int:
 			return comment.start_line
 		return sorted(cls.comments.get(filename, []), key=start_line)
 
-	def __init__(self, match: Match, token: tokenize.TokenInfo, line_start_token: tokenize.TokenInfo) -> None:
+	def __init__(self, match: Match, token: tokenize.TokenInfo) -> None:
 		self.noqa = match.group('noqa')
 		self.sep = match.group('sep') or ''
 		self.codes = match.group('codes') or ''

@@ -84,9 +83,12 @@ def __init__(self, match: Match, token: tokenize.TokenInfo, line_start_token: tokenize.TokenInfo) -> None:
 		self.valid = (flake8_match is not None)
 		self.flake8_codes = (flake8_match.group('codes') or '') if (flake8_match is not None) else ''
 
-		self.start_line = line_start_token.start[0]
 		self.token = token
 
+	@property
+	def start_line(self) -> int:
+		return self.token.start[0]
+
 	@property
 	def end_line(self) -> int:
 		return self.token.start[0]
