
Commit 04ecb0c

remove handling of # type: comments now that pyflakes is py3+ (#684)
1 parent 4a2407d commit 04ecb0c

7 files changed: +30 -430 lines

pyflakes/api.py (+1 -2)

@@ -44,8 +44,7 @@ def check(codeString, filename, reporter=None):
         reporter.unexpectedError(filename, 'problem decoding source')
         return 1
     # Okay, it's syntactically valid. Now check it.
-    file_tokens = checker.make_tokens(codeString)
-    w = checker.Checker(tree, file_tokens=file_tokens, filename=filename)
+    w = checker.Checker(tree, filename=filename)
     w.messages.sort(key=lambda m: m.lineno)
     for warning in w.messages:
         reporter.flake(warning)
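
The change above drops the token-stream plumbing from the check() entry point. For callers that previously mirrored this code, a minimal sketch of the updated call pattern (the source string and filename below are hypothetical):

    import ast

    from pyflakes import checker

    source = "import os\n"  # hypothetical input with one unused import
    tree = ast.parse(source)

    # No make_tokens()/file_tokens step is needed any more; the AST is enough.
    w = checker.Checker(tree, filename='example.py')
    for warning in sorted(w.messages, key=lambda m: m.lineno):
        print(warning)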

pyflakes/checker.py (+7 -101)

@@ -7,16 +7,14 @@
 import __future__
 import builtins
 import ast
-import bisect
-import collections
 import contextlib
 import doctest
 import functools
 import os
 import re
 import string
 import sys
-import tokenize
+import warnings
 
 from pyflakes import messages
 
@@ -78,16 +76,6 @@ def _is_name_or_attr(node, name): # type: (ast.AST, str) -> bool
     )
 
 
-# https://github.com/python/typed_ast/blob/1.4.0/ast27/Parser/tokenizer.c#L102-L104
-TYPE_COMMENT_RE = re.compile(r'^#\s*type:\s*')
-# https://github.com/python/typed_ast/blob/1.4.0/ast27/Parser/tokenizer.c#L1408-L1413
-ASCII_NON_ALNUM = ''.join([chr(i) for i in range(128) if not chr(i).isalnum()])
-TYPE_IGNORE_RE = re.compile(
-    TYPE_COMMENT_RE.pattern + fr'ignore([{ASCII_NON_ALNUM}]|$)')
-# https://github.com/python/typed_ast/blob/1.4.0/ast27/Grammar/Grammar#L147
-TYPE_FUNC_RE = re.compile(r'^(\(.*?\))\s*->\s*(.*)$')
-
-
 MAPPING_KEY_RE = re.compile(r'\(([^()]*)\)')
 CONVERSION_FLAG_RE = re.compile('[#0+ -]*')
 WIDTH_RE = re.compile(r'(?:\*|\d*)')
@@ -623,13 +611,6 @@ class DoctestScope(ModuleScope):
     """Scope for a doctest."""
 
 
-class DummyNode:
-    """Used in place of an `ast.AST` to set error message positions"""
-    def __init__(self, lineno, col_offset):
-        self.lineno = lineno
-        self.col_offset = col_offset
-
-
 class DetectClassScopedMagic:
     names = dir()
 
@@ -749,63 +730,6 @@ def in_annotation_func(self, *args, **kwargs):
         return in_annotation_func
 
 
-def make_tokens(code):
-    # PY3: tokenize.tokenize requires readline of bytes
-    if not isinstance(code, bytes):
-        code = code.encode('UTF-8')
-    lines = iter(code.splitlines(True))
-    # next(lines, b'') is to prevent an error in pypy3
-    return tuple(tokenize.tokenize(lambda: next(lines, b'')))
-
-
-class _TypeableVisitor(ast.NodeVisitor):
-    """Collect the line number and nodes which are deemed typeable by
-    PEP 484
-
-    https://www.python.org/dev/peps/pep-0484/#type-comments
-    """
-    def __init__(self):
-        self.typeable_lines = []
-        self.typeable_nodes = {}
-
-    def _typeable(self, node):
-        # if there is more than one typeable thing on a line last one wins
-        self.typeable_lines.append(node.lineno)
-        self.typeable_nodes[node.lineno] = node
-
-        self.generic_visit(node)
-
-    visit_Assign = visit_For = visit_FunctionDef = visit_With = _typeable
-    visit_AsyncFor = visit_AsyncFunctionDef = visit_AsyncWith = _typeable
-
-
-def _collect_type_comments(tree, tokens):
-    visitor = _TypeableVisitor()
-    visitor.visit(tree)
-
-    type_comments = collections.defaultdict(list)
-    for tp, text, start, _, _ in tokens:
-        if (
-                tp != tokenize.COMMENT or  # skip non comments
-                not TYPE_COMMENT_RE.match(text) or  # skip non-type comments
-                TYPE_IGNORE_RE.match(text)  # skip ignores
-        ):
-            continue
-
-        # search for the typeable node at or before the line number of the
-        # type comment.
-        # if the bisection insertion point is before any nodes this is an
-        # invalid type comment which is ignored.
-        lineno, _ = start
-        idx = bisect.bisect_right(visitor.typeable_lines, lineno)
-        if idx == 0:
-            continue
-        node = visitor.typeable_nodes[visitor.typeable_lines[idx - 1]]
-        type_comments[node].append((start, text))
-
-    return type_comments
-
-
 class Checker:
     """
     I check the cleanliness and sanity of Python code.
@@ -842,9 +766,6 @@ class Checker:
         builtIns.update(_customBuiltIns.split(','))
     del _customBuiltIns
 
-    # TODO: file_tokens= is required to perform checks on type comments,
-    # eventually make this a required positional argument. For now it
-    # is defaulted to `()` for api compatibility.
     def __init__(self, tree, filename='(none)', builtins=None,
                  withDoctest='PYFLAKES_DOCTEST' in os.environ, file_tokens=()):
         self._nodeHandlers = {}
@@ -862,7 +783,6 @@ def __init__(self, tree, filename='(none)', builtins=None,
             raise RuntimeError('No scope implemented for the node %r' % tree)
         self.exceptHandlers = [()]
         self.root = tree
-        self._type_comments = _collect_type_comments(tree, file_tokens)
         for builtin in self.builtIns:
            self.addBinding(None, Builtin(builtin))
         self.handleChildren(tree)
@@ -879,6 +799,12 @@ def __init__(self, tree, filename='(none)', builtins=None,
         self.popScope()
         self.checkDeadScopes()
 
+        if file_tokens:
+            warnings.warn(
+                '`file_tokens` will be removed in a future version',
+                stacklevel=2,
+            )
+
     def deferFunction(self, callable):
         """
         Schedule a function handler to be called just before completion.
@@ -1308,27 +1234,7 @@ def _in_postponed_annotation(self):
             self.annotationsFutureEnabled
         )
 
-    def _handle_type_comments(self, node):
-        for (lineno, col_offset), comment in self._type_comments.get(node, ()):
-            comment = comment.split(':', 1)[1].strip()
-            func_match = TYPE_FUNC_RE.match(comment)
-            if func_match:
-                parts = (
-                    func_match.group(1).replace('*', ''),
-                    func_match.group(2).strip(),
-                )
-            else:
-                parts = (comment,)
-
-            for part in parts:
-                self.deferFunction(functools.partial(
-                    self.handleStringAnnotation,
-                    part, DummyNode(lineno, col_offset), lineno, col_offset,
-                    messages.CommentAnnotationSyntaxError,
-                ))
-
     def handleChildren(self, tree, omit=None):
-        self._handle_type_comments(tree)
         for node in iter_child_nodes(tree, omit=omit):
             self.handleNode(node, tree)
 
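
After this commit the constructor still accepts file_tokens for API compatibility, but a non-empty value is only used to emit the deprecation-style warning shown above. A rough sketch of how a caller would observe that behavior (the parsed source and filename are illustrative):

    import ast
    import warnings

    from pyflakes import checker

    tree = ast.parse("x = 1\n")

    # file_tokens is no longer used for anything; passing it just warns.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        checker.Checker(tree, filename='example.py', file_tokens=('ignored',))

    assert any('file_tokens' in str(w.message) for w in caught)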

pyflakes/messages.py (-8)

@@ -248,14 +248,6 @@ def __init__(self, filename, loc, annotation):
         self.message_args = (annotation,)
 
 
-class CommentAnnotationSyntaxError(Message):
-    message = 'syntax error in type comment %r'
-
-    def __init__(self, filename, loc, annotation):
-        Message.__init__(self, filename, loc)
-        self.message_args = (annotation,)
-
-
 class RaiseNotImplemented(Message):
     message = "'raise NotImplemented' should be 'raise NotImplementedError'"
 
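
With CommentAnnotationSyntaxError removed along with the type-comment collection in checker.py, malformed `# type:` comments simply stop being reported. A hedged before/after sketch (the snippet is made up for illustration):

    import ast

    from pyflakes import checker

    # Before this commit the malformed type comment below was reported as
    # CommentAnnotationSyntaxError; `# type:` comments are now ignored.
    source = (
        "def f(x):\n"
        "    # type: (int -> str\n"
        "    return str(x)\n"
    )

    w = checker.Checker(ast.parse(source), filename='example.py')
    assert w.messages == []

Code that still references messages.CommentAnnotationSyntaxError directly will now fail with AttributeError and needs to drop that reference.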

pyflakes/test/harness.py (+1 -4)

@@ -16,13 +16,10 @@ class TestCase(unittest.TestCase):
 
     def flakes(self, input, *expectedOutputs, **kw):
         tree = ast.parse(textwrap.dedent(input))
-        file_tokens = checker.make_tokens(textwrap.dedent(input))
         if kw.get('is_segment'):
             tree = tree.body[0]
             kw.pop('is_segment')
-        w = checker.Checker(
-            tree, file_tokens=file_tokens, withDoctest=self.withDoctest, **kw
-        )
+        w = checker.Checker(tree, withDoctest=self.withDoctest, **kw)
         outputs = [type(o) for o in w.messages]
         expectedOutputs = list(expectedOutputs)
         outputs.sort(key=lambda t: t.__name__)

pyflakes/test/test_checker.py (-184): this file was deleted.
