Skip to content

Commit

Permalink
remove dependency on six (#155)
Browse files Browse the repository at this point in the history
  • Loading branch information
a-detiste authored Oct 6, 2024
1 parent ad41ab6 commit 4eebd86
Show file tree
Hide file tree
Showing 10 changed files with 29 additions and 75 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build-and-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ jobs:
- name: Mypy testing
run: |
# Not an exact mypy version, as we need 0.942 for pypy-3.8 support, but it's not available on 3.5
pip install types-six "mypy>=0.910,<=0.942"
pip install "mypy>=0.910,<=0.942"
python -m mypy asttokens tests/*.py
- name: Fast tests with coverage
Expand Down
15 changes: 4 additions & 11 deletions asttokens/asttokens.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,6 @@
from ast import Module
from typing import Iterable, Iterator, List, Optional, Tuple, Any, cast, TYPE_CHECKING

import six
from six.moves import xrange # pylint: disable=redefined-builtin

from .line_numbers import LineNumbers
from .util import (
Token, match_token, is_non_coding_token, patched_generate_tokens, last_stmt,
Expand All @@ -33,18 +30,14 @@
from .util import AstNode, TokenInfo


class ASTTextBase(six.with_metaclass(abc.ABCMeta, object)):
def __init__(self, source_text, filename):
# type: (Any, str) -> None
# FIXME: Strictly, the type of source_text is one of the six string types, but hard to specify with mypy given
# https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases

class ASTTextBase(metaclass=abc.ABCMeta):
def __init__(self, source_text: str, filename: str) -> None:
self._filename = filename

  # Decode the source after parsing. (If the encoding was not utf-8
  # compatible, then even if the source parses correctly, we'll fail
  # with a unicode error here.)
source_text = six.ensure_text(source_text)
source_text = str(source_text)

self._text = source_text
self._line_numbers = LineNumbers(source_text)
Expand Down Expand Up @@ -249,7 +242,7 @@ def token_range(self,
Yields all tokens in order from first_token through and including last_token. If
include_extra is True, includes non-coding tokens such as tokenize.NL and .COMMENT.
"""
for i in xrange(first_token.index, last_token.index + 1):
for i in range(first_token.index, last_token.index + 1):
if include_extra or not is_non_coding_token(self._tokens[i].type):
yield self._tokens[i]

Expand Down
21 changes: 1 addition & 20 deletions asttokens/mark_tokens.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@
from ast import Module
from typing import Callable, List, Union, cast, Optional, Tuple, TYPE_CHECKING

import six

from . import util
from .asttokens import ASTTokens
from .util import AstConstant
Expand Down Expand Up @@ -186,16 +184,6 @@ def visit_listcomp(self, node, first_token, last_token):
# type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
return self.handle_comp('[', node, first_token, last_token)

if six.PY2:
# We shouldn't do this on PY3 because its SetComp/DictComp already have a correct start.
def visit_setcomp(self, node, first_token, last_token):
# type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
return self.handle_comp('{', node, first_token, last_token)

def visit_dictcomp(self, node, first_token, last_token):
# type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
return self.handle_comp('{', node, first_token, last_token)

def visit_comprehension(self,
node, # type: AstNode
first_token, # type: util.Token
Expand Down Expand Up @@ -435,7 +423,7 @@ def visit_const(self, node, first_token, last_token):
assert isinstance(node, AstConstant) or isinstance(node, nc.Const)
if isinstance(node.value, numbers.Number):
return self.handle_num(node, node.value, first_token, last_token)
elif isinstance(node.value, (six.text_type, six.binary_type)):
elif isinstance(node.value, (str, bytes)):
return self.visit_str(node, first_token, last_token)
return (first_token, last_token)

Expand Down Expand Up @@ -473,13 +461,6 @@ def visit_assignname(self, node, first_token, last_token):
first_token = last_token = self._code.prev_token(colon)
return (first_token, last_token)

if six.PY2:
# No need for this on Python3, which already handles 'with' nodes correctly.
def visit_with(self, node, first_token, last_token):
# type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
first = self._code.find_token(first_token, token.NAME, 'with', reverse=True)
return (first, last_token)

# Async nodes should typically start with the word 'async'
# but Python < 3.7 doesn't put the col_offset there
# AsyncFunctionDef is slightly different because it might have
Expand Down
7 changes: 3 additions & 4 deletions asttokens/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
from typing import Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union, cast, Any, TYPE_CHECKING

import astroid
from six import iteritems


if TYPE_CHECKING: # pragma: no cover
Expand Down Expand Up @@ -139,7 +138,7 @@ def iter_children_astroid(node, include_joined_str=False):
return node.get_children()


SINGLETONS = {c for n, c in iteritems(ast.__dict__) if isinstance(c, type) and
SINGLETONS = {c for n, c in ast.__dict__.items() if isinstance(c, type) and
issubclass(c, (ast.expr_context, ast.boolop, ast.operator, ast.unaryop, ast.cmpop))}


Expand All @@ -165,9 +164,9 @@ def iter_children_ast(node, include_joined_str=False):
yield child


stmt_class_names = {n for n, c in iteritems(ast.__dict__)
stmt_class_names = {n for n, c in ast.__dict__.items()
if isinstance(c, type) and issubclass(c, ast.stmt)}
expr_class_names = ({n for n, c in iteritems(ast.__dict__)
expr_class_names = ({n for n, c in ast.__dict__.items()
if isinstance(c, type) and issubclass(c, ast.expr)} |
{'AssignName', 'DelName', 'Const', 'AssignAttr', 'DelAttr'})

Expand Down
1 change: 0 additions & 1 deletion docs/requirements.in
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,3 @@
sphinx
sphinx_rtd_theme
readthedocs-sphinx-search
six
2 changes: 0 additions & 2 deletions docs/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,6 @@ readthedocs-sphinx-search==0.3.2
# via -r requirements.in
requests==2.31.0
# via sphinx
six==1.16.0
# via -r requirements.in
snowballstemmer==2.2.0
# via sphinx
sphinx==6.2.1
Expand Down
7 changes: 2 additions & 5 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -29,17 +29,14 @@ classifiers =

[options]
packages = asttokens
install_requires =
six >= 1.12.0
typing; python_version < "3.5"
setup_requires = setuptools>=44; setuptools_scm[toml]>=3.4.3
python_requires = >=3.8

[options.extras_require]
astroid =
astroid >=2, <4; python_version >= "3"
astroid >=2, <4
test =
astroid >=2, <4; python_version >= "3"
astroid >=2, <4
pytest
pytest-cov
pytest-xdist
Expand Down
3 changes: 1 addition & 2 deletions tests/test_asttokens.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
import ast
import six
import token
import tokenize
import unittest
Expand Down Expand Up @@ -103,7 +102,7 @@ def test_unicode_offsets(self):
# translate correctly.
source = "foo('фыва',a,b)\n"
atok = asttokens.ASTTokens(source)
self.assertEqual([six.text_type(t) for t in atok.tokens], [
self.assertEqual([str(t) for t in atok.tokens], [
"NAME:'foo'",
"OP:'('",
'STRING:"%s"' % repr('фыва').lstrip('u'),
Expand Down
42 changes: 16 additions & 26 deletions tests/test_mark_tokens.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
from time import time

import astroid
import six
from asttokens import util, ASTTokens

from . import tools
Expand Down Expand Up @@ -139,7 +138,7 @@ def verify_fixture_file(self, path):
m = self.create_mark_checker(source, verify=False)
tested_nodes = m.verify_all_nodes(self)

exp_index = (0 if six.PY2 else 1) + (3 if self.is_astroid_test else 0)
exp_index = 1 + (3 if self.is_astroid_test else 0)
if not self.is_astroid_test:
# For ast on Python 3.9, slices are expressions, we handle them and test them.
if issubclass(ast.Slice, ast.expr):
Expand Down Expand Up @@ -280,7 +279,7 @@ def test_print_function(self):
  # verify_all_nodes used to be skipped on Python 2, where the print() call parsed in
  # isolation was viewed as a Print node (it didn't see the future import); it is now always on.
source = tools.read_fixture('astroid/nonregr.py')
m = self.create_mark_checker(source, verify=six.PY3)
m = self.create_mark_checker(source, verify=True)

# Line 16 is: [indent 8] print(v.get('yo'))
self.assertEqual(m.view_nodes_at(16, 8),
Expand All @@ -290,8 +289,7 @@ def test_print_function(self):
# To make sure we can handle various hard cases, we include tests for issues reported for a
# similar project here: https://bitbucket.org/plas/thonny

if not six.PY2:
def test_nonascii(self):
def test_nonascii(self):
# Test of https://bitbucket.org/plas/thonny/issues/162/weird-range-marker-crash-with-non-ascii
    # (Previously guarded to Python 3 only, since Python 2 didn't support unicode identifiers.)
for source in (
Expand All @@ -305,7 +303,7 @@ def test_nonascii(self):
"%s:%s" % ("AssignName" if self.is_astroid_test else "Name", source.split("=")[0]),
})

def test_bytes_smoke(self):
def test_bytes_smoke(self):
const = 'Const' if self.is_astroid_test else (
'Constant'
if sys.version_info >= (3, 8)
Expand Down Expand Up @@ -400,8 +398,7 @@ def print_all(a, b, c, d, e):
m = self.create_mark_checker(source)
self.assertEqual(m.view_nodes_at(5, 0),
{ "Expr:print_all(*arr)", "Call:print_all(*arr)", "Name:print_all" })
if not six.PY2 or self.is_astroid_test:
self.assertEqual(m.view_nodes_at(5, 10), { "Starred:*arr" })
self.assertEqual(m.view_nodes_at(5, 10), { "Starred:*arr" })
self.assertEqual(m.view_nodes_at(5, 11), { "Name:arr" })


Expand All @@ -419,16 +416,12 @@ def test_conditional_expr(self):
m = self.create_mark_checker(source)
name_a = 'AssignName:a' if self.is_astroid_test else 'Name:a'
const_true = ('Const:True' if self.is_astroid_test else
'Name:True' if six.PY2 else
'Constant:True')
self.assertEqual(m.view_nodes_at(1, 0),
{name_a, "Assign:a = True if True else False", "Module:" + source})
self.assertEqual(m.view_nodes_at(1, 4),
{const_true, 'IfExp:True if True else False'})
if six.PY2:
self.assertEqual(m.view_nodes_at(2, 0), {"Print:print(a)"})
else:
self.assertEqual(m.view_nodes_at(2, 0), {"Name:print", "Call:print(a)", "Expr:print(a)"})
self.assertEqual(m.view_nodes_at(2, 0), {"Name:print", "Call:print(a)", "Expr:print(a)"})

def test_calling_lambdas(self):
# See https://bitbucket.org/plas/thonny/issues/96/calling-lambdas-crash-the-debugger
Expand Down Expand Up @@ -502,8 +495,7 @@ def test_del_dict(self):
self.assertEqual(m.view_nodes_at(2, 0), {'Delete:del x[4]'})
self.assertEqual(m.view_nodes_at(2, 4), {'Name:x', 'Subscript:x[4]'})

if not six.PY2:
def test_bad_tokenless_types(self):
def test_bad_tokenless_types(self):
# Cases where _get_text_positions_tokenless is incorrect in 3.8.
source = textwrap.dedent("""
def foo(*, name: str): # keyword-only argument with type annotation
Expand All @@ -513,7 +505,7 @@ def foo(*, name: str): # keyword-only argument with type annotation
""")
self.create_mark_checker(source)

def test_return_annotation(self):
def test_return_annotation(self):
# See https://bitbucket.org/plas/thonny/issues/9/range-marker-crashes-on-function-return
source = textwrap.dedent("""
def liida_arvud(x: int, y: int) -> int:
Expand Down Expand Up @@ -602,7 +594,7 @@ def f(x):
log(x)
''')
  # Verification used to fail on Python 2, which turned `with X, Y` into `with X: with Y`.
m = self.create_mark_checker(source, verify=six.PY3)
m = self.create_mark_checker(source, verify=True)
self.assertEqual(m.view_nodes_at(5, 4), {
'With:with B() as b, C() as c: log(b, c)'
})
Expand Down Expand Up @@ -679,9 +671,8 @@ def test_complex_slice_and_parens(self):
source = 'f((x)[:, 0])'
self.create_mark_checker(source)

if six.PY3:
@pytest.mark.slow
def test_sys_modules(self):
@pytest.mark.slow
def test_sys_modules(self):
"""
Verify all nodes on source files obtained from sys.modules.
Expand Down Expand Up @@ -735,11 +726,10 @@ def test_sys_modules(self):
# it's purely an astroid bug that we can safely ignore.
continue

if six.PY3:
def test_dict_merge(self):
def test_dict_merge(self):
self.create_mark_checker("{**{}}")

def test_async_def(self):
def test_async_def(self):
self.create_mark_checker("""
async def foo():
pass
Expand All @@ -749,7 +739,7 @@ async def foo():
pass
""")

def test_async_for_and_with(self):
def test_async_for_and_with(self):
# Can't verify all nodes because in < 3.7
# async for/with outside of a function is invalid syntax
m = self.create_mark_checker("""
Expand All @@ -760,7 +750,7 @@ async def foo():
assert m.view_nodes_at(3, 2) == {"AsyncFor:async for x in y: pass"}
assert m.view_nodes_at(4, 2) == {"AsyncWith:async with x as y: pass"}

def test_await(self):
def test_await(self):
# Can't verify all nodes because in astroid
# await outside of an async function is invalid syntax
m = self.create_mark_checker("""
Expand Down Expand Up @@ -923,7 +913,7 @@ def assert_nodes_equal(self, t1, t2):
)
else:
# Weird bug in astroid that collapses spaces in docstrings sometimes maybe
if self.is_astroid_test and isinstance(t1, six.string_types):
if self.is_astroid_test and isinstance(t1, str):
t1 = re.sub(r'^ +$', '', t1, flags=re.MULTILINE)
t2 = re.sub(r'^ +$', '', t2, flags=re.MULTILINE)

Expand Down
4 changes: 1 addition & 3 deletions tests/test_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@

import astroid
import pytest
import six

from .context import asttokens
from .tools import get_node_name
Expand Down Expand Up @@ -115,8 +114,7 @@ def test_expect_token():
asttokens.util.expect_token(tok, token.OP)


if six.PY3:
def test_combine_tokens():
def test_combine_tokens():
from tokenize import TokenInfo, generate_tokens, ERRORTOKEN, OP, NUMBER, NAME
from asttokens.util import combine_tokens, patched_generate_tokens

Expand Down

0 comments on commit 4eebd86

Please sign in to comment.