Drop support for Python 3.10. Reasons:

* type annotations in py3.11 are much simpler (no List, Dict, etc.); see the sketch below
* by the time this gets to 1.0, Python 3.12 will probably be released
msaelices committed Sep 10, 2023
1 parent 4669617 commit 65eefda
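
A hedged illustration of the typing simplification the first bullet refers to: builtin generics (PEP 585) and X | Y unions (PEP 604) replace the typing imports. The function names below are made up for this example and are not taken from the codebase.

# Old style: generic aliases and Optional imported from typing
from typing import List, Optional

def old_style(tokens: List[str], start: Optional[int] = None) -> List[str]:
    return tokens[start:] if start else tokens

# New style, as used throughout this commit: builtin generics and | unions
def new_style(tokens: list[str], start: int | None = None) -> list[str]:
    return tokens[start:] if start else tokens
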
Showing 6 changed files with 12 additions and 14 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python-package.yml
@@ -16,7 +16,7 @@ jobs:
strategy:
fail-fast: false
matrix:
-python-version: ["3.10", "3.11"]
+python-version: ["3.11"]

steps:
- uses: actions/checkout@v3
4 changes: 2 additions & 2 deletions py2mojo/converters/assignment.py
@@ -1,13 +1,13 @@
import ast
from functools import partial
-from typing import Iterable, List
+from typing import Iterable

from tokenize_rt import Token

from ..helpers import ast_to_offset, get_annotation_type, find_token, find_token_by_name, get_mojo_type


-def _replace_assignment(tokens: List[Token], i: int, level: int, new_type: str) -> None:
+def _replace_assignment(tokens: list[Token], i: int, level: int, new_type: str) -> None:
tokens.insert(i, Token(name='NAME', src='var '))
ann_idx = find_token(tokens, i, ':')
type_idx = find_token_by_name(tokens, ann_idx, name='NAME')
4 changes: 2 additions & 2 deletions py2mojo/converters/functiondef.py
@@ -1,6 +1,6 @@
import ast
from functools import partial
-from typing import Iterable, Optional
+from typing import Iterable

from tokenize_rt import Token

@@ -15,7 +15,7 @@


def _replace_annotation(
-tokens: list, i: int, level: int, end_offset: int, new_type: str, ann_offset: Optional[int] = None
+tokens: list, i: int, level: int, end_offset: int, new_type: str, ann_offset: int | None = None
) -> None:
if ann_offset:
ann_idx = find_token_after_offset(tokens, i, ann_offset)
11 changes: 5 additions & 6 deletions py2mojo/helpers.py
@@ -1,15 +1,14 @@
import ast
import re
-from typing import List, Union

from tokenize_rt import UNIMPORTANT_WS, Offset, Token


-def ast_to_offset(node: Union[ast.expr, ast.stmt]) -> Offset:
+def ast_to_offset(node: ast.expr | ast.stmt) -> Offset:
return Offset(node.lineno, node.col_offset)


-def find_token(tokens: List[Token], i: int, src: str) -> int:
+def find_token(tokens: list[Token], i: int, src: str) -> int:
"""Find the index of the token with the given src."""
try:
while tokens[i].src != src:
@@ -19,7 +18,7 @@ def find_token(tokens: List[Token], i: int, src: str) -> int:
return i


-def find_token_by_name(tokens: List[Token], i: int, name: str) -> int:
+def find_token_by_name(tokens: list[Token], i: int, name: str) -> int:
"""Find the index of the token with the given name."""
try:
while tokens[i].name != name:
@@ -29,7 +28,7 @@ def find_token_by_name(tokens: List[Token], i: int, name: str) -> int:
return i


-def find_token_after_offset(tokens: List[Token], i: int, offset: int) -> int:
+def find_token_after_offset(tokens: list[Token], i: int, offset: int) -> int:
"""Find the index of the token after the given offset."""
try:
while tokens[i].utf8_byte_offset < offset:
@@ -39,7 +38,7 @@ def find_token_after_offset(tokens: List[Token], i: int, offset: int) -> int:
return i


-def fixup_dedent_tokens(tokens: List[Token]) -> None:
+def fixup_dedent_tokens(tokens: list[Token]) -> None:
# copied from pyupgrade
"""For whatever reason the DEDENT / UNIMPORTANT_WS tokens are misordered
| if True:
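
For context, a hedged usage sketch of the retyped helpers above. It assumes tokenize_rt's src_to_tokens and the find_token / find_token_by_name signatures shown in this diff; the example source string is made up.

from tokenize_rt import src_to_tokens

from py2mojo.helpers import find_token, find_token_by_name

# Tokenize a small annotated assignment, then locate tokens by src and by name.
tokens = src_to_tokens('x: int = 1\n')
colon_idx = find_token(tokens, 0, ':')                    # first token whose src is ':'
type_idx = find_token_by_name(tokens, colon_idx, 'NAME')  # next NAME token, i.e. 'int'
print(tokens[type_idx].src)
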
4 changes: 2 additions & 2 deletions py2mojo/main.py
@@ -6,15 +6,15 @@
import sys
import tokenize
from collections import defaultdict
-from typing import Callable, List, Sequence
+from typing import Callable, Sequence

from tokenize_rt import Token, reversed_enumerate, src_to_tokens, tokens_to_src

from .converters import convert_assignment, convert_functiondef, convert_classdef
from .helpers import fixup_dedent_tokens


-TokenFunc = Callable[[List[Token], int], None]
+TokenFunc = Callable[[list[Token], int], None]


def get_converters(klass: type) -> list[TokenFunc]:
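
A hedged sketch of a callable matching the TokenFunc alias defined above. Only the (list[Token], int) -> None shape comes from this diff; the converter itself is hypothetical.

from tokenize_rt import Token

def example_converter(tokens: list[Token], i: int) -> None:
    # Conforms to TokenFunc: mutate the token list in place, starting at index i.
    tokens.insert(i, Token(name='COMMENT', src='# converted'))
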
1 change: 0 additions & 1 deletion pyproject.toml
@@ -24,7 +24,6 @@ classifiers = [
"License :: OSI Approved :: MIT License",

"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3 :: Only",
]
