Skip to content

Commit

Permalink
fix: mypy linter errors
Browse files Browse the repository at this point in the history
  • Loading branch information
jnoortheen committed Oct 16, 2024
1 parent 53c3713 commit 923bfb1
Show file tree
Hide file tree
Showing 4 changed files with 42 additions and 35 deletions.
40 changes: 24 additions & 16 deletions peg_parser/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ def compound_stmt(self) -> Any | None:
return None

def assignment(self) -> ast.stmt | None:
# assignment: NAME ':' expression ['=' annotated_rhs] | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] | (assignment_lhs)+ annotated_rhs !'=' TYPE_COMMENT? | single_target augassign ~ annotated_rhs | invalid_assignment
# assignment: NAME ':' expression ['=' annotated_rhs] | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] | (assignment_lhs)+ annotated_rhs !'=' type_comment_str? | single_target augassign ~ annotated_rhs | invalid_assignment
mark = self._mark()
_lnum, _col = self._tokenizer.peek().start
if (a := self.name()) and (self.expect(":")) and (b := self.expression()) and (c := self._tmp_6(),):
Expand Down Expand Up @@ -220,7 +220,7 @@ def assignment(self) -> ast.stmt | None:
(a2 := self.repeated(self.assignment_lhs))
and (b2 := self.annotated_rhs())
and (self.negative_lookahead(self.expect, "="))
and (tc := self.token("TYPE_COMMENT"),)
and (tc := self.type_comment_str(),)
):
return ast.Assign(targets=a2, value=b2, type_comment=tc, **self.span(_lnum, _col))
self._reset(mark)
Expand Down Expand Up @@ -958,7 +958,7 @@ def while_stmt(self) -> ast.While | None:
return None

def for_stmt(self) -> ast.For | ast.AsyncFor | None:
# for_stmt: invalid_for_stmt | 'for' star_targets 'in' ~ star_expressions &&':' TYPE_COMMENT? block else_block? | 'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? | invalid_for_target
# for_stmt: invalid_for_stmt | 'for' star_targets 'in' ~ star_expressions &&':' type_comment_str? block else_block? | 'async' 'for' star_targets 'in' ~ star_expressions ':' type_comment_str? block else_block? | invalid_for_target
mark = self._mark()
_lnum, _col = self._tokenizer.peek().start
if self.call_invalid_rules and (self.invalid_for_stmt()):
Expand All @@ -972,7 +972,7 @@ def for_stmt(self) -> ast.For | ast.AsyncFor | None:
and (cut := True)
and (ex := self.star_expressions())
and (self.expect_forced(self.expect(":"), "':'"))
and (tc := self.token("TYPE_COMMENT"),)
and (tc := self.type_comment_str(),)
and (b := self.block())
and (el := self.else_block(),)
):
Expand All @@ -991,7 +991,7 @@ def for_stmt(self) -> ast.For | ast.AsyncFor | None:
and (cut := True)
and (ex := self.star_expressions())
and (self.expect(":"))
and (tc := self.token("TYPE_COMMENT"),)
and (tc := self.type_comment_str(),)
and (b := self.block())
and (el := self.else_block(),)
):
Expand All @@ -1007,7 +1007,7 @@ def for_stmt(self) -> ast.For | ast.AsyncFor | None:
return None

def with_stmt(self) -> ast.With | ast.AsyncWith | None:
# with_stmt: invalid_with_stmt_indent | &with_macro_start ~ with_macro_stmt | 'with' '(' ','.with_item+ ','? ')' ':' block | 'with' ','.with_item+ ':' TYPE_COMMENT? block | 'async' 'with' '(' ','.with_item+ ','? ')' ':' block | 'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block | invalid_with_stmt
# with_stmt: invalid_with_stmt_indent | &with_macro_start ~ with_macro_stmt | 'with' '(' ','.with_item+ ','? ')' ':' block | 'with' ','.with_item+ ':' type_comment_str? block | 'async' 'with' '(' ','.with_item+ ','? ')' ':' block | 'async' 'with' ','.with_item+ ':' type_comment_str? block | invalid_with_stmt
mark = self._mark()
_lnum, _col = self._tokenizer.peek().start
if self.call_invalid_rules and (self.invalid_with_stmt_indent()):
Expand Down Expand Up @@ -1038,7 +1038,7 @@ def with_stmt(self) -> ast.With | ast.AsyncWith | None:
(self.expect("with"))
and (a := self.gathered(self.with_item, self.expect, ","))
and (self.expect(":"))
and (tc := self.token("TYPE_COMMENT"),)
and (tc := self.type_comment_str(),)
and (b := self.block())
):
return ast.With(items=a, body=b, type_comment=tc, **self.span(_lnum, _col))
Expand All @@ -1060,7 +1060,7 @@ def with_stmt(self) -> ast.With | ast.AsyncWith | None:
and (self.expect("with"))
and (a := self.gathered(self.with_item, self.expect, ","))
and (self.expect(":"))
and (tc := self.token("TYPE_COMMENT"),)
and (tc := self.type_comment_str(),)
and (b := self.block())
):
return ast.AsyncWith(items=a, body=b, type_comment=tc, **self.span(_lnum, _col))
Expand Down Expand Up @@ -3225,10 +3225,10 @@ def star_atom(self) -> ast.expr | None:
return self.set_expr_context(a1, Store)
self._reset(mark)
if (self.expect("(")) and (a2 := self.star_targets_tuple_seq(),) and (self.expect(")")):
return ast.Tuple(elts=a2, ctx=Store, **self.span(_lnum, _col))
return ast.Tuple(elts=a2 or [], ctx=Store, **self.span(_lnum, _col))
self._reset(mark)
if (self.expect("[")) and (a3 := self.star_targets_list_seq(),) and (self.expect("]")):
return ast.List(elts=a3, ctx=Store, **self.span(_lnum, _col))
return ast.List(elts=a3 or [], ctx=Store, **self.span(_lnum, _col))
self._reset(mark)
return None

Expand Down Expand Up @@ -3319,7 +3319,7 @@ def t_lookahead(self) -> Any | None:
(self.expect, "."),
)

def del_targets(self) -> Any | None:
def del_targets(self) -> list[ast.expr] | None:
# del_targets: ','.del_target+ ','?
mark = self._mark()
if (a := self.gathered(self.del_target, self.expect, ",")) and (self.expect(","),):
Expand Down Expand Up @@ -3365,15 +3365,23 @@ def del_t_atom(self) -> ast.expr | None:
return self.set_expr_context(b, Del)
self._reset(mark)
if (self.expect("(")) and (c := self.del_targets(),) and (self.expect(")")):
return ast.Tuple(elts=c, ctx=Del, **self.span(_lnum, _col))
return ast.Tuple(elts=c or [], ctx=Del, **self.span(_lnum, _col))
self._reset(mark)
if (self.expect("[")) and (c := self.del_targets(),) and (self.expect("]")):
return ast.List(elts=c, ctx=Del, **self.span(_lnum, _col))
return ast.List(elts=c or [], ctx=Del, **self.span(_lnum, _col))
self._reset(mark)
return None

def type_comment_str(self) -> str | None:
    """Match a single TYPE_COMMENT token and return its text.

    Returns the token's string content on success; on failure the
    tokenizer position is restored and None is returned.
    """
    # type_comment_str: TYPE_COMMENT
    mark = self._mark()
    tok = self.token("TYPE_COMMENT")
    if tok:
        return tok.string
    # No TYPE_COMMENT at this position — backtrack to where we started.
    self._reset(mark)
    return None

def func_type_comment(self) -> str | None:
# func_type_comment: NEWLINE TYPE_COMMENT &(NEWLINE INDENT) | invalid_double_type_comments | TYPE_COMMENT
# func_type_comment: NEWLINE TYPE_COMMENT &(NEWLINE INDENT) | invalid_double_type_comments | type_comment_str
mark = self._mark()
if (
(self.token("NEWLINE"))
Expand All @@ -3385,8 +3393,8 @@ def func_type_comment(self) -> str | None:
if self.call_invalid_rules and (self.invalid_double_type_comments()):
return None
self._reset(mark)
if t := self.token("TYPE_COMMENT"):
return t.string
if type_comment_str := self.type_comment_str():
return type_comment_str
self._reset(mark)
return None

Expand Down
6 changes: 3 additions & 3 deletions peg_parser/subheader.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,6 @@ class SpanDict(TypedDict):
TR = TypeVar("TR") # repeated
TS = TypeVar("TS")
TG = TypeVar("TG")
TCtx = ast.Name | ast.Attribute
P = TypeVar("P", bound="Parser")
P1 = ParamSpec("P1")
F = TypeVar("F", bound=Callable[..., Any])
Expand Down Expand Up @@ -486,9 +485,10 @@ def get_invalid_target(self, target: Target, node: Node | None) -> Node | None:
return None
return node

def set_expr_context(self, node: TCtx, context: ast.Load | ast.Store | ast.Del) -> TCtx:
def set_expr_context(self, node: T, context: ast.Load | ast.Store | ast.Del) -> T:
"""Set the context (Load, Store, Del) of an ast node."""
node.ctx = context
if hasattr(node, "ctx"):
node.ctx = context
return node

def ensure_real(self, number: TokenInfo) -> float | int:
Expand Down
6 changes: 1 addition & 5 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -164,10 +164,6 @@ local_partial_types = true
warn_unreachable = false
disable_error_code = [
"func-returns-value", # 61
# "var-annotated", # 35
"arg-type", # 331 errors
# "return-value",
# "call-overload",
"no-any-return",
]
strict = true
#strict = false
25 changes: 14 additions & 11 deletions tasks/xonsh.gram
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ assignment[ast.stmt]:
LOCATIONS,
)
}
| a2=(assignment_lhs)+ b2=annotated_rhs !'=' tc=[TYPE_COMMENT] {
| a2=(assignment_lhs)+ b2=annotated_rhs !'=' tc=[type_comment_str] {
ast.Assign(targets=a2, value=b2, type_comment=tc, LOCATIONS)
}
| a3=single_target b3=augassign ~ c3=annotated_rhs {
Expand Down Expand Up @@ -385,9 +385,9 @@ while_stmt[ast.While]:

for_stmt[Union[ast.For, ast.AsyncFor]]:
| invalid_for_stmt
| 'for' t=star_targets 'in' ~ ex=star_expressions &&':' tc=[TYPE_COMMENT] b=block el=[else_block] {
| 'for' t=star_targets 'in' ~ ex=star_expressions &&':' tc=[type_comment_str] b=block el=[else_block] {
ast.For(target=t, iter=ex, body=b, orelse=el or [], type_comment=tc, LOCATIONS) }
| 'async' 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
| 'async' 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[type_comment_str] b=block el=[else_block] {
ast.AsyncFor(target=t, iter=ex, body=b, orelse=el or [], type_comment=tc, LOCATIONS) }
| invalid_for_target

Expand All @@ -400,13 +400,13 @@ with_stmt[Union[ast.With, ast.AsyncWith]]:
| 'with' '(' a=','.with_item+ ','? ')' ':' b=block {
ast.With(items=a, body=b, LOCATIONS)
}
| 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
| 'with' a=','.with_item+ ':' tc=[type_comment_str] b=block {
ast.With(items=a, body=b, type_comment=tc, LOCATIONS)
}
| 'async' 'with' '(' a=','.with_item+ ','? ')' ':' b=block {
ast.AsyncWith(items=a, body=b, LOCATIONS)
}
| 'async' 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
| 'async' 'with' a=','.with_item+ ':' tc=[type_comment_str] b=block {
ast.AsyncWith(items=a, body=b, type_comment=tc, LOCATIONS) }
| invalid_with_stmt

Expand Down Expand Up @@ -1294,8 +1294,8 @@ target_with_star_atom[ast.expr] (memo):
star_atom[ast.expr]:
| a=NAME { ast.Name(id=a.string, ctx=Store, LOCATIONS) }
| '(' a1=target_with_star_atom ')' { self.set_expr_context(a1, Store) }
| '(' a2=[star_targets_tuple_seq] ')' { ast.Tuple(elts=a2, ctx=Store, LOCATIONS) }
| '[' a3=[star_targets_list_seq] ']' { ast.List(elts=a3, ctx=Store, LOCATIONS) }
| '(' a2=[star_targets_tuple_seq] ')' { ast.Tuple(elts=a2 or [], ctx=Store, LOCATIONS) }
| '[' a3=[star_targets_list_seq] ']' { ast.List(elts=a3 or [], ctx=Store, LOCATIONS) }

single_target:
| single_subscript_attribute_target
Expand Down Expand Up @@ -1326,7 +1326,7 @@ t_lookahead: '(' | '[' | '.'
# Targets for del statements
# --------------------------

del_targets: a=','.del_target+ [','] { a }
del_targets[list[ast.expr]]: a=','.del_target+ [','] { a }

del_target[ast.expr] (memo):
| a=t_primary '.' b=NAME !t_lookahead { ast.Attribute(value=a, attr=b.string, ctx=Del, LOCATIONS) }
Expand All @@ -1336,17 +1336,20 @@ del_target[ast.expr] (memo):
del_t_atom[ast.expr]:
| a=NAME { ast.Name(id=a.string, ctx=Del, LOCATIONS) }
| '(' b=del_target ')' { self.set_expr_context(b, Del) }
| '(' c=[del_targets] ')' { ast.Tuple(elts=c, ctx=Del, LOCATIONS) }
| '[' c=[del_targets] ']' { ast.List(elts=c, ctx=Del, LOCATIONS) }
| '(' c=[del_targets] ')' { ast.Tuple(elts=c or [], ctx=Del, LOCATIONS) }
| '[' c=[del_targets] ']' { ast.List(elts=c or [], ctx=Del, LOCATIONS) }


# TYPING ELEMENTS
# ---------------

type_comment_str[str]:
| t=TYPE_COMMENT { t.string }

func_type_comment[str]:
| NEWLINE t=TYPE_COMMENT &(NEWLINE INDENT) { t.string } # Must be followed by indented block
| invalid_double_type_comments
| t=TYPE_COMMENT { t.string }
| type_comment_str

# ========================= END OF THE GRAMMAR ===========================

Expand Down

0 comments on commit 923bfb1

Please sign in to comment.