Commit

typing(): more typing
PaulRenvoise committed Jun 2, 2024
1 parent 05b802e commit 264b757
Showing 5 changed files with 37 additions and 18 deletions.
15 changes: 10 additions & 5 deletions flashback/debugging/filters/call_highlight_filter.py
@@ -1,5 +1,10 @@
+from __future__ import annotations
+
+from collections.abc import Generator
+
 from pygments.filters import Filter
-from pygments.token import Name
+from pygments.token import Name, _TokenType
+from pygments.lexer import Lexer


 class CallHighlightFilter(Filter):
@@ -15,7 +20,7 @@ def __init__(self, **kwargs) -> None:
         """
         Filter.__init__(self, **kwargs)

-    def filter(self, _lexer, stream):
+    def filter(self, _lexer: Lexer, stream: Generator) -> Generator[tuple[_TokenType, str], None, None]:
         """
         Iterates over the stream of tokens and searches for a name followed by an opening paren to
         change its type to Name.Function.
@@ -31,11 +36,11 @@ def filter(self, _lexer, stream):
         the stack.

         Params:
-            lexer (pygments.lexer.Lexer): the lexer instance
-            stream (generator): the stream of couples tokentype-value
+            lexer: the lexer instance
+            stream: the stream of couples tokentype-value

         Yields:
-            tuple<pygments.token._TokenType, str>: the token type and token value
+            the token type and token value
         """
         try:
             stack = [next(stream)]
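As an aside, a filter like this is attached to a pygments lexer before highlighting. The sketch below is illustrative and not part of the commit; the import path is assumed to mirror the file path shown above, and the sample snippet is arbitrary.

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import Terminal256Formatter

# Assumed import path, mirroring flashback/debugging/filters/call_highlight_filter.py;
# the package may expose the filter elsewhere.
from flashback.debugging.filters.call_highlight_filter import CallHighlightFilter

lexer = PythonLexer()
# Per the docstring above, names followed by an opening paren are re-tagged as Name.Function.
lexer.add_filter(CallHighlightFilter())

print(highlight("total = sum(len(word) for word in words)", lexer, Terminal256Formatter()))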
15 changes: 10 additions & 5 deletions flashback/debugging/filters/decorator_operator_filter.py
@@ -1,5 +1,10 @@
+from __future__ import annotations
+
+from collections.abc import Generator
+
 from pygments.filters import Filter
-from pygments.token import Name, Operator
+from pygments.token import Name, Operator, _TokenType
+from pygments.lexer import Lexer


 class DecoratorOperatorFilter(Filter):
@@ -15,7 +20,7 @@ def __init__(self, **kwargs):
         """
         Filter.__init__(self, **kwargs)

-    def filter(self, _lexer, stream):
+    def filter(self, _lexer: Lexer, stream: Generator) -> Generator[tuple[_TokenType, str], None, None]:
         """
         Iterates over the stream of tokens and splits a `pygments.token.Name.Decorator: into two
         components.
@@ -24,11 +29,11 @@ def filter(self, _lexer, stream):
         but pygments treat the whole thing as a decorator. This filter fixes this behaviour.

         Params:
-            lexer (pygments.lexer.Lexer): the lexer instance
-            stream (generator): the stream of couples tokentype-value
+            lexer: the lexer instance
+            stream: the stream of couples tokentype-value

         Yields:
-            tuple<pygments.token._TokenType, str>: the token type and token value
+            the token type and token value
         """
         for ttype, value in stream:
             if ttype is Name.Decorator:
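The loop body is collapsed above, but the docstring describes splitting a single `@decorator` token into an `@` operator and the bare decorator name. A rough, hypothetical sketch of that transformation (illustrative only, not the commit's actual implementation):

from pygments.token import Name, Operator

def split_decorator_tokens(stream):
    # Illustrative helper: emit "@" as Operator and the remaining text as Name.Decorator,
    # and pass every other token through untouched.
    for ttype, value in stream:
        if ttype is Name.Decorator and value.startswith("@"):
            yield Operator, "@"
            yield Name.Decorator, value[1:]
        else:
            yield ttype, value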
21 changes: 13 additions & 8 deletions flashback/debugging/filters/type_highlight_filter.py
@@ -1,33 +1,38 @@
+from __future__ import annotations
+
+from collections.abc import Generator
+
+from pygments.lexer import Lexer
 from pygments.filters import Filter
-from pygments.token import Name, Keyword
+from pygments.token import Name, Keyword, _TokenType


 class TypeHighlightFilter(Filter):
     """
     Modifies the token type of a Name token to Keyword.Type if its value appears in a list of values.
     """

-    def __init__(self, names, **kwargs):
+    def __init__(self, names: list[str], **kwargs) -> None:
         """
         Params:
-            names (Iterable<str>): the list of names to change the token type
-            kwargs (dict): every additional keyword parameters
+            names: the list of names to change the token type
+            kwargs: every additional keyword parameters
         """
         Filter.__init__(self, **kwargs)

         self.names = set(names)

-    def filter(self, _lexer, stream):
+    def filter(self, _lexer: Lexer, stream: Generator) -> Generator[tuple[_TokenType, str], None, None]:
         """
         Iterates over the stream of tokens and modifies a token's type if its value appears in a
         list of names.

         Params:
-            lexer (pygments.lexer.Lexer): the lexer instance
-            stream (generator): the stream of couples tokentype-value
+            lexer: the lexer instance
+            stream: the stream of couples tokentype-value

         Yields:
-            tuple<pygments.token._TokenType, str>: the token type and token value
+            the token type and token value
         """
         for ttype, value in stream:
             if ttype in Name and value in self.names:
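Unlike the two filters above, this one takes a constructor argument: the list of names to re-tag as Keyword.Type. A possible usage sketch, assuming the import path mirrors the file path (the package may expose the filter differently):

from pygments.lexers import PythonLexer

# Assumed import path, mirroring flashback/debugging/filters/type_highlight_filter.py.
from flashback.debugging.filters.type_highlight_filter import TypeHighlightFilter

lexer = PythonLexer()
# Names matching this list keep their value but get the Keyword.Type token type.
lexer.add_filter(TypeHighlightFilter(names=["int", "str", "bool", "list", "dict"]))

for ttype, value in lexer.get_tokens("def parse(raw: str) -> dict: ..."):
    print(ttype, repr(value))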
2 changes: 2 additions & 0 deletions flashback/debugging/styles/jellybeans.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from typing import ClassVar

 from pygments.style import Style
2 changes: 2 additions & 0 deletions flashback/sampled.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from collections.abc import Callable
 from queue import Queue
 import functools
