Enable mypy lintrunner, Part 5 (test/*)
mergennachin committed Jan 6, 2025
1 parent: 3fdff26 · commit: 89d0272
Showing 6 changed files with 23 additions and 25 deletions.
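With 'test/**/*.py' enabled in the lint configuration below, lintrunner's MYPY linter now covers the ExecuTorch test sources as well. As a hedged sketch (assuming lintrunner is installed and that its --take and --all-files options behave as documented), the new coverage could be exercised locally like this:

    # Run only the MYPY linter across all tracked files via lintrunner.
    # Assumes lintrunner is on PATH; exact flag names may vary by version.
    import subprocess

    subprocess.run(["lintrunner", "--take", "MYPY", "--all-files"], check=True)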
2 changes: 1 addition & 1 deletion .lintrunner.toml
@@ -302,7 +302,7 @@ include_patterns = [
'profiler/**/*.py',
'runtime/**/*.py',
'scripts/**/*.py',
# 'test/**/*.py',
'test/**/*.py',
'util/**/*.py',
'*.py',
]
10 changes: 10 additions & 0 deletions .mypy.ini
@@ -21,10 +21,14 @@ files =
profiler,
runtime,
scripts,
test,
util

mypy_path = executorch

[mypy-executorch.backends.*]
follow_untyped_imports = True

[mypy-executorch.codegen.*]
follow_untyped_imports = True

@@ -46,6 +50,12 @@ follow_untyped_imports = True
[mypy-executorch.runtime.*]
follow_untyped_imports = True

[mypy-executorch.test.*]
follow_untyped_imports = True

[mypy-functorch.*]
follow_untyped_imports = True

[mypy-requests.*]
follow_untyped_imports = True
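
The per-module overrides added here rely on follow_untyped_imports, a mypy option (available in recent releases) that makes mypy analyze imports from packages shipping without stubs or a py.typed marker instead of skipping them. A minimal illustration using the requests override above (assuming requests is installed and, as has historically been the case, ships no py.typed marker):

    # Without the [mypy-requests.*] override, mypy would report something like
    #   Skipping analyzing "requests": module is installed, but missing library
    #   stubs or py.typed marker  [import-untyped]
    # and treat every symbol from it as Any. With follow_untyped_imports = True,
    # mypy follows the import into requests' own (unannotated) source instead.
    import requests

    print(requests.Response)  # statically, mypy now sees requests' real definition, not Any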

20 changes: 6 additions & 14 deletions test/end2end/exported_module.py
@@ -126,9 +126,7 @@ def return_wrapper():
trace_inputs_method = "get_upper_bound_inputs"
get_trace_inputs = get_inputs_adapter(
(
# pyre-fixme[6]: For 1st argument expected `(...) -> Any` but got
# `Union[Module, Tensor]`.
getattr(eager_module, trace_inputs_method)
getattr(eager_module, trace_inputs_method) # type: ignore[arg-type]
if hasattr(eager_module, trace_inputs_method)
else eager_module.get_random_inputs
),
@@ -144,18 +142,14 @@ def return_wrapper():
if hasattr(eager_module, "get_dynamic_shapes"):
assert capture_config is not None
assert capture_config.enable_aot is True
# pyre-fixme[29]: `Union[nn.modules.module.Module,
# torch._tensor.Tensor]` is not a function.
trace_dynamic_shapes = eager_module.get_dynamic_shapes()
trace_dynamic_shapes = eager_module.get_dynamic_shapes() # type: ignore[operator]
method_name_to_dynamic_shapes = {}
for method in methods:
method_name_to_dynamic_shapes[method] = trace_dynamic_shapes

memory_planning_pass = MemoryPlanningPass()
if hasattr(eager_module, "get_memory_planning_pass"):
# pyre-fixme[29]: `Union[nn.modules.module.Module,
# torch._tensor.Tensor]` is not a function.
memory_planning_pass = eager_module.get_memory_planning_pass()
memory_planning_pass = eager_module.get_memory_planning_pass() # type: ignore[operator]

class WrapperModule(nn.Module):
def __init__(self, method):
@@ -172,7 +166,7 @@ def __init__(self, method):
assert method_name == "forward"
ep = _export(
eager_module,
method_input,
method_input, # type: ignore[arg-type]
dynamic_shapes=(
method_name_to_dynamic_shapes[method_name]
if method_name_to_dynamic_shapes
@@ -184,7 +178,7 @@ def __init__(self, method):
else:
exported_methods[method_name] = export(
eager_module,
method_input,
method_input, # type: ignore[arg-type]
dynamic_shapes=(
method_name_to_dynamic_shapes[method_name]
if method_name_to_dynamic_shapes
@@ -220,9 +214,7 @@ def __init__(self, method):

# Get a function that creates random inputs appropriate for testing.
get_random_inputs_fn = get_inputs_adapter(
# pyre-fixme[6]: For 1st argument expected `(...) -> Any` but got
# `Union[Module, Tensor]`.
eager_module.get_random_inputs,
eager_module.get_random_inputs, # type: ignore[arg-type]
# all exported methods must have the same signature so just pick the first one.
methods[0],
)
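
The pyre-fixme comments removed in this file are replaced with mypy suppressions because torch.nn.Module.__getattr__ is annotated as returning Union[Tensor, Module]; mypy therefore cannot tell that a dynamically looked-up attribute such as get_random_inputs is callable, which surfaces as [operator] when calling it and as [arg-type] when passing it where a callable is expected. A minimal sketch of the same situation, with a hypothetical module standing in for the eager test modules (requires torch):

    import torch
    import torch.nn as nn

    class ExampleEagerModule(nn.Module):  # hypothetical stand-in for an eager test module
        def get_random_inputs(self):
            return (torch.randn(2, 2),)

    eager_module: nn.Module = ExampleEagerModule()

    # Statically, mypy only sees nn.Module; attribute lookup goes through
    # __getattr__, whose declared return type (Tensor | Module) is not callable,
    # hence the same [operator] suppression used in the diff above.
    inputs = eager_module.get_random_inputs()  # type: ignore[operator]
    print(inputs[0].shape)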
6 changes: 1 addition & 5 deletions test/end2end/test_end2end.py
@@ -52,9 +52,7 @@
kernel_mode = None # either aten mode or lean mode
try:
from executorch.extension.pybindings.portable_lib import (
_load_bundled_program_from_buffer,
_load_for_executorch_from_buffer,
_load_for_executorch_from_bundled_program,
)

kernel_mode = "lean"
@@ -63,10 +61,8 @@
pass

try:
from executorch.extension.pybindings.aten_lib import (
_load_bundled_program_from_buffer,
from executorch.extension.pybindings.aten_lib import ( # type: ignore[import-not-found]
_load_for_executorch_from_buffer,
_load_for_executorch_from_bundled_program,
)

assert kernel_mode is None
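
The aten_lib pybindings are only built in some configurations, so the existing try/except keeps the test importable at runtime, while the new # type: ignore[import-not-found] silences mypy when the compiled extension is absent from its environment. A minimal sketch of the same optional-extension pattern, using a deliberately fictional module name:

    # Optional native extension: tolerate absence at runtime and for mypy.
    # "some_project.optional_ext" is a made-up name for illustration only.
    try:
        from some_project.optional_ext import load_fn  # type: ignore[import-not-found]

        HAVE_EXT = True
    except ImportError:
        HAVE_EXT = False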
6 changes: 2 additions & 4 deletions test/models/export_delegated_program.py
@@ -118,9 +118,7 @@ def export_module_to_program(
eager_module = module_class().eval()
inputs = ()
if hasattr(eager_module, "get_random_inputs"):
# pyre-fixme[29]: `Union[nn.modules.module.Module, torch._tensor.Tensor]` is
# not a function.
inputs = eager_module.get_random_inputs()
inputs = eager_module.get_random_inputs() # type: ignore[operator]

class WrapperModule(torch.nn.Module):
def __init__(self, fn):
@@ -153,7 +151,7 @@ def forward(self, *args, **kwargs):
).to_executorch(config=et_config)
else:
edge: exir.EdgeProgramManager = to_edge(exported_program)
lowered_module = to_backend(
lowered_module = to_backend( # type: ignore[call-arg]
backend_id, edge.exported_program(), compile_specs=[]
)

4 changes: 3 additions & 1 deletion test/models/generate_linear_out_bundled_program.py
@@ -27,7 +27,9 @@
from executorch.exir.passes import MemoryPlanningPass, ToOutVarPass
from executorch.exir.print_program import pretty_print

from executorch.test.models.linear_model import LinearModel
from executorch.test.models.linear_model import ( # type: ignore[import-not-found]
LinearModel,
)
from torch.export import export


