Remove all type annotations for consistency.
Some tools don't like the mix of code with and without type hints.
hertschuh committed Jan 15, 2025
1 parent 617b821 commit baf1eaa
Showing 18 changed files with 40 additions and 43 deletions.
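
For context, a minimal sketch of why mixed annotations bother tooling (assuming mypy with default settings; the commit message does not name a specific tool): mypy type-checks the bodies of annotated functions but skips unannotated ones, so a file that mixes both styles gets inconsistent checking.

# sketch.py — hedged illustration, not part of the commit
def annotated(x: int) -> int:
    return x + "1"  # mypy reports an error on this line

def unannotated(x):
    return x + "1"  # same bug, but mypy skips unannotated bodies by default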
2 changes: 1 addition & 1 deletion keras/src/backend/common/backend_utils.py
@@ -356,7 +356,7 @@ def _vectorize_parse_input_dimensions(
             f"expected {len(input_core_dims)}, got {len(args)}"
         )
     shapes = []
-    dim_sizes: dict[str, int] = {}
+    dim_sizes = {}
     for arg, core_dims in zip(args, input_core_dims):
         _vectorize_update_dim_sizes(
             dim_sizes, arg.shape, core_dims, is_input=True

2 changes: 1 addition & 1 deletion keras/src/backend/common/dtypes_test.py
@@ -43,7 +43,7 @@ def setUp(self):
         self.jax_enable_x64.__enter__()
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         self.jax_enable_x64.__exit__(None, None, None)
         return super().tearDown()

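This setUp/tearDown pattern recurs in several files below: a context manager's __enter__/__exit__ calls are split across the two fixtures so the setting stays active for the whole test. A self-contained sketch with a stand-in context manager (jax_enable_x64 itself is created elsewhere in the test class):

import contextlib
import unittest

@contextlib.contextmanager
def enable_feature():
    # stand-in for jax's enable_x64 context manager
    print("feature on")
    try:
        yield
    finally:
        print("feature off")

class ExampleTest(unittest.TestCase):
    def setUp(self):
        self.feature = enable_feature()
        self.feature.__enter__()  # activate for the duration of the test
        return super().setUp()

    def tearDown(self):
        self.feature.__exit__(None, None, None)  # always deactivate
        return super().tearDown()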
2 changes: 1 addition & 1 deletion keras/src/backend/common/variables_test.py
@@ -820,7 +820,7 @@ def setUp(self):
         self.jax_enable_x64.__enter__()
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         self.jax_enable_x64.__exit__(None, None, None)
         return super().tearDown()

14 changes: 7 additions & 7 deletions keras/src/backend/openvino/core.py
@@ -243,9 +243,9 @@ def __getitem__(self, indices):
     def __len__(self):
         ov_output = self.output
         ov_shape = ov_output.get_partial_shape()
-        assert ov_shape.rank.is_static and ov_shape.rank.get_length() > 0, (
-            "rank must be static and greater than zero"
-        )
+        assert (
+            ov_shape.rank.is_static and ov_shape.rank.get_length() > 0
+        ), "rank must be static and greater than zero"
         assert ov_shape[0].is_static, "the first dimension must be static"
         return ov_shape[0].get_length()

@@ -428,10 +428,10 @@ def convert_to_numpy(x):
         x = x.value
     else:
         return x.value.data
-    assert isinstance(x, OpenVINOKerasTensor), (
-        "unsupported type {} for `convert_to_numpy` in openvino backend".format(
-            type(x)
-        )
+    assert isinstance(
+        x, OpenVINOKerasTensor
+    ), "unsupported type {} for `convert_to_numpy` in openvino backend".format(
+        type(x)
     )
     try:
         ov_result = x.output

6 changes: 3 additions & 3 deletions keras/src/backend/openvino/nn.py
@@ -325,9 +325,9 @@ def depthwise_conv(
     data_format = backend.standardize_data_format(data_format)
     num_spatial_dims = inputs.get_partial_shape().rank.get_length() - 2

-    assert data_format == "channels_last", (
-        "`depthwise_conv` is supported only for channels_last data_format"
-    )
+    assert (
+        data_format == "channels_last"
+    ), "`depthwise_conv` is supported only for channels_last data_format"

     strides = _adjust_strides_dilation(strides, num_spatial_dims)
     dilation_rate = _adjust_strides_dilation(dilation_rate, num_spatial_dims)

12 changes: 6 additions & 6 deletions keras/src/backend/openvino/numpy.py
@@ -81,9 +81,9 @@ def mean(x, axis=None, keepdims=False):


 def max(x, axis=None, keepdims=False, initial=None):
-    assert initial is None, (
-        "`max` with not None initial is not supported by openvino backend"
-    )
+    assert (
+        initial is None
+    ), "`max` with not None initial is not supported by openvino backend"
     x = get_ov_output(x)
     reduce_axis = ov_opset.constant(axis, Type.i32).output(0)
     return OpenVINOKerasTensor(

@@ -260,9 +260,9 @@ def bincount(x, weights=None, minlength=0, sparse=False):


 def broadcast_to(x, shape):
-    assert isinstance(shape, (tuple, list)), (
-        "`broadcast_to` is supported only for tuple and list `shape`"
-    )
+    assert isinstance(
+        shape, (tuple, list)
+    ), "`broadcast_to` is supported only for tuple and list `shape`"
     target_shape = ov_opset.constant(list(shape), Type.i32).output(0)
     x = get_ov_output(x)
     return OpenVINOKerasTensor(ov_opset.broadcast(x, target_shape).output(0))

11 changes: 4 additions & 7 deletions keras/src/backend/torch/layer.py
@@ -1,6 +1,3 @@
-from typing import Iterator
-from typing import Tuple
-
 import torch

 from keras.src.backend.common.stateless_scope import in_stateless_scope

@@ -30,10 +27,10 @@ def _track_variables(self):

     def named_parameters(
         self,
-        prefix: str = "",
-        recurse: bool = True,
-        remove_duplicate: bool = True,
-    ) -> Iterator[Tuple[str, torch.nn.Parameter]]:
+        prefix="",
+        recurse=True,
+        remove_duplicate=True,
+    ):
         if not hasattr(self, "_torch_params"):
             self._track_variables()
         return torch.nn.Module.named_parameters(
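
A side note on why these deletions are safe (a hedged sketch, not part of the commit): Python stores annotations as metadata in __annotations__ and never enforces them, so removing them leaves the runtime behavior of named_parameters unchanged.

def before(prefix: str = "", recurse: bool = True):
    pass

def after(prefix="", recurse=True):
    pass

print(before.__annotations__)  # {'prefix': <class 'str'>, 'recurse': <class 'bool'>}
print(after.__annotations__)   # {}
# Both functions accept the same arguments and behave identically when called.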
@@ -22,8 +22,8 @@ def __init__(self):
     def convert_format(
         self,
         boxes,
-        source: str,
-        target: str,
+        source,
+        target,
         height=None,
         width=None,
         dtype="float32",

2 changes: 1 addition & 1 deletion keras/src/ops/core_test.py
@@ -936,7 +936,7 @@ def setUp(self):
         self.jax_enable_x64.__enter__()
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         self.jax_enable_x64.__exit__(None, None, None)
         return super().tearDown()

8 changes: 4 additions & 4 deletions keras/src/ops/image_test.py
@@ -20,7 +20,7 @@ def setUp(self):
         backend.set_image_data_format("channels_last")
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         backend.set_image_data_format(self.data_format)
         return super().tearDown()

@@ -171,7 +171,7 @@ def setUp(self):
         backend.set_image_data_format("channels_last")
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         backend.set_image_data_format(self.data_format)
         return super().tearDown()

@@ -396,7 +396,7 @@ def setUp(self):
         backend.set_image_data_format("channels_last")
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         backend.set_image_data_format(self.data_format)
         return super().tearDown()

@@ -1144,7 +1144,7 @@ def setUp(self):
         backend.set_image_data_format("channels_last")
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         backend.set_image_data_format(self.data_format)
         return super().tearDown()

2 changes: 1 addition & 1 deletion keras/src/ops/math_test.py
@@ -1003,7 +1003,7 @@ def setUp(self):
         self.jax_enable_x64.__enter__()
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         self.jax_enable_x64.__exit__(None, None, None)
         return super().tearDown()

2 changes: 1 addition & 1 deletion keras/src/ops/nn_test.py
@@ -2474,7 +2474,7 @@ def setUp(self):
         self.jax_enable_x64.__enter__()
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         self.jax_enable_x64.__exit__(None, None, None)
         return super().tearDown()

2 changes: 1 addition & 1 deletion keras/src/ops/numpy_test.py
@@ -5357,7 +5357,7 @@ def setUp(self):
         self.jax_enable_x64.__enter__()
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         self.jax_enable_x64.__exit__(None, None, None)
         return super().tearDown()

2 changes: 1 addition & 1 deletion keras/src/random/random_test.py
@@ -456,7 +456,7 @@ def setUp(self):
         self.jax_enable_x64.__enter__()
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         if backend.backend() == "jax":
             self.jax_enable_x64.__exit__(None, None, None)
         return super().tearDown()

4 changes: 2 additions & 2 deletions keras/src/saving/saving_lib_test.py
@@ -253,7 +253,7 @@ def setUp(self):
         saving_lib._MEMORY_UPPER_BOUND = 0
         return super().setUp()

-    def tearDown(self) -> None:
+    def tearDown(self):
         saving_lib._MEMORY_UPPER_BOUND = self.original_value
         return super().tearDown()

@@ -621,7 +621,7 @@ def test_partial_load(self):
     )

     @pytest.mark.requires_trainable_backend
-    def test_save_to_fileobj(self) -> None:
+    def test_save_to_fileobj(self):
         model = keras.Sequential(
             [keras.layers.Dense(1, input_shape=(1,)), keras.layers.Dense(1)]
         )

2 changes: 1 addition & 1 deletion keras/src/visualization/draw_segmentation_masks.py
@@ -104,6 +104,6 @@ def draw_segmentation_masks(
     return outputs


-def _generate_color_palette(num_classes: int):
+def _generate_color_palette(num_classes):
     palette = np.array([2**25 - 1, 2**15 - 1, 2**21 - 1])
     return [((i * palette) % 255).tolist() for i in range(num_classes)]
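
The palette above spreads colors by multiplying the class index by three fixed constants and reducing mod 255, one value per RGB channel. A quick check of the first few colors (a sketch, not part of the commit):

import numpy as np

palette = np.array([2**25 - 1, 2**15 - 1, 2**21 - 1])
print([((i * palette) % 255).tolist() for i in range(3)])
# [[0, 0, 0], [1, 127, 31], [2, 254, 62]]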
4 changes: 2 additions & 2 deletions keras/src/wrappers/sklearn_test.py
@@ -21,7 +21,7 @@
 def wrapped_parametrize_with_checks(
     estimators,
     *,
-    legacy: bool = True,
+    legacy=True,
     expected_failed_checks=None,
 ):
     """Wrapped `parametrize_with_checks` handling backwards compat."""

@@ -77,7 +77,7 @@ def dynamic_model(X, y, loss, layers=[10]):


 @contextmanager
-def use_floatx(x: str):
+def use_floatx(x):
     """Context manager to temporarily
     set the keras backend precision.
     """
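
A hypothetical usage sketch of use_floatx (its body is collapsed in the diff, so the restore-on-exit behavior is assumed from the docstring; build_and_test_model is a placeholder name):

with use_floatx("float64"):
    # keras.backend.floatx() is presumably "float64" inside the block
    build_and_test_model()  # hypothetical placeholder
# the previous precision is presumably restored on exit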
2 changes: 1 addition & 1 deletion keras/src/wrappers/sklearn_wrapper.py
@@ -107,7 +107,7 @@ def __sklearn_clone__(self):
     )

     @property
-    def epoch_(self) -> int:
+    def epoch_(self):
         """The current training epoch."""
         return getattr(self, "history_", {}).get("epoch", 0)
