style(mypy): silence incorrect typing warnings (#375)
* style(pyproject): update mypy config

* style(mypy): silenced typing warnings
frgfm authored Sep 11, 2024
1 parent 71ba1ab commit 84f775c
Showing 7 changed files with 8 additions and 8 deletions.
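
The change applied across the optimizer modules below is a scoped ignore: a "# type: ignore[<error-code>]" comment suppresses exactly one mypy error code on one line, whereas a bare "# type: ignore" would hide every error reported there. A minimal sketch of the pattern, assuming a hypothetical module that is not part of holocron (build_optimizer and its body are illustrative only; the two bracketed codes are the ones this commit uses):

import torch
from torch.optim import Adam  # type: ignore[attr-defined]

def build_optimizer(model: torch.nn.Module) -> torch.optim.Optimizer:  # type: ignore[name-defined]
    # Only the bracketed code is silenced; any other mypy error on these
    # lines would still be reported.
    return Adam(model.parameters(), lr=1e-3)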
2 changes: 1 addition & 1 deletion holocron/optim/adabelief.py
@@ -8,7 +8,7 @@

import torch
from torch import Tensor
-from torch.optim import Adam
+from torch.optim import Adam # type: ignore[attr-defined]

__all__ = ["AdaBelief", "adabelief"]

2 changes: 1 addition & 1 deletion holocron/optim/adamp.py
@@ -9,7 +9,7 @@
import torch
from torch import Tensor
from torch.nn import functional as F
-from torch.optim import Adam
+from torch.optim import Adam # type: ignore[attr-defined]

__all__ = ["AdamP", "adamp"]

2 changes: 1 addition & 1 deletion holocron/optim/adan.py
@@ -8,7 +8,7 @@

import torch
from torch import Tensor
-from torch.optim import Adam
+from torch.optim import Adam # type: ignore[attr-defined]

__all__ = ["Adan", "adan"]

2 changes: 1 addition & 1 deletion holocron/optim/ademamix.py
@@ -8,7 +8,7 @@

import torch
from torch import Tensor
-from torch.optim import Optimizer
+from torch.optim import Optimizer # type: ignore[attr-defined]

__all__ = ["AdEMAMix", "ademamix"]

4 changes: 2 additions & 2 deletions holocron/optim/wrapper.py
@@ -30,7 +30,7 @@ class Lookahead(Optimizer):

def __init__(
self,
-base_optimizer: torch.optim.Optimizer,
+base_optimizer: torch.optim.Optimizer, # type: ignore[name-defined]
sync_rate: float = 0.5,
sync_period: int = 6,
) -> None:
@@ -154,7 +154,7 @@ class Scout(Optimizer):

def __init__(
self,
-base_optimizer: torch.optim.Optimizer,
+base_optimizer: torch.optim.Optimizer, # type: ignore[name-defined]
sync_rate: float = 0.5,
sync_period: int = 6,
) -> None:
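
As a usage note, both wrappers keep the constructor shown in these hunks: they take an already-built torch.optim.Optimizer plus the two sync arguments. A hedged sketch (the model and base optimizer are hypothetical; argument names and defaults are taken from the signatures above, and Scout is constructed the same way):

import torch
from holocron.optim.wrapper import Lookahead

model = torch.nn.Linear(8, 2)
base = torch.optim.SGD(model.parameters(), lr=0.1)
optimizer = Lookahead(base, sync_rate=0.5, sync_period=6)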
2 changes: 1 addition & 1 deletion holocron/trainer/core.py
@@ -48,7 +48,7 @@ def __init__(
train_loader: DataLoader,
val_loader: DataLoader,
criterion: nn.Module,
-optimizer: torch.optim.Optimizer,
+optimizer: torch.optim.Optimizer, # type: ignore[name-defined]
gpu: Optional[int] = None,
output_file: str = "./checkpoint.pth",
amp: bool = False,
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -204,7 +204,7 @@ quote-style = "double"
indent-style = "space"

[tool.mypy]
-python_version = "3.9"
+python_version = "3.11"
files = "holocron/"
show_error_codes = true
pretty = true
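
With files = "holocron/" and show_error_codes = true, running mypy from the repository root checks the package under the bumped python_version and reports the bracketed error codes that the ignore comments in this commit reference. A hedged sketch of driving the same check programmatically; mypy.api.run is mypy's documented entry point, while the surrounding script is illustrative and not part of the repository:

from mypy import api

# Run mypy over the package, picking up [tool.mypy] from pyproject.toml in the
# current working directory; the path mirrors the files setting above.
stdout, stderr, exit_status = api.run(["holocron/"])
print(stdout, end="")
raise SystemExit(exit_status)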
