Skip to content

Commit

Permalink
refactor: remove assert statement from non-test files (#7029)
Browse files Browse the repository at this point in the history
Usage of the `assert` statement in application logic is discouraged:
`assert` statements are removed when compiling to optimized byte code
(e.g. under `python -O`). Consider raising an exception instead. Ideally,
`assert` statements should be used only in tests.

---------

Co-authored-by: deepsource-autofix[bot] <62050782+deepsource-autofix[bot]@users.noreply.github.com>
  • Loading branch information
deepsource-autofix[bot] authored Sep 22, 2023
1 parent c21df49 commit 9b81f10
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 3 deletions.
2 changes: 1 addition & 1 deletion monai/apps/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ def download_url(
with urlopen(url) as response:
code = response.getcode()
if code == 200:
download_url = json.loads(response.read())["href"]
download_url = json.load(response)["href"]
_download_with_progress(download_url, tmp_name, progress=progress)
else:
raise RuntimeError(
Expand Down
6 changes: 4 additions & 2 deletions monai/networks/blocks/pos_embed_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,8 @@ def build_sincos_position_embedding(

grid_h, grid_w = torch.meshgrid(grid_h, grid_w, indexing="ij")

assert embed_dim % 4 == 0, "Embed dimension must be divisible by 4 for 2D sin-cos position embedding"
if embed_dim % 4 != 0:
raise AssertionError("Embed dimension must be divisible by 4 for 2D sin-cos position embedding")

pos_dim = embed_dim // 4
omega = torch.arange(pos_dim, dtype=torch.float32) / pos_dim
Expand All @@ -75,7 +76,8 @@ def build_sincos_position_embedding(

grid_h, grid_w, grid_d = torch.meshgrid(grid_h, grid_w, grid_d, indexing="ij")

assert embed_dim % 6 == 0, "Embed dimension must be divisible by 6 for 3D sin-cos position embedding"
if embed_dim % 6 != 0:
raise AssertionError("Embed dimension must be divisible by 6 for 3D sin-cos position embedding")

pos_dim = embed_dim // 6
omega = torch.arange(pos_dim, dtype=torch.float32) / pos_dim
Expand Down

0 comments on commit 9b81f10

Please sign in to comment.