Skip to content

Commit

Permalink
Remove the FLAGS_max_inplace_grad_add flag from AMP setup
Browse files Browse the repository at this point in the history
  • Loading branch information
phlrain committed Jan 26, 2025
1 parent cf03afb commit 926d321
Show file tree
Hide file tree
Showing 3 changed files with 0 additions and 3 deletions.
1 change: 0 additions & 1 deletion test_tipc/supplementary/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,6 @@ def amp_scaler(config):
if "AMP" in config and config["AMP"]["use_amp"] is True:
AMP_RELATED_FLAGS_SETTING = {
"FLAGS_cudnn_batchnorm_spatial_persistent": 1,
"FLAGS_max_inplace_grad_add": 8,
}
paddle.set_flags(AMP_RELATED_FLAGS_SETTING)
scale_loss = config["AMP"].get("scale_loss", 1.0)
Expand Down
1 change: 0 additions & 1 deletion tools/eval.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,6 @@ def main():
if use_amp:
AMP_RELATED_FLAGS_SETTING = {
"FLAGS_cudnn_batchnorm_spatial_persistent": 1,
"FLAGS_max_inplace_grad_add": 8,
}
paddle.set_flags(AMP_RELATED_FLAGS_SETTING)
scale_loss = config["Global"].get("scale_loss", 1.0)
Expand Down
1 change: 0 additions & 1 deletion tools/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,6 @@ def main(config, device, logger, vdl_writer, seed):
pass
if use_amp:
AMP_RELATED_FLAGS_SETTING = {
"FLAGS_max_inplace_grad_add": 8,
}
if paddle.is_compiled_with_cuda():
AMP_RELATED_FLAGS_SETTING.update(
Expand Down

0 comments on commit 926d321

Please sign in to comment.