warn if current device doesn't support
will-jl944 committed Jan 21, 2025
1 parent aaa8e2e commit e72360c
Showing 1 changed file with 13 additions and 3 deletions.
16 changes: 13 additions & 3 deletions paddlenlp/utils/memory_utils.py
@@ -15,15 +15,25 @@
 
 import paddle
 
+from .log import logger
+from .tools import get_env_device
+
 __all__ = [
     "empty_device_cache",
 ]
 
 
 def empty_device_cache():
-    if paddle.device.is_compiled_with_cuda():
+    device = get_env_device()
+    if device == "gpu":
         paddle.device.cuda.empty_cache()
-    elif paddle.device.is_compiled_with_xpu():
+    elif device == "xpu":
         paddle.device.xpu.empty_cache()
     else:
-        pass
+        if not getattr(empty_device_cache, "has_warned", False):
+            logger.warning(
+                "The current device ({}) does not support empty cache, calling empty_device_cache() will have no effect.".format(
+                    device
+                )
+            )
+            setattr(empty_device_cache, "has_warned", True)
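
For reference, here is a sketch of how paddlenlp/utils/memory_utils.py reads after this change, reconstructed from the diff above rather than copied verbatim from the repository:

import paddle

from .log import logger
from .tools import get_env_device

__all__ = [
    "empty_device_cache",
]


def empty_device_cache():
    # Dispatch on the configured device string instead of on what Paddle
    # was compiled with.
    device = get_env_device()
    if device == "gpu":
        paddle.device.cuda.empty_cache()
    elif device == "xpu":
        paddle.device.xpu.empty_cache()
    else:
        # Unsupported device: warn only once per process by storing a flag
        # as an attribute on the function object itself.
        if not getattr(empty_device_cache, "has_warned", False):
            logger.warning(
                "The current device ({}) does not support empty cache, "
                "calling empty_device_cache() will have no effect.".format(device)
            )
            setattr(empty_device_cache, "has_warned", True)

Using a function attribute as the "already warned" flag avoids introducing a module-level global: repeated calls on an unsupported device remain no-ops and stay silent after the first warning instead of flooding the log.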
