diff --git a/CHANGELOG.md b/CHANGELOG.md
index e050ac21..b5fd06d9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,62 @@
 # Release Notes
 
+## v1.3.0
+
+Feature and maintenance update.
+
+### Highlights
+
+* `python 3.13` support
+* `tide` model
+* bugfixes for TFT
+
+### Enhancements
+
+* [ENH] Tide model by @Sohaib-Ahmed21 in https://github.com/sktime/pytorch-forecasting/pull/1734
+* [ENH] refactor `__init__` modules to no longer contain classes - preparatory commit by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1739
+* [ENH] refactor `__init__` modules to no longer contain classes by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1738
+* [ENH] extend package author attribution requirement in license to present by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1737
+* [ENH] linting tide model by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1742
+* [ENH] move tide model - part 1 by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1743
+* [ENH] move tide model - part 2 by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1744
+* [ENH] clean-up refactor of `TimeSeriesDataSet` by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1746
+
+### Fixes
+
+* [BUG] Bugfix when no exogenous variable is passed to TFT by @XinyuWuu in https://github.com/sktime/pytorch-forecasting/pull/1667
+* [BUG] Fix issue when training TFT model on Mac M1 `mps` device: "element 0 of tensors does not require grad and does not have a grad_fn" by @fnhirwa in https://github.com/sktime/pytorch-forecasting/pull/1725
+
+### Documentation
+
+* [DOC] Fix the spelling error of "holding" by @xiaokongkong in https://github.com/sktime/pytorch-forecasting/pull/1719
+* [DOC] Updated documentation on `TimeSeriesDataSet.predict_mode` by @madprogramer in https://github.com/sktime/pytorch-forecasting/pull/1720
+* [DOC] General PR to improve docs by @julian-fong in https://github.com/sktime/pytorch-forecasting/pull/1705
+* [DOC] Correct argument for optimizer `ranger` in `Temporal Fusion Transformer` tutorial by @fnhirwa in https://github.com/sktime/pytorch-forecasting/pull/1724
+* [DOC] Fixed typo "monotone_constaints" by @Luke-Chesley in https://github.com/sktime/pytorch-forecasting/pull/1516
+* [DOC] minor fixes in documentation by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1763
+* [DOC] improve and add `tide` model to docs by @PranavBhatP in https://github.com/sktime/pytorch-forecasting/pull/1762
+
+### Maintenance
+
+* [MNT] update linting: limit line length to 88, add `isort` by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1740
+* [MNT] update nbeats/sub_modules.py to remove overhead in tensor creation by @d-schmitt in https://github.com/sktime/pytorch-forecasting/pull/1580
+* [MNT] Temporary fix for lint errors to conform to the recent changes in linting rules, see #1749, by @fnhirwa in https://github.com/sktime/pytorch-forecasting/pull/1748
+* [MNT] python 3.13 support by @fkiraly in https://github.com/sktime/pytorch-forecasting/pull/1691
+
+### All Contributors
+
+@d-schmitt,
+@fkiraly,
+@fnhirwa,
+@julian-fong,
+@Luke-Chesley,
+@madprogramer,
+@PranavBhatP,
+@Sohaib-Ahmed21,
+@xiaokongkong,
+@XinyuWuu
+
+
 ## v1.2.0
 
 Maintenance update, minor feature additions and bugfixes.
diff --git a/pyproject.toml b/pyproject.toml
index 9f324908..f3d1e339 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "pytorch-forecasting"
 readme = "README.md" # Markdown files are supported
-version = "1.2.0" # is being replaced automatically
+version = "1.3.0" # is being replaced automatically
 authors = [
   {name = "Jan Beitner"},
diff --git a/pytorch_forecasting/__init__.py b/pytorch_forecasting/__init__.py
index 6ec197d6..dede44fd 100644
--- a/pytorch_forecasting/__init__.py
+++ b/pytorch_forecasting/__init__.py
@@ -2,7 +2,7 @@
 PyTorch Forecasting package for timeseries forecasting with PyTorch.
 """
 
-__version__ = "1.2.0"
+__version__ = "1.3.0"
 
 from pytorch_forecasting.data import (
     EncoderNormalizer,
diff --git a/pytorch_forecasting/models/base_model.py b/pytorch_forecasting/models/base_model.py
index 9a5bb90c..5e6c6839 100644
--- a/pytorch_forecasting/models/base_model.py
+++ b/pytorch_forecasting/models/base_model.py
@@ -485,7 +485,7 @@ def __init__(
         optimizer_params: Dict[str, Any] = None,
         monotone_constraints: Dict[str, int] = {},
         output_transformer: Callable = None,
-        optimizer=None,
+        optimizer="adam",
     ):
         """
         BaseModel for timeseries forecasting from which to inherit from
@@ -518,9 +518,7 @@
             optimizer (str): Optimizer, "ranger", "sgd", "adam", "adamw" or class name of optimizer in
                 ``torch.optim`` or ``pytorch_optimizer``.
                 Alternatively, a class or function can be passed which takes parameters as first argument and
-                a `lr` argument (optionally also `weight_decay`). Defaults to
-                `"ranger" `_,
-                if pytorch_optimizer is installed, otherwise "adam".
+                a `lr` argument (optionally also `weight_decay`). Defaults to "adam".
         """  # noqa: E501
         if monotone_constraints is None:
             monotone_constraints = {}
@@ -529,38 +527,6 @@
         frame = inspect.currentframe()
         init_args = get_init_args(frame)
 
-        # TODO 1.2.0: remove warnings and change default optimizer to "adam"
-        if init_args["optimizer"] is None:
-            ptopt_in_env = "pytorch_optimizer" in _get_installed_packages()
-            if ptopt_in_env:
-                init_args["optimizer"] = "ranger"
-                warnings.warn(
-                    "In pytorch-forecasting models, from version 1.2.0, "
-                    "the default optimizer will be 'adam', in order to "
-                    "minimize the number of dependencies in default"
-                    " parameter settings. Users who wish to ensure their"
-                    " code continues using 'ranger' as optimizer should ensure"
-                    " that pytorch_optimizer is installed, and set the optimizer "
-                    "parameter explicitly to 'ranger'.",
-                    stacklevel=2,
-                )
-            else:
-                init_args["optimizer"] = "adam"
-                warnings.warn(
-                    "In pytorch-forecasting models, on versions 1.1.X, "
-                    "the default optimizer defaults to 'adam', "
-                    "if pytorch_optimizer is not installed, "
-                    "otherwise it defaults to 'ranger' from pytorch_optimizer. "
-                    "From version 1.2.0, the default optimizer will be 'adam' "
-                    "regardless of whether pytorch_optimizer is installed, in order to "
-                    "minimize the number of dependencies in default parameter"
-                    " settings. Users who wish to ensure their code continues"
-                    " using 'ranger' as optimizer should ensure that pytorch_optimizer"
-                    " is installed, and set the optimizer "
-                    "parameter explicitly to 'ranger'.",
-                    stacklevel=2,
-                )
-
         self.save_hyperparameters(
             {
                 name: val
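Note on the `base_model.py` change: the version-1.2.0 deprecation shim is removed, so `optimizer=None` plus the runtime fallback to "ranger" is gone and the signature now defaults to `optimizer="adam"` unconditionally. Below is a minimal sketch of what this means for user code, under stated assumptions: `training` stands in for an already-built `TimeSeriesDataSet` (not shown), and the "ranger" option requires the optional `pytorch_optimizer` package, as the removed warning text describes.

```python
import torch

from pytorch_forecasting import TemporalFusionTransformer

# `training` is assumed to be an existing TimeSeriesDataSet (not built here).

# From v1.3.0 on, omitting `optimizer` always selects "adam", whether or not
# pytorch_optimizer is installed; no deprecation warning is emitted anymore.
tft_default = TemporalFusionTransformer.from_dataset(training)

# Code that relied on the old implicit "ranger" default must now opt in
# explicitly (and have the optional pytorch_optimizer package installed):
tft_ranger = TemporalFusionTransformer.from_dataset(training, optimizer="ranger")

# Per the docstring, a class or callable taking the parameters as first
# argument and an `lr` argument is also accepted, e.g. from torch.optim:
tft_adamw = TemporalFusionTransformer.from_dataset(
    training,
    optimizer=torch.optim.AdamW,
    optimizer_params={"weight_decay": 1e-2},  # illustrative extra kwargs
)
```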