From 57d6ca66741f5bbd6638e052af6dcd66cc989f31 Mon Sep 17 00:00:00 2001
From: Cheng Gong
Date: Wed, 18 Dec 2024 11:55:45 -0500
Subject: [PATCH] add decay for learning rate

---
 pinnicle/parameter.py    |  4 ++++
 pinnicle/pinn.py         |  7 +++++--
 tests/test_parameters.py |  4 ++++
 tests/test_pinn.py       | 14 ++++++++++++++
 4 files changed, 27 insertions(+), 2 deletions(-)

diff --git a/pinnicle/parameter.py b/pinnicle/parameter.py
index 5a20e11..356b9a1 100644
--- a/pinnicle/parameter.py
+++ b/pinnicle/parameter.py
@@ -372,6 +372,10 @@ def set_default(self):
         self.additional_loss = {}
         # learning rate
         self.learning_rate = 0
+        # decay steps
+        self.decay_steps = 0
+        # decay rate
+        self.decay_rate = 0.0
         # list of the weights
         self.loss_weights = []
         # setting the callbacks
diff --git a/pinnicle/pinn.py b/pinnicle/pinn.py
index 4ab8f32..bfb8563 100644
--- a/pinnicle/pinn.py
+++ b/pinnicle/pinn.py
@@ -34,7 +34,7 @@ def check_path(self, path, loadOnly=False):
             os.makedirs(path, exist_ok=True)
         return path
 
-    def compile(self, opt=None, loss=None, lr=None, loss_weights=None):
+    def compile(self, opt=None, loss=None, lr=None, loss_weights=None, decay=None):
         """ compile the model
         """
         # load from params
@@ -47,11 +47,14 @@ def compile(self, opt=None, loss=None, lr=None, loss_weights=None):
         if lr is None:
             lr = self.params.training.learning_rate
 
+        if (decay is None) and (self.params.training.decay_steps > 0) and (self.params.training.decay_rate > 0.0):
+            decay = ("inverse time", self.params.training.decay_steps, self.params.training.decay_rate)
+
         if loss_weights is None:
             loss_weights = self.params.training.loss_weights
 
         # compile the model
-        self.model.compile(opt, loss=loss, lr=lr, loss_weights=loss_weights)
+        self.model.compile(opt, loss=loss, lr=lr, decay=decay, loss_weights=loss_weights)
 
     def load_model(self, path="", epochs=-1, subfolder="pinn", name="model", fileformat=""):
         """laod the neural network from saved model
diff --git a/tests/test_parameters.py b/tests/test_parameters.py
index 8e807bc..e8de861 100644
--- a/tests/test_parameters.py
+++ b/tests/test_parameters.py
@@ -156,7 +156,11 @@ def test_dummy_equation_parameters():
 
 def test_training_parameters():
     hp = {}
+    hp['decay_steps'] = 10
+    hp['decay_rate'] = 0.3
     p = TrainingParameter(hp)
+    assert p.decay_steps == 10
+    assert p.decay_rate == 0.3
     assert p.additional_loss == {}
     u_loss = {}
     u_loss['name'] = "vel log"
diff --git a/tests/test_pinn.py b/tests/test_pinn.py
index 82d74ab..82d3b4c 100644
--- a/tests/test_pinn.py
+++ b/tests/test_pinn.py
@@ -142,6 +142,20 @@ def test_train(tmp_path):
     hp_local["data"] = {"ISSM": issm}
     hp_local["equations"] = {"SSA":SSA}
     experiment = pinn.PINN(params=hp_local)
+    experiment.compile(decay=("inverse time", 5, 0.3))
+    experiment.train()
+    assert experiment.loss_names == ['fSSA1', 'fSSA2', 'u', 'v', 's', 'H', 'C']
+
+def test_train_decay(tmp_path):
+    hp_local = dict(hp)
+    hp_local["is_save"] = False
+    hp_local["num_collocation_points"] = 100
+    issm["data_size"] = {"u":None, "v":100, "s":100, "H":100, "C":None}
+    hp_local["data"] = {"ISSM": issm}
+    hp_local["equations"] = {"SSA":SSA}
+    hp_local["decay_steps"] = 5
+    hp_local["decay_rate"] = 0.3
+    experiment = pinn.PINN(params=hp_local)
     experiment.compile()
     experiment.train()
     assert experiment.loss_names == ['fSSA1', 'fSSA2', 'u', 'v', 's', 'H', 'C']
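
With this change, inverse-time learning-rate decay can be enabled either through the hyperparameter dictionary (as in test_train_decay) or by passing decay directly to compile() (as in test_train). A minimal usage sketch follows, assuming the same hyperparameter-dict workflow used in the tests above; the concrete values (learning_rate, decay_steps=1000, decay_rate=0.3) are illustrative only and not taken from the patch:

    # enable inverse-time decay via hyperparameters (values are illustrative)
    hp["learning_rate"] = 1e-3
    hp["decay_steps"] = 1000   # stored in TrainingParameter.decay_steps
    hp["decay_rate"] = 0.3     # stored in TrainingParameter.decay_rate
    experiment = pinn.PINN(params=hp)
    experiment.compile()       # builds decay=("inverse time", 1000, 0.3) from the params
    experiment.train()

    # or pass the decay schedule directly, overriding the hyperparameters
    experiment.compile(decay=("inverse time", 1000, 0.3))

Both paths end up calling self.model.compile(..., decay=decay, ...), so decay stays None (no schedule) unless both decay_steps and decay_rate are set to positive values or a decay tuple is passed explicitly.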