add decay for learning rate
enigne committed Dec 18, 2024
1 parent 8f5f36a commit 57d6ca6
Showing 4 changed files with 27 additions and 2 deletions.
4 changes: 4 additions & 0 deletions pinnicle/parameter.py
@@ -372,6 +372,10 @@ def set_default(self):
         self.additional_loss = {}
         # learning rate
         self.learning_rate = 0
+        # decay steps
+        self.decay_steps = 0
+        # decay rate
+        self.decay_rate = 0.0
         # list of the weights
         self.loss_weights = []
         # setting the callbacks
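These defaults keep decay disabled: compile() (next file) only builds a decay schedule when both values are positive. A minimal sketch of enabling it through the hyperparameter dictionary; the key names come from this diff, while the surrounding values are illustrative only:

```python
# Illustrative hyperparameter dictionary; only decay_steps/decay_rate are new.
hp = {}
hp["learning_rate"] = 1e-3   # base learning rate
hp["decay_steps"] = 1000     # steps between applications of the decay
hp["decay_rate"] = 0.5       # strength of the "inverse time" schedule
# ... data, equations, and other settings as usual ...
```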
7 changes: 5 additions & 2 deletions pinnicle/pinn.py
@@ -34,7 +34,7 @@ def check_path(self, path, loadOnly=False):
             os.makedirs(path, exist_ok=True)
         return path
 
-    def compile(self, opt=None, loss=None, lr=None, loss_weights=None):
+    def compile(self, opt=None, loss=None, lr=None, loss_weights=None, decay=None):
         """ compile the model
         """
         # load from params
@@ -47,11 +47,14 @@ def compile(self, opt=None, loss=None, lr=None, loss_weights=None):
         if lr is None:
             lr = self.params.training.learning_rate
 
+        if (decay is None) and (self.params.training.decay_steps > 0) and (self.params.training.decay_rate > 0.0):
+            decay = ("inverse time", self.params.training.decay_steps, self.params.training.decay_rate)
+
         if loss_weights is None:
             loss_weights = self.params.training.loss_weights
 
         # compile the model
-        self.model.compile(opt, loss=loss, lr=lr, loss_weights=loss_weights)
+        self.model.compile(opt, loss=loss, lr=lr, decay=decay, loss_weights=loss_weights)
 
     def load_model(self, path="", epochs=-1, subfolder="pinn", name="model", fileformat=""):
         """load the neural network from saved model
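The tuple ("inverse time", decay_steps, decay_rate) is passed straight through to DeepXDE's Model.compile(..., decay=...). A standalone sketch of the resulting schedule, assuming DeepXDE applies the TensorFlow-style inverse-time formula lr(step) = lr0 / (1 + decay_rate * step / decay_steps):

```python
def inverse_time_decay(lr0, decay_steps, decay_rate, step):
    """Inverse-time schedule: lr0 / (1 + decay_rate * step / decay_steps)."""
    return lr0 / (1.0 + decay_rate * step / decay_steps)

# With the values used in the tests below (decay_steps=5, decay_rate=0.3) and
# an assumed base rate of 1e-3, the rate drops to ~7.7e-4 after 5 steps and
# halves around step 17.
print(inverse_time_decay(1e-3, 5, 0.3, 5))  # ~0.000769
```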
4 changes: 4 additions & 0 deletions tests/test_parameters.py
@@ -156,7 +156,11 @@ def test_dummy_equation_parameters():
 
 def test_training_parameters():
     hp = {}
+    hp['decay_steps'] = 10
+    hp['decay_rate'] = 0.3
     p = TrainingParameter(hp)
+    assert p.decay_steps == 10
+    assert p.decay_rate == 0.3
     assert p.additional_loss == {}
     u_loss = {}
     u_loss['name'] = "vel log"
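A complementary check, not part of this commit, that the new defaults leave decay disabled when the keys are absent; it assumes TrainingParameter falls back to set_default() for unspecified fields and that the import path matches the file layout above:

```python
from pinnicle.parameter import TrainingParameter  # assumed import path

p = TrainingParameter({})
assert p.decay_steps == 0    # default from set_default()
assert p.decay_rate == 0.0   # default from set_default()
```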
14 changes: 14 additions & 0 deletions tests/test_pinn.py
@@ -142,6 +142,20 @@ def test_train(tmp_path):
     hp_local["data"] = {"ISSM": issm}
     hp_local["equations"] = {"SSA":SSA}
     experiment = pinn.PINN(params=hp_local)
+    experiment.compile(decay=("inverse time", 5, 0.3))
     experiment.train()
     assert experiment.loss_names == ['fSSA1', 'fSSA2', 'u', 'v', 's', 'H', 'C']
 
+def test_train_decay(tmp_path):
+    hp_local = dict(hp)
+    hp_local["is_save"] = False
+    hp_local["num_collocation_points"] = 100
+    issm["data_size"] = {"u":None, "v":100, "s":100, "H":100, "C":None}
+    hp_local["data"] = {"ISSM": issm}
+    hp_local["equations"] = {"SSA":SSA}
+    hp_local["decay_steps"] = 5
+    hp_local["decay_rate"] = 0.3
+    experiment = pinn.PINN(params=hp_local)
+    experiment.compile()
+    experiment.train()
+    assert experiment.loss_names == ['fSSA1', 'fSSA2', 'u', 'v', 's', 'H', 'C']
