Skip to content

Commit

Permalink
Merge branch 'ISSMteam:main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
mansakrishna23 authored May 14, 2024
2 parents 1ff1cfe + d3be2d4 commit aa409b1
Show file tree
Hide file tree
Showing 5 changed files with 97 additions and 6 deletions.
31 changes: 29 additions & 2 deletions pinnicle/nn/nn.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,10 @@ def __init__(self, parameters=NNParameter()):
self.parameters = parameters

# create new NN
self.net = self.createFNN()
if self.parameters.is_parallel:
self.net = self.createPFNN()
else:
self.net = self.createFNN()

# apply transform
# by default, use min-max scale for the input
Expand All @@ -27,8 +30,32 @@ def createFNN(self):
"""
create a fully connected neural network
"""
layer_size = [self.parameters.input_size] + [self.parameters.num_neurons] * self.parameters.num_layers + [self.parameters.output_size]
if isinstance(self.parameters.num_neurons, list):
# directly use the given list of num_neurons
layer_size = [self.parameters.input_size] + \
self.parameters.num_neurons + \
[self.parameters.output_size]
else:
# repeat num_layers times
layer_size = [self.parameters.input_size] + \
[self.parameters.num_neurons] * self.parameters.num_layers + \
[self.parameters.output_size]

return dde.nn.FNN(layer_size, self.parameters.activation, self.parameters.initializer)

def createPFNN(self):
    """Build a parallel fully connected network via dde.nn.PFNN.

    Each hidden layer is specified as a list with one width per output
    variable, which is the per-subnetwork layer format PFNN expects.
    If ``num_neurons`` is a list it gives one width per hidden layer;
    otherwise a single width is repeated ``num_layers`` times.
    """
    p = self.parameters
    if isinstance(p.num_neurons, list):
        # one entry per hidden layer, each width replicated across outputs
        hidden = [[width] * p.output_size for width in p.num_neurons]
    else:
        # uniform width repeated num_layers times
        hidden = [[p.num_neurons] * p.output_size] * p.num_layers
    layer_size = [p.input_size] + hidden + [p.output_size]
    return dde.nn.PFNN(layer_size, p.activation, p.initializer)

def _add_input_transform(self, func):
"""
Expand Down
7 changes: 7 additions & 0 deletions pinnicle/parameter.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,9 @@ def set_default(self):
self.activation = "tanh"
self.initializer = "Glorot uniform"

# parallel neural network
self.is_parallel = False

# scaling parameters
self.input_lb = None
self.input_ub = None
Expand Down Expand Up @@ -207,6 +210,10 @@ def set_parameters(self, pdict: dict):
super().set_parameters(pdict)
self.input_size = len(self.input_variables)
self.output_size = len(self.output_variables)
# num_neurons is given as a list
if isinstance(self.num_neurons, list):
self.num_layers = len(self.num_neurons)



class PhysicsParameter(ParameterBase):
Expand Down
31 changes: 31 additions & 0 deletions tests/test_nn.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,3 +38,34 @@ def test_output_scale_nn():
x = np.linspace(-1.0, 1.0, 100)
assert np.all(p.net._output_transform(0, x) > d.output_lb - d.output_lb*np.finfo(float).eps)
assert np.all(p.net._output_transform(0, x) < d.output_ub + d.output_ub*np.finfo(float).eps)

def test_pfnn():
    """FNN wrapper builds a parallel network when is_parallel is set."""
    config = {
        'input_variables': ['x', 'y'],
        'output_variables': ['u', 'v', 's'],
        'num_neurons': 4,
        'num_layers': 5,
        'is_parallel': False,
    }
    # serial network
    serial = pinn.nn.FNN(NNParameter(config))
    assert len(serial.net.layers) == 6
    # same settings with the parallel flag on
    config['is_parallel'] = True
    parallel = pinn.nn.FNN(NNParameter(config))
    assert len(parallel.net.layers) == 18

def test_pfnn_list_neuron():
    """num_neurons given as a list fixes the hidden-layer widths directly."""
    config = {
        'input_variables': ['x', 'y'],
        'output_variables': ['u', 'v', 's'],
        'num_neurons': [3, 4, 5],
        'num_layers': 5,  # overridden by len(num_neurons) in NNParameter
        'is_parallel': False,
    }
    serial = pinn.nn.FNN(NNParameter(config))
    assert len(serial.net.layers) == 4
    config['is_parallel'] = True
    parallel = pinn.nn.FNN(NNParameter(config))
    assert len(parallel.net.layers) == 12

4 changes: 3 additions & 1 deletion tests/test_parameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,9 @@ def test_nn_parameter():
d.output_lb = 1
d.output_ub = 10
assert d.is_output_scaling()

d = NNParameter({"num_neurons":[1,2,3]})
assert d.num_layers == 3

def test_parameters():
p = Parameters()
domain = DomainParameter()
Expand Down
30 changes: 27 additions & 3 deletions tests/test_pinn.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
hp["activation"] = "tanh"
hp["initializer"] = "Glorot uniform"
hp["num_neurons"] = 10
hp["num_layers"] = 6
hp["num_layers"] = 4

# data
issm = {}
Expand Down Expand Up @@ -99,7 +99,32 @@ def test_save_and_load_setting(tmp_path):
experiment2 = pinn.PINN(loadFrom=tmp_path)
assert experiment.params.param_dict == experiment2.params.param_dict

#def test_train(tmp_path):
def test_train(tmp_path):
    """End-to-end compile/train of the default (serial) PINN setup."""
    # reuse the module-level hp/issm settings, trimmed for a fast run
    hp.update({"is_save": False, "num_collocation_points": 100})
    issm["data_size"] = {"u": 100, "v": 100, "s": 100, "H": 100, "C": None, "vel": 100}
    hp["data"] = {"ISSM": issm}
    experiment = pinn.PINN(params=hp)
    experiment.compile()
    experiment.train()
    expected = ['fSSA1', 'fSSA2', 'u', 'v', 's', 'H', 'C', 'vel log']
    assert experiment.loss_names == expected

def test_train_PFNN(tmp_path):
    """End-to-end compile/train of a parallel (PFNN) PINN.

    ``num_neurons`` is passed as a list, which also rewrites
    ``num_layers`` to the list length inside NNParameter; the resulting
    network topology is checked after training.
    """
    hp["is_parallel"] = True
    hp["is_save"] = False
    hp["num_collocation_points"] = 100
    issm["data_size"] = {"u": 100, "v": 100, "s": 100, "H": 100, "C": None, "vel": 100}
    hp["num_neurons"] = [4, 10]  # list form: num_layers becomes len([4, 10]) == 2
    hp["data"] = {"ISSM": issm}
    experiment = pinn.PINN(params=hp)
    experiment.compile()
    experiment.train()
    assert experiment.loss_names == ['fSSA1', 'fSSA2', 'u', 'v', 's', 'H', 'C', "vel log"]
    assert experiment.params.nn.num_layers == 2
    # layer/weight counts for the parallel topology built by createPFNN
    assert len(experiment.model.net.layers) == 5*(2+1)
    assert len(experiment.model.net.trainable_weights) == 30

#def test_save_train(tmp_path):
# hp["save_path"] = str(tmp_path)
# hp["is_save"] = True
# hp["num_collocation_points"] = 100
Expand Down Expand Up @@ -144,7 +169,6 @@ def test_only_callbacks(tmp_path):
assert callbacks is not None
assert len(callbacks) == 3


def test_plot(tmp_path):
hp["save_path"] = str(tmp_path)
hp["is_save"] = True
Expand Down

0 comments on commit aa409b1

Please sign in to comment.