Commit d003c00: arg name changes (rename adaptative_activation to adaptive_activation)
MartinAchondo committed Jul 16, 2024
1 parent: 422ad34
Showing 7 changed files with 24 additions and 24 deletions.
README.md (4 changes: 2 additions & 2 deletions)

@@ -125,7 +125,7 @@ The Simulation object import a YAML file with all the problem definitions. An ex
   num_hidden_layers: 4
   num_neurons_per_layer: 200
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   fourier_features: true
   weight_factorization: false
@@ -134,7 +134,7 @@ The Simulation object import a YAML file with all the problem definitions. An ex
   num_hidden_layers: 4
   num_neurons_per_layer: 200
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   fourier_features: true
   weight_factorization: false
 ```
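A quick way to confirm that a config picked up the rename is to load it and check the key. This is a minimal sketch, not part of the commit; it assumes only PyYAML and the hyperparameters_in section shown in the diffs.

```python
# Minimal sketch (not part of this commit): confirm a config uses the renamed key.
# Assumes PyYAML and the hyperparameters_in section shown in the diffs.
import yaml

with open('xppbe/Simulation.yaml') as f:
    cfg = yaml.safe_load(f)

assert 'adaptive_activation' in cfg['hyperparameters_in']        # new name present
assert 'adaptative_activation' not in cfg['hyperparameters_in']  # old name gone
```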
tests/simulations_yaml/test_arg.yaml (4 changes: 2 additions & 2 deletions)

@@ -40,7 +40,7 @@ hyperparameters_in:
   num_neurons_per_layer: 20
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: true
@@ -51,7 +51,7 @@ hyperparameters_out:
   num_neurons_per_layer: 20
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: true
tests/simulations_yaml/test_born_ion.yaml (4 changes: 2 additions & 2 deletions)

@@ -40,7 +40,7 @@ hyperparameters_in:
   num_neurons_per_layer: 20
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: true
@@ -51,7 +51,7 @@ hyperparameters_out:
   num_neurons_per_layer: 20
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: true
tests/simulations_yaml/test_methanol.yaml (4 changes: 2 additions & 2 deletions)

@@ -40,7 +40,7 @@ hyperparameters_in:
   num_neurons_per_layer: 20
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: true
@@ -51,7 +51,7 @@ hyperparameters_out:
   num_neurons_per_layer: 20
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: true
tests/simulations_yaml/test_sphere_+1-1.yaml (4 changes: 2 additions & 2 deletions)

@@ -40,7 +40,7 @@ hyperparameters_in:
   num_neurons_per_layer: 20
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: true
@@ -51,7 +51,7 @@ hyperparameters_out:
   num_neurons_per_layer: 20
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: true
xppbe/NN/NeuralNet.py (24 changes: 12 additions & 12 deletions)

@@ -54,7 +54,7 @@ def __init__(self,
                  num_neurons_per_layer=20,
                  num_hidden_blocks=2,
                  activation='tanh',
-                 adaptative_activation=False,
+                 adaptive_activation=False,
                  kernel_initializer='glorot_normal',
                  architecture_Net='FCNN',
                  fourier_features=False,
@@ -75,7 +75,7 @@ def __init__(self,
         self.num_hidden_blocks = num_hidden_blocks
 
         self.activation = activation
-        self.adaptative_activation = adaptative_activation
+        self.adaptive_activation = adaptive_activation
 
         self.kernel_initializer = kernel_initializer
         self.architecture_Net = architecture_Net
@@ -143,7 +143,7 @@ def create_FCNN(self):
         layer = self.Dense_Layer(self.num_neurons_per_layer,
                                  activation=CustomActivation(units=self.num_neurons_per_layer,
                                                              activation=self.activation,
-                                                             adaptative_activation=self.adaptative_activation),
+                                                             adaptive_activation=self.adaptive_activation),
                                  kernel_initializer=self.kernel_initializer,
                                  name=f'layer_{i}')
         self.hidden_layers.append(layer)
@@ -154,13 +154,13 @@ def create_ModMLP(self):
         self.U = self.Dense_Layer(self.num_neurons_per_layer,
                                   activation=CustomActivation(units=self.num_neurons_per_layer,
                                                               activation=self.activation,
-                                                              adaptative_activation=self.adaptative_activation),
+                                                              adaptive_activation=self.adaptive_activation),
                                   kernel_initializer=self.kernel_initializer,
                                   name=f'layer_u')
         self.V = self.Dense_Layer(self.num_neurons_per_layer,
                                   activation=CustomActivation(units=self.num_neurons_per_layer,
                                                               activation=self.activation,
-                                                              adaptative_activation=self.adaptative_activation),
+                                                              adaptive_activation=self.adaptive_activation),
                                   kernel_initializer=self.kernel_initializer,
                                   name=f'layer_v')
         self.call_architecture = self.call_ModMLP
@@ -170,7 +170,7 @@ def create_ResNet(self):
         self.first = self.Dense_Layer(self.num_neurons_per_layer,
                                       activation=CustomActivation(units=self.num_neurons_per_layer,
                                                                   activation=self.activation,
-                                                                  adaptative_activation=self.adaptative_activation),
+                                                                  adaptive_activation=self.adaptive_activation),
                                       kernel_initializer=self.kernel_initializer,
                                       name=f'layer_0')
         self.hidden_blocks = list()
@@ -180,21 +180,21 @@ def create_ResNet(self):
             block.add(self.Dense_Layer(self.num_neurons_per_layer,
                                        activation=CustomActivation(units=self.num_neurons_per_layer,
                                                                    activation=self.activation,
-                                                                   adaptative_activation=self.adaptative_activation),
+                                                                   adaptive_activation=self.adaptive_activation),
                                        kernel_initializer=self.kernel_initializer))
             block.add(self.Dense_Layer(self.num_neurons_per_layer,
                                        activation=None,
                                        kernel_initializer=self.kernel_initializer))
             self.hidden_blocks.append(block)
             activation_layer = tf.keras.layers.Activation(activation=CustomActivation(units=self.num_neurons_per_layer,
                                                                                       activation=self.activation,
-                                                                                      adaptative_activation=self.adaptative_activation))
+                                                                                      adaptive_activation=self.adaptive_activation))
             self.hidden_blocks_activations.append(activation_layer)
 
         self.last = self.Dense_Layer(self.num_neurons_per_layer,
                                      activation=CustomActivation(units=self.num_neurons_per_layer,
                                                                  activation=self.activation,
-                                                                 adaptative_activation=self.adaptative_activation),
+                                                                 adaptive_activation=self.adaptive_activation),
                                      kernel_initializer=self.kernel_initializer,
                                      name=f'layer_1')
         self.call_architecture = self.call_ResNet
@@ -235,17 +235,17 @@ def call_ResNet(self, X):
 
 class CustomActivation(tf.keras.layers.Layer):
 
-    def __init__(self, units=1, activation='tanh', adaptative_activation=False, **kwargs):
+    def __init__(self, units=1, activation='tanh', adaptive_activation=False, **kwargs):
         super(CustomActivation, self).__init__(**kwargs)
         self.units = units
         self.activation = activation
-        self.adaptative_activation = adaptative_activation
+        self.adaptive_activation = adaptive_activation
 
     def build(self, input_shape):
         self.a = self.add_weight(name='a',
                                  shape=(self.units,),
                                  initializer='ones',
-                                 trainable=self.adaptative_activation)
+                                 trainable=self.adaptive_activation)
 
     def call(self, inputs):
         a_expanded = tf.expand_dims(self.a, axis=0)
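The hunk above truncates CustomActivation.call, but the constructor and build() show what the renamed flag does: it makes the per-unit parameter a trainable. A minimal sketch of exercising it, relying only on standard Keras layer behavior (build runs on first call) and on what the diff shows:

```python
# Minimal sketch (not part of this commit): the renamed flag controls whether
# the per-unit parameter 'a' is trainable. Relies only on the constructor and
# build() shown in the diff above; call() is truncated there.
import tensorflow as tf
from xppbe.NN.NeuralNet import CustomActivation

act = CustomActivation(units=4, activation='tanh', adaptive_activation=True)
y = act(tf.ones((2, 4)))   # first call triggers build(), creating weight 'a'
print(act.a.shape)         # (4,): one parameter per unit
print(act.a.trainable)     # True, because adaptive_activation=True
```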
xppbe/Simulation.yaml (4 changes: 2 additions & 2 deletions)

@@ -66,7 +66,7 @@ hyperparameters_in:
   num_neurons_per_layer: 200
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: false
@@ -79,7 +79,7 @@ hyperparameters_out:
   num_neurons_per_layer: 200
   output_dim: 1
   activation: tanh
-  adaptative_activation: true
+  adaptive_activation: true
   architecture_Net: FCNN
   fourier_features: true
   weight_factorization: false
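Configs written before this commit still carry the old key and would no longer match the renamed argument. A hedged migration sketch (not part of the commit; assumes PyYAML and the flat hyperparameters_in/hyperparameters_out sections shown above; the config path is hypothetical):

```python
# Hedged sketch (not part of this commit): rewrite an older YAML config to use
# the renamed key. Assumes the hyperparameters_in/out layout shown in the diffs.
import yaml

def migrate(path):
    with open(path) as f:
        cfg = yaml.safe_load(f)
    for section in ('hyperparameters_in', 'hyperparameters_out'):
        hp = cfg.get(section, {})
        if 'adaptative_activation' in hp:
            hp['adaptive_activation'] = hp.pop('adaptative_activation')
    with open(path, 'w') as f:
        yaml.safe_dump(cfg, f, sort_keys=False)

migrate('my_simulation.yaml')  # hypothetical user config path
```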
