Skip to content

Commit

Permalink
APTx Function: Default value of gamma should be 0.5 when trainable=False (#222)
Browse files Browse the repository at this point in the history

In the APTx activation function, the default value of gamma should be 0.5 when trainable=False.
  • Loading branch information
mr-ravin authored Jul 15, 2024
1 parent fb91d55 commit 2e8ef81
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions neurodiffeq/networks.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,13 +190,13 @@ class APTx(nn.Module):
:type trainable: bool
"""

def __init__(self, alpha=1.0, beta=1.0, gamma=1.0, trainable=False):
def __init__(self, alpha=1.0, beta=1.0, gamma=0.5, trainable=False):
super(APTx, self).__init__()
alpha = float(alpha)
beta = float(beta)
gamma = float(gamma)
self.trainable = trainable
if trainable:
if self.trainable:
self.alpha = nn.Parameter(torch.tensor(alpha))
self.beta = nn.Parameter(torch.tensor(beta))
self.gamma = nn.Parameter(torch.tensor(gamma))
Expand All @@ -206,4 +206,4 @@ def __init__(self, alpha=1.0, beta=1.0, gamma=1.0, trainable=False):
self.gamma = gamma

def forward(self, x):
    """Apply the APTx activation: (alpha + tanh(beta * x)) * gamma * x.

    :param x: input tensor of any shape.
    :type x: torch.Tensor
    :return: element-wise activated tensor, same shape as ``x``.
    :rtype: torch.Tensor
    """
    # torch.nn.functional.tanh is deprecated; torch.tanh is the supported API
    # and is numerically identical.
    return (self.alpha + torch.tanh(self.beta * x)) * self.gamma * x

0 comments on commit 2e8ef81

Please sign in to comment.