Described tutorial part 4 in different steps. #200

Open · wants to merge 3 commits into master
124 changes: 124 additions & 0 deletions R/p004.R
@@ -0,0 +1,124 @@
#part 1 introduction ####
##create inputs
inputs <- c(1,2,3,2.5)
inputs <- matrix(inputs, ncol = 4)
inputs

##create weights
weights <- c(
  c(0.2, 0.8, -0.5, 1.0),
  c(0.5, -0.91, 0.26, -0.5),
  c(-0.26, -0.27, 0.17, 0.87)
)

weights <- matrix(weights, ncol = 4, byrow = T) #matrix form for the dot product; byrow = TRUE gives the familiar (3, 4) shape from the tutorial
weights <- t(weights) #transpose to (4, 3) so that inputs %*% weights lines up (matrix(weights, nrow = 4) fills column-wise and gives this shape directly)
weights

##create biases
biases <- c(2,3,0.5)
biases <- matrix(biases, ncol = 3, byrow = T)
biases

#create output
outputs <- (inputs %*% weights) + biases
outputs
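
#sanity check by hand: the first output neuron is the dot product of the input
#row with the first weight set, plus the first bias
sum(c(1, 2, 3, 2.5) * c(0.2, 0.8, -0.5, 1.0)) + 2 #expected: 4.8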

## part 2: multiple inputs ####
#create more inputs
multiple_inputs <- c(
  c(1,2,3,2.5),
  c(2.0,5.0,-1.0,2.0),
  c(-1.5,2.7,3.3,-0.8)
)
multiple_inputs <- matrix(multiple_inputs, ncol = 4, byrow = T)
multiple_inputs

# we still use the same weights as defined in part 1, but for the new layer (layer 2) we will define new weights
weights # weights layer 1 as previously defined

weights2 <- c(
  c(0.1,-0.14,0.5),
  c(-0.5, 0.12, -0.33),
  c(-0.44, 0.73, -0.13)
) #note the (3, 3) shape instead of (3, 4): layer 2 receives the three outputs of layer 1 as its inputs
weights2 <- matrix(weights2, ncol = 3, byrow = T)
weights2 <- t(weights2)
weights2

#expand the biases of the first layer: the batched output is a (3, 3) matrix, and
#R recycles a plain vector column-wise instead of adding it to each row,
#so we replicate the biases into one row per sample
biases1 <- c(2,3,0.5)
biases1 <- matrix(rep(biases1,3), ncol = 3, byrow = T)
biases1

#create new set of biases for the second layer
biases2 <- c(-1, 2, -0.5)
biases2 <- matrix(rep(biases2, 3), ncol = 3, byrow = T)
biases2

#define new output line: layer1_outputs
layer1_outputs <- multiple_inputs %*% weights + biases1
layer1_outputs
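
#aside, a minimal alternative sketch: sweep() broadcasts a plain bias vector
#across the rows, so the replicated bias matrix is not strictly needed
layer1_outputs_alt <- sweep(multiple_inputs %*% weights, 2, c(2, 3, 0.5), '+')
all.equal(layer1_outputs, layer1_outputs_alt) #expected: TRUE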

#give output of layer 1 as input into layer 2 and give the respective weights and biases
layer2_outputs <- layer1_outputs %*% weights2 + biases2

#show output of layer 2
layer2_outputs

## Part 3 create object class ####
#create more inputs
set.seed(115)
X <- matrix(
  c(1,2,3,2.5,
    2.0,5.0,-1.0,2.0,
    -1.5,2.7,3.3,-0.8),
  nrow = 3, ncol = 4, byrow = T)
X

#define class; weights, biases and outputs are declared as slots so that
#layer@weights, layer@biases and layer@outputs can be read and assigned directly
setClass("DenseLayer", slots = c(n_inputs = 'numeric', n_neurons = 'numeric',
                                 weights = 'matrix', biases = 'numeric', outputs = 'matrix'))

#set init (constructor)
setGeneric('init', function(layer) standardGeneric('init'))

#set method for init
setMethod('init', 'DenseLayer',
          function(layer) {
            n_weights <- layer@n_inputs * layer@n_neurons
            #small random starting weights, one column per neuron
            layer@weights <- 0.10 * matrix(rnorm(n_weights),
                                           nrow = layer@n_inputs,
                                           ncol = layer@n_neurons)
            layer@biases <- rep(0, layer@n_neurons) #biases start at zero
            layer
          })

#set method for forward function
setGeneric('forward', function(layer, inputs) standardGeneric('forward'))
setMethod('forward', 'DenseLayer',
          function(layer, inputs){
            #sweep() adds the bias vector to every output row
            #(a plain + would recycle the vector column-wise)
            layer@outputs <- sweep(inputs %*% layer@weights, 2, layer@biases, '+')
            layer
          })

#create wrapper for initializing layer object
LayerDense <- function(n_inputs, n_neurons){
  init(new('DenseLayer', n_inputs = n_inputs, n_neurons = n_neurons))
}

#create first layer
layer1 <- LayerDense(n_inputs = 4, n_neurons = 5)

layer1 <- forward(layer1, X)

layer1@outputs

#create second layer
layer2 <- LayerDense(n_inputs = 5, n_neurons = 10)

layer2 <- forward(layer2, layer1@outputs)

layer2@outputs
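
#quick shape check: 3 samples through a 4 -> 5 -> 10 stack give a (3, 10) output
dim(layer2@outputs) #expected: 3 10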
79 changes: 79 additions & 0 deletions R/p005.R
@@ -0,0 +1,79 @@
## Neural Network from p4 ####
#input batch, as in p004
set.seed(0)
X <- matrix(
  c(1,2,3,2.5,
    2.0,5.0,-1.0,2.0,
    -1.5,2.7,3.3,-0.8),
  nrow = 3, ncol = 4, byrow = T)
X

## part 1: simple activation ####
inputs <- c(0,2,-1,3.3,-2.7,1.1,2.2,-100)
outputs <- c()

# first method
for (i in inputs){
  if (i > 0){
    outputs <- append(outputs, i)
  } else {
    outputs <- append(outputs, 0)
  }
}

# second method
outputs <- c() #reset so results from the first method are not carried over
for (i in inputs){
  outputs <- append(outputs, max(0, i))
}

outputs
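
# third method, a vectorized sketch: pmax() takes the element-wise maximum of
# the whole vector at once; Activation_ReLU below relies on the same idea
pmax(inputs, 0)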


## part 2: neural network with activation ####
#activation function.
Activation_ReLU <- function(inputs){
  return(pmax(inputs, 0))
}
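
#quick check on mixed values
Activation_ReLU(c(-1.5, 0, 2.3)) #expected: 0 0 2.3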

#define class; weights, biases and outputs are declared as slots so that
#layer@weights, layer@biases and layer@outputs can be read and assigned directly
setClass("DenseLayer", slots = c(n_inputs = 'numeric', n_neurons = 'numeric',
                                 weights = 'matrix', biases = 'numeric', outputs = 'matrix'))

#set init (constructor)
setGeneric('init', function(layer) standardGeneric('init'))

#set method for init
setMethod('init', 'DenseLayer',
          function(layer) {
            n_weights <- layer@n_inputs * layer@n_neurons
            #small random starting weights, one column per neuron
            layer@weights <- 0.10 * matrix(rnorm(n_weights),
                                           nrow = layer@n_inputs,
                                           ncol = layer@n_neurons)
            layer@biases <- rep(0, layer@n_neurons) #biases start at zero
            layer
          })

#set method for forward function
setGeneric('forward', function(layer, inputs) standardGeneric('forward'))
setMethod('forward', 'DenseLayer',
          function(layer, inputs){
            #sweep() adds the bias vector to every output row
            #(a plain + would recycle the vector column-wise)
            layer@outputs <- sweep(inputs %*% layer@weights, 2, layer@biases, '+')
            layer
          })

#create wrapper for initializing layer object
LayerDense <- function(n_inputs, n_neurons){
  init(new('DenseLayer', n_inputs = n_inputs, n_neurons = n_neurons))
}



#create first layer
layer1 <- LayerDense(n_inputs = 4, n_neurons = 5)

layer1 <- forward(layer1, X)

layer1@outputs

Activation_ReLU(layer1@outputs)
154 changes: 154 additions & 0 deletions R/p006.R
@@ -0,0 +1,154 @@
##part 1 ####
#example outputs
layer_outputs <- c(4.8, 1.21, 2.385)

#exponentiated list of outputs
exp_values <- c()

#exponentiate the layer outputs to remove negative values
#without losing the relative differences between the outputs
for (output in layer_outputs) {
  exp_values <- c(exp_values, exp(output))
}

#show values (expected: 121.510418 3.353485 10.859063)
exp_values

##part 2: normalization####
#get sum of exp. values
norm_base <- sum(exp_values)
norm_base

#insert normalized values of exponential outputs
norm_values <- c()

for (value in exp_values) {
  norm_values <- c(norm_values, value/norm_base)
}

#show normalized output
norm_values

#show that total is 100%
sum(norm_values)

##part 3: functional code####
layer_outputs <- matrix(c(4.8, 1.21, 2.385), ncol=3)
exp_values <- exp(layer_outputs)
norm_values <- exp_values / sum(exp_values)
norm_values

##part 4: batches and overflow prevention ####
layer_outputs <- matrix(c(4.8, 1.21, 2.385,
                          8.9, -1.81, 0.2,
                          1.41, 1.051, 0.026),
                        ncol = 3, byrow = T)

layer_outputs
#subtract each row's maximum from that row so the largest value becomes 0,
#which prevents overflow in exp()
matrix(apply(layer_outputs, 1, max))
matrix(mapply("-", layer_outputs, apply(layer_outputs, 1, max)), nrow = nrow(layer_outputs))

exp_values <- exp(layer_outputs)
exp_values

sums_exp <- rowSums(exp_values)
sums_exp

#norm_values <- exp_values / sums_exp also works here, because R recycles
#sums_exp down the columns and its length equals the number of rows

matrix(mapply("/", exp_values, sums_exp), nrow = nrow(exp_values))
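
#equivalent sketch with sweep(), which divides each row by its sum for any matrix shape
sweep(exp_values, 1, sums_exp, '/')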

##part 5: implement in existing neural network from p005####
#activation function.
Activation_ReLU <- function(inputs){
  return(pmax(inputs, 0))
}

Activation_Softmax <- function(inputs){
  #subtract each row's max before exp() to avoid overflow, then normalize by row sums
  exp_values <- exp(matrix(mapply("-", inputs, apply(inputs, 1, max)), ncol = ncol(inputs)))
  probabilities <- matrix(mapply("/", exp_values, rowSums(exp_values)), ncol = ncol(exp_values))
  return(probabilities)
}

Activation_Softmax(matrix(c(4,0,3,1,2,3), ncol = 2, byrow = T))
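
#each softmax output row should be a probability distribution
rowSums(Activation_Softmax(matrix(c(4,0,3,1,2,3), ncol = 2, byrow = T))) #expected: 1 1 1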

#define class; weights, biases and outputs are declared as slots so that
#layer@weights, layer@biases and layer@outputs can be read and assigned directly
setClass("DenseLayer", slots = c(n_inputs = 'numeric', n_neurons = 'numeric',
                                 weights = 'matrix', biases = 'numeric', outputs = 'matrix'))

#set init (constructor)
setGeneric('init', function(layer) standardGeneric('init'))

#set method for init
setMethod('init', 'DenseLayer',
          function(layer) {
            n_weights <- layer@n_inputs * layer@n_neurons
            #small random starting weights, one column per neuron
            layer@weights <- 0.10 * matrix(rnorm(n_weights),
                                           nrow = layer@n_inputs,
                                           ncol = layer@n_neurons)
            layer@biases <- rep(0, layer@n_neurons) #biases start at zero
            layer
          })

#set method for forward function
setGeneric('forward', function(layer, inputs) standardGeneric('forward'))
setMethod('forward', 'DenseLayer',
          function(layer, inputs){
            #sweep() adds the bias vector to every output row
            #(a plain + would recycle the vector column-wise)
            layer@outputs <- sweep(inputs %*% layer@weights, 2, layer@biases, '+')
            layer
          })

#create wrapper for initializing layer object
LayerDense <- function(n_inputs, n_neurons){
  init(new('DenseLayer', n_inputs = n_inputs, n_neurons = n_neurons))
}

#spiral dataset
##define spiral dataset with 3 classes and 100 examples each ####
N <- 100 #number of points per class
D <- 2 #number of dimensions
K <- 3 #number of classes
X <- data.frame() #data matrix
y <- data.frame() #class labels

set.seed(308) #set random seed for testing purposes

##creating dataset ####
for (j in (1:K)){
  r <- seq(0.05, 1, length.out=N) #radius
  t <- seq((j-1)*4.7, j*4.7, length.out=N) + rnorm(N, sd=0.3) #theta (angle along the spiral, with gaussian noise)
  Xtemp <- data.frame(x = r*sin(t), y = r*cos(t))
  ytemp <- data.frame(matrix(j, N, 1))
  X <- rbind(X, Xtemp)
  y <- rbind(y, ytemp)
}
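
#quick shape check: the stacked dataset holds N * K = 300 points in D = 2 dimensions
dim(X) #expected: 300 2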

data <- cbind(X, y)
colnames(data) <- c(colnames(X), 'label')
data

library(ggplot2)
ggplot() +
  geom_point(data = data, aes(x = x, y = y, color = as.character(label)), size = 2) +
  scale_color_discrete(name = 'Label') +
  coord_fixed(ratio = 0.6) +
  theme(axis.ticks = element_blank(), panel.grid.major = element_blank(),
        panel.grid.minor = element_blank(), axis.text = element_blank(),
        legend.position = 'none')

#make 'prediction' (no feedback yet, just testing softmax function)
#create first layer
#input is 2, since we have 2 variables (x,y)
layer1 <- LayerDense(n_inputs = 2, n_neurons = 3)
layer1 <- forward(layer1, as.matrix(X))
layer1@outputs

#input is 3, since output of layer 1 is 3 neurons. n_neurons is 3 since we have three classes.
#ReLU activation is initialized here
layer2 <- LayerDense(n_inputs = 3, n_neurons = 3)
layer2 <- forward(layer2, Activation_ReLU(layer1@outputs))

#view the first 5 rows of the output with softmax activation
head(Activation_Softmax(layer2@outputs), 5)
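
#sanity check: every row of the softmax output sums to 1
summary(rowSums(Activation_Softmax(layer2@outputs)))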