-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathComplexModel.cs
113 lines (101 loc) · 3.51 KB
/
ComplexModel.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Backend
{
/// <summary>
/// A model whose layers are arranged as a directed acyclic graph rather than a
/// simple sequence, allowing branching/merging layer topologies.
/// </summary>
public class ComplexModel : Model
{
    // The DAG of layers; only assigned here, so mark readonly.
    readonly DirectedAcyclicGraph graph = new DirectedAcyclicGraph();
    bool _inputLayerAdded = false;
    bool _readyToTrain = false;
    int _batchSize;
    CostFunction _costFunction;
    float _learningRate;
    int _epochs;

    /// <summary>
    /// Registers the single input layer of the model. May only be called once.
    /// </summary>
    /// <param name="layer">The input layer to add as the graph's start node.</param>
    /// <exception cref="ArgumentException">Thrown if an input layer was already added.</exception>
    // NOTE(review): InvalidOperationException would be the semantically correct type here
    // (the failure is about object state, not the argument), but ArgumentException is kept
    // so existing callers that catch it keep working.
    public void AddInputLayer(InputLayer layer)
    {
        if (_inputLayerAdded)
        {
            throw new ArgumentException("Input layer has already been added.");
        }
        graph.AddInputNode(layer);
        _inputLayerAdded = true;
    }

    /// <summary>
    /// Adds <paramref name="layer"/> to the model, connected after <paramref name="previousLayer"/>.
    /// </summary>
    public void AddLayer(Layer previousLayer, Layer layer)
    {
        graph.AddConnection(previousLayer, layer);
    }

    /// <summary>
    /// Returns the output size of the (first) input layer, i.e. the model's expected input size.
    /// </summary>
    public override int GetInputSize()
    {
        // Redundancy for multiple input layers in tentative future development.
        List<Layer> inputLayers = graph.GetStartNodes(graph.GetTopology());
        return inputLayers[0].GetOutputSize();
    }

    /// <summary>
    /// Returns the output size of the final layer in topological order.
    /// </summary>
    public override int GetOutputSize()
    {
        return graph.GetTopologicalSort().Last().GetOutputSize();
    }

    /// <summary>
    /// Runs a forward pass through the layer graph in topological order.
    /// </summary>
    /// <param name="inputs">Batch of input activations fed to the input layer.</param>
    /// <returns>The activation output of the final (output) layer.</returns>
    /// <exception cref="InvalidOperationException">
    /// Thrown if the model has not been compiled, or if the graph has more than one output layer.
    /// </exception>
    public override float[,] ForwardPropagate(float[,] inputs)
    {
        if (!_readyToTrain)
        {
            // Specific exception type instead of bare Exception; callers catching
            // Exception still catch this (backward-compatible).
            throw new InvalidOperationException("Model must be compiled before training");
        }
        if (graph.GetEndNodes(graph.GetTopology()).Count > 1)
        {
            throw new InvalidOperationException("Multiple output layers disallowed.");
        }
        List<Layer> order = graph.GetTopologicalSort();
        // Sets up activation output on first layer (the input layer).
        inputs = order[0].ForwardPass(inputs);
        // Starts from 1 as we are excluding an explicit ForwardPass call on the first layer (the input layer).
        for (int i = 1; i < order.Count; i++)
        {
            // Collects activation outputs from all layers feeding into this one.
            List<Layer> incomingLayers = graph.GetIncomingNodes(graph.GetTopology(), order[i]);
            List<float[,]> incomingInputs = new List<float[,]>();
            foreach (Layer l in incomingLayers)
            {
                incomingInputs.Add(l.GetActivationOutput());
            }
            DenseLayer denseLayer = (DenseLayer)order[i];
            // ForwardPass updates the layer's stored activation output as we traverse
            // the graph; the return value is not needed here, so it is discarded.
            denseLayer.ForwardPass(incomingInputs);
        }
        // Model output is the activation output of the final layer.
        DenseLayer outputLayer = (DenseLayer)order.Last();
        return outputLayer.GetActivationOutput();
    }

    /// <summary>
    /// Finalizes the model with a cost function, making it ready for forward passes and training.
    /// </summary>
    public override void Compile(CostFunction costFunction)
    {
        _costFunction = costFunction;
        _readyToTrain = true;
    }

    /// <summary>Returns the adjacency map of the layer graph.</summary>
    public Dictionary<Layer, List<Layer>> GetTopology()
    {
        return graph.GetTopology();
    }

    /// <summary>Returns the configured batch size.</summary>
    public int GetBatchSize()
    {
        return _batchSize;
    }

    /// <summary>Returns the cost function set at compile time.</summary>
    public CostFunction GetCostFunction()
    {
        return _costFunction;
    }

    /// <summary>Returns the configured learning rate.</summary>
    public float GetLearningRate()
    {
        return _learningRate;
    }

    /// <summary>Returns the configured number of training epochs.</summary>
    public int GetEpochs()
    {
        return _epochs;
    }
}
}