Commit f0d1bf9

Make activation functions replaceable (#18)

phannebohm authored Jul 5, 2024
1 parent 7f80aae commit f0d1bf9

Showing 18 changed files with 57 additions and 50 deletions.
7 changes: 7 additions & 0 deletions NeuralNetwork/ActivationFunctions/ActivationFunction.mo
@@ -0,0 +1,7 @@
+within NeuralNetwork.ActivationFunctions;
+
+partial function ActivationFunction "All activation functions extend from this partial function"
+  extends Modelica.Icons.Function;
+  input Real u "Input of the function";
+  output Real y "Output of the function";
+end ActivationFunction;
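
This partial function is the new common interface: every activation maps a scalar input u to a scalar output y, and any user-defined activation can plug into the layers by extending it. A minimal sketch of such an extension (the LeakyReLu name and the 0.01 slope are illustrative assumptions, not part of this commit):

within NeuralNetwork.ActivationFunctions;

function LeakyReLu "Hypothetical user-defined activation, not part of this commit"
  extends ActivationFunction;
algorithm
  // Pass positive inputs through unchanged; damp negative inputs by a small slope
  y := if u > 0 then u else 0.01 * u;
end LeakyReLu;

Listing the new function in ActivationFunctions/package.order, as this commit does for the bundled activations, would make it visible to Modelica tools alongside the others.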
7 changes: 7 additions & 0 deletions NeuralNetwork/ActivationFunctions/Id.mo
@@ -0,0 +1,7 @@
+within NeuralNetwork.ActivationFunctions;
+
+function Id
+  extends ActivationFunction;
+algorithm
+  y := u;
+end Id;
4 changes: 1 addition & 3 deletions NeuralNetwork/ActivationFunctions/ReLu.mo
@@ -1,9 +1,7 @@
 within NeuralNetwork.ActivationFunctions;
 
 function ReLu
-  extends Modelica.Icons.Function;
-  input Real u "Input of the function";
-  output Real y "Output of the function";
+  extends ActivationFunction;
 algorithm
   y := max(0.0, u);
 end ReLu;
6 changes: 2 additions & 4 deletions NeuralNetwork/ActivationFunctions/Sigmoid.mo
@@ -1,9 +1,7 @@
 within NeuralNetwork.ActivationFunctions;
 
 function Sigmoid
-  extends Modelica.Icons.Function;
-  input Real u "Input of the function";
-  output Real y "Output of the function";
+  extends ActivationFunction;
 algorithm
-  y := 1 / (1 + Modelica.Math.exp(-u));
+  y := 1 / (1 + exp(-u));
 end Sigmoid;
7 changes: 7 additions & 0 deletions NeuralNetwork/ActivationFunctions/Softplus.mo
@@ -0,0 +1,7 @@
+within NeuralNetwork.ActivationFunctions;
+
+function Softplus
+  extends ActivationFunction;
+algorithm
+  y := log(1 + exp(u));
+end Softplus;
7 changes: 7 additions & 0 deletions NeuralNetwork/ActivationFunctions/Step.mo
@@ -0,0 +1,7 @@
+within NeuralNetwork.ActivationFunctions;
+
+function Step
+  extends ActivationFunction;
+algorithm
+  y := if u < 0 then 0 else 1;
+end Step;
7 changes: 7 additions & 0 deletions NeuralNetwork/ActivationFunctions/Tanh.mo
@@ -0,0 +1,7 @@
+within NeuralNetwork.ActivationFunctions;
+
+function Tanh
+  extends ActivationFunction;
+algorithm
+  y := tanh(u);
+end Tanh;
15 changes: 0 additions & 15 deletions NeuralNetwork/ActivationFunctions/activationFunction.mo

This file was deleted.

4 changes: 3 additions & 1 deletion NeuralNetwork/ActivationFunctions/package.mo
@@ -1,5 +1,7 @@
 within NeuralNetwork;
 
 package ActivationFunctions
-  extends Modelica.Icons.FunctionsPackage;
+  extends Modelica.Icons.Package;
+  annotation(
+    Icon(graphics = {Line(points = {{-80, -80}, {-47.8, -78.7}, {-35.8, -75.7}, {-27.7, -70.6}, {-22.1, -64.2}, {-17.3, -55.9}, {-12.5, -44.3}, {-7.64, -29.2}, {-1.21, -4.82}, {6.83, 26.3}, {11.7, 42}, {16.5, 54.2}, {21.3, 63.1}, {26.9, 69.9}, {34.2, 75}, {45.4, 78.4}, {72, 79.9}, {80, 80}}, thickness = 1, smooth = Smooth.Bezier)}));
 end ActivationFunctions;
8 changes: 6 additions & 2 deletions NeuralNetwork/ActivationFunctions/package.order
@@ -1,3 +1,7 @@
-ReLu
+ActivationFunction
+Id
+Step
 Sigmoid
-activationFunction
+Tanh
+ReLu
+Softplus
5 changes: 2 additions & 3 deletions NeuralNetwork/Examples/Utilities/NARX_Network.mo

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions NeuralNetwork/Examples/Utilities/PolynomNetwork.mo
@@ -7,7 +7,7 @@ block PolynomNetwork "Neural Network approximating y = u*u + 0.5*u - 2.0 on inte
     numNeurons = 2,
     weights = layer_1_weights,
     bias = layer_1_bias,
-    f = NeuralNetwork.Types.ActivationFunction.Hyperbolic_tangent
+    redeclare function f = NeuralNetwork.ActivationFunctions.Tanh
   ) annotation(
     Placement(transformation(origin = {-66, 0}, extent = {{-30, -30}, {30, 30}})));
   Layer.Output outputLayer(
@@ -17,7 +17,7 @@ block PolynomNetwork "Neural Network approximating y = u*u + 0.5*u - 2.0 on inte
     bias = layer_2_bias
   ) annotation(
     Placement(transformation(origin = {50, 0}, extent = {{-30, -30}, {30, 30}})));
-
+
   parameter Real[2,1] layer_1_weights = {{-0.95248}, {-0.943175}};
   parameter Real[2] layer_1_bias = {0.872633, -0.949252};
   parameter Real[1,2] layer_2_weights = {{-2.25385, 1.40389}};
@@ -30,7 +30,7 @@ equation
   annotation(
     Documentation(info = "<html><head></head><body>
 <h2>Training</h2><p>
-Neural network parameters trained in
+Neural network parameters trained in
 <a href=\"https://github.com/AMIT-HSBI/NeuralNetwork/blob/main/Example/HelloWorld.ipynb\">HelloWorld.ipynb</a>.
 </p><p>Trained with TensorFlow on 8000 data points from interval [-1,1].</p>
 </body></html>"));
6 changes: 3 additions & 3 deletions NeuralNetwork/Layer/Hidden.mo
@@ -2,15 +2,15 @@ within NeuralNetwork.Layer;
 
 block Hidden "Block for a dense neural network layer"
   extends NeuralNetwork.Layer.Interfaces.Layer(lineColor={0, 170, 255});
-  parameter NeuralNetwork.Types.ActivationFunction f = NeuralNetwork.Types.ActivationFunction.ReLu "Activation function of the layer";
+  replaceable function f = NeuralNetwork.ActivationFunctions.ActivationFunction "Activation function of the layer";
 equation
-  y = NeuralNetwork.ActivationFunctions.activationFunction(f, weights * u + bias);
+  y = f(weights * u + bias);
 
   annotation(
     Documentation(info = "<html><head></head><body>
 <p>
 Dense hidden layer:</p><p>y = f(weights*u + bias)</p>
 <h2>Parametrization</h2><p>
-Set <strong>bias</strong> and <strong>weights</strong> to parametrize layer.</p><p><b>numInputs</b> specify number of input nodes and <b>numNeurons</b> number of neurons in this layer.</p><p>Activation function <b>f</b>. Available options:</p><p></p><ul><li>ReLu [default]: <b>Types.ActivationFunction.ReLu</b></li><li>Sigmoid:&nbsp;<b><b>Types.</b>ActivationFunction.Sigmoid</b></li><li>tanh:&nbsp;<b><b>Types.</b>ActivationFunction.tanh</b></li></ul>
+Set <strong>bias</strong> and <strong>weights</strong> to parametrize layer.</p><p><b>numInputs</b> specify number of input nodes and <b>numNeurons</b> number of neurons in this layer.</p><p>Activation function <b>f</b>. Extend from NeuralNetwork.ActivationFunctions.ActivationFunction.
 </body></html>"));
 end Hidden;
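
With f now replaceable, models pick an activation per layer instance via redeclare, as the PolynomNetwork diff above shows for Tanh. A minimal usage sketch (the instance name and the weight/bias parameters are illustrative placeholders, assuming they are declared as in PolynomNetwork):

// Hypothetical layer instance; layer_1_weights and layer_1_bias are
// placeholders for parameters declared elsewhere in the enclosing model.
NeuralNetwork.Layer.Hidden hiddenLayer(
  numInputs = 1,
  numNeurons = 2,
  weights = layer_1_weights,
  bias = layer_1_bias,
  redeclare function f = NeuralNetwork.ActivationFunctions.Softplus
);

Leaving out the redeclare keeps f bound to the partial ActivationFunction, so a concrete activation has to be chosen before the model can be simulated.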
4 changes: 2 additions & 2 deletions NeuralNetwork/Layer/Input.mo
@@ -2,7 +2,7 @@ within NeuralNetwork.Layer;
 
 block Input "Block for a dense input neural network layer with optional normalization"
   extends NeuralNetwork.Layer.Interfaces.Layer(lineColor={212, 0, 0});
-  parameter NeuralNetwork.Types.ActivationFunction f = NeuralNetwork.Types.ActivationFunction.ReLu "Activation function of the layer";
+  replaceable function f = NeuralNetwork.ActivationFunctions.ActivationFunction "Activation function of the layer";
   // PCA
   parameter Boolean pca = false "Reducing the feature space based on a PCA";
   parameter Integer dimFeatures = numInputs "Dimension of the new feature space";
@@ -40,7 +40,7 @@ equation
   end if;
 
   // Activation function
-  y = NeuralNetwork.ActivationFunctions.activationFunction(f, weights * uu + bias);
+  y = f(weights * uu + bias);
 
   annotation(
     Documentation(info = "<html><head></head><body>
7 changes: 0 additions & 7 deletions NeuralNetwork/Types/ActivationFunction.mo

This file was deleted.

5 changes: 0 additions & 5 deletions NeuralNetwork/Types/package.mo

This file was deleted.

1 change: 0 additions & 1 deletion NeuralNetwork/Types/package.order

This file was deleted.

1 change: 0 additions & 1 deletion NeuralNetwork/package.order
@@ -1,5 +1,4 @@
 Layer
 ActivationFunctions
-Types
 Networks
 Examples
