Skip to content

Commit

Permalink
Refactoring package.mo into directories (#14)
Browse files Browse the repository at this point in the history
* Refactoring package.mo into directories
* Refactoring code
  * Fixing white space
  * Adding html documentation to root package.mo
  * Loading branch information
AnHeuermann authored Feb 13, 2024
1 parent 0764efa commit 92abd52
Show file tree
Hide file tree
Showing 45 changed files with 416 additions and 354 deletions.
5 changes: 5 additions & 0 deletions .github/CODEOWNERS
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Each line is a file pattern followed by one or more owners.

# License changes
*license* @bernhardbachmann
*LICENSE* @bernhardbachmann
1 change: 0 additions & 1 deletion .github/workflows/coverage.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,6 @@ jobs:
omc
libraries: |
Modelica 4.0.0
ExternData 2.6.1
omc-diff: true

- name: openmodelica-library-testing
Expand Down

Large diffs are not rendered by default.

File renamed without changes.
File renamed without changes.
File renamed without changes.

Large diffs are not rendered by default.

39 changes: 39 additions & 0 deletions Example_Models/NARX_MISO/Model/NARX_MISO.mo

Large diffs are not rendered by default.

File renamed without changes.
File renamed without changes.
File renamed without changes.

Large diffs are not rendered by default.

29 changes: 29 additions & 0 deletions Example_Models/NARX_PCA/Model/NARX_PCA.mo

Large diffs are not rendered by default.

File renamed without changes.
File renamed without changes.
File renamed without changes.

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,13 @@ model Feedforward_Network "Simulation of the simple network to demonstrate how t
Placement(visible = true, transformation(origin = {-70, 50}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
Modelica.Blocks.Sources.Constant const(k = 5) annotation(
Placement(visible = true, transformation(origin = {-70, -50}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
Network.Tree_Layer_Neural_Network tree_Layer_Neural_Network annotation(
Network.Tree_Layer_Neural_Network tree_Layer_Neural_Network annotation(
Placement(visible = true, transformation(origin = {34, 0}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
equation
connect(ramp.y, mux.u[1]) annotation(
Line(points = {{-58, 50}, {-40, 50}, {-40, 0}, {-18, 0}}, color = {0, 0, 127}));
connect(const.y, mux.u[2]) annotation(
Line(points = {{-58, -50}, {-40, -50}, {-40, 0}, {-18, 0}}, color = {0, 0, 127}));
connect(mux.y, tree_Layer_Neural_Network.u) annotation(
connect(mux.y, tree_Layer_Neural_Network.u) annotation(
Line(points = {{4, 0}, {22, 0}}, color = {0, 0, 127}, thickness = 0.5));
protected
end Feedforward_Network;
44 changes: 44 additions & 0 deletions Example_Models/Simple_Network/Network/Tree_Layer_Neural_Network.mo
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
block Tree_Layer_Neural_Network "This block models a very simple three layer Neural Network and should demonstrate the usage of the library"
  // NOTE(review): "Tree" in the block name is presumably a typo for "Three";
  // kept as-is because renaming would break existing models that reference it.

  // Parameter of Input Layer
  parameter Real FirstLayer_bias[3] = {0, 0, 0} "Bias vector of the first layer";
  parameter Real FirstLayer_weights[3, 2] = [1, 0; 0, 1; 0, 0] "Weight matrix of the first layer";
  parameter Integer FirstLayer_numInputs = 2 "Specification of the input dimension of the neural network";
  parameter Integer FirstLayer_numNeurons = 3 "Number of neurons in the first layer";
  parameter Boolean FirstLayer_pca = false "Reducing the feature space based on a PCA";
  parameter Boolean FirstLayer_scale = false "Scale the inputs with min-max normalization";
  parameter Boolean FirstLayer_standardization = false "Scale the inputs with mu-sigma standardization";

  // Parameter of First Hidden Layer
  parameter Real SecondLayer_bias[4] = {0, 0, 0, 0} "Bias vector of the second layer";
  parameter Real SecondLayer_weights[4, 3] = [1, 0, 0; 0, 1, 0; 0, 0, 0; 0, 0, 0] "Weight matrix of the second layer";
  parameter Integer SecondLayer_numInputs = FirstLayer_numNeurons "Specification of the inputs of the second layer";
  parameter Integer SecondLayer_numNeurons = 4 "Number of neurons in the second layer";
  // Set the Activation Function
  // Shared by the input and hidden layer (passed to both below).

  parameter Neural_Network.Activation_Function.Type_Activation_Function ActivationFunction = Neural_Network.Activation_Function.ReLu "Activation function of the Input and Hidden layers";

  // Parameter of Output Layer
  parameter Real ThirdLayer_bias[1] = {0} "Bias vector of the third layer";
  parameter Real ThirdLayer_weights[1, 4] = [1, 1, 0, 0] "Weight matrix of the third layer";
  parameter Integer ThirdLayer_numInputs = SecondLayer_numNeurons "Specification of the inputs of the third layer";
  parameter Integer ThirdLayer_numNeurons = 1 "Number of neurons in the third layer";
  parameter Boolean ThirdLayer_rescale = true "Rescale the outputs with min-max denormalization";
  parameter Real ThirdLayer_max[1] = {5.0} "Maximum of the training targets";
  parameter Real ThirdLayer_min[1] = {0.0} "Minimum of the training targets";
  parameter Boolean ThirdLayer_destandardization = false "Destandardization of the outputs with mu-sigma";
  parameter Real ThirdLayer_mean[1] = {0.0} "Default value";
  parameter Real ThirdLayer_std[1] = {1.0} "Default value";

  // Initialize the Layers of the neural network with the parameter that were defined above.
  // First_Layer scaling/PCA is disabled by default, so the min/max/mean/std literals
  // passed here are placeholders that are only read when those flags are enabled.
  Neural_Network.Layer.Input_Hidden_Layer First_Layer(bias = FirstLayer_bias, weights = FirstLayer_weights, NeuronActivation_Function = ActivationFunction, numInputs = FirstLayer_numInputs, numNeurons = FirstLayer_numNeurons, pca = FirstLayer_pca, scale = FirstLayer_scale, max = {1.0}, min = {0.0}, standardization = FirstLayer_standardization, mean = {0.0}, std = {1.0});
  Neural_Network.Layer.Dense_Hidden_Layer Second_Layer(bias = SecondLayer_bias, weights = SecondLayer_weights, NeuronActivation_Function = ActivationFunction, numInputs = SecondLayer_numInputs, numNeurons = SecondLayer_numNeurons);
  Neural_Network.Layer.Output_Hidden_Layer Third_Layer(bias = ThirdLayer_bias, weights = ThirdLayer_weights, numInputs = ThirdLayer_numInputs, numNeurons = ThirdLayer_numNeurons, rescale = ThirdLayer_rescale, max = ThirdLayer_max, min = ThirdLayer_min, destandardization = ThirdLayer_destandardization, mean = ThirdLayer_mean, std = ThirdLayer_std);

  // Treat Neural Network as multiple Input - multiple Output Block:
  // nin/nout are fixed to the first layer's inputs and the third layer's neurons.
  extends Modelica.Blocks.Interfaces.MIMO(final nin = FirstLayer_numInputs, final nout = ThirdLayer_numNeurons);
equation
  // Chain the three layers: block input -> first -> second -> third -> block output.
  connect(u, First_Layer.u);
  connect(First_Layer.y, Second_Layer.u);
  connect(Second_Layer.y, Third_Layer.u);
  connect(Third_Layer.y, y);
end Tree_Layer_Neural_Network;
10 changes: 10 additions & 0 deletions NeuralNetwork/Activation_Function/Type_Activation_Function.mo
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
within NeuralNetwork.Activation_Function;

type Type_Activation_Function "Temporary type of ActivationFunction with choices for menus (until enumerations are available)"
  extends Integer;
  // Menu choices map to the Integer constants declared in package
  // NeuralNetwork.Activation_Function (ReLu = 0, Sigmoid = 1, Hyperbolic_tangent = 2).
  // Fixed: "choise" typo made the tanh entry an invalid annotation element, and the
  // previous paths pointed at NeuralNetwork.ActivationFunction.Type_Activation_Function.*,
  // which does not exist — the constants live directly in NeuralNetwork.Activation_Function.
  annotation(
    choices(
      choice = NeuralNetwork.Activation_Function.ReLu "ReLu activation function",
      choice = NeuralNetwork.Activation_Function.Sigmoid "Sigmoid activation function",
      choice = NeuralNetwork.Activation_Function.Hyperbolic_tangent "Tanh activation function"));
end Type_Activation_Function;
7 changes: 7 additions & 0 deletions NeuralNetwork/Activation_Function/package.mo
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
within NeuralNetwork;

// Enumeration-like constants identifying the available activation functions.
// Used as the values of Type_Activation_Function (which extends Integer) until
// a proper enumeration is introduced.
package Activation_Function
  constant Integer ReLu = 0;               // rectified linear unit
  constant Integer Sigmoid = 1;            // logistic sigmoid
  constant Integer Hyperbolic_tangent = 2; // tanh
end Activation_Function;
4 changes: 4 additions & 0 deletions NeuralNetwork/Activation_Function/package.order
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
ReLu
Sigmoid
Hyperbolic_tangent
Type_Activation_Function
39 changes: 0 additions & 39 deletions NeuralNetwork/Example_Models/NARX_MISO/Model/NARX_MISO.mo

This file was deleted.

31 changes: 0 additions & 31 deletions NeuralNetwork/Example_Models/NARX_PCA/Model/NARX_PCA.mo

This file was deleted.

This file was deleted.

Loading

0 comments on commit 92abd52

Please sign in to comment.