diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..762aab4
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,14 @@
+# Set default behaviour, in case users don't have core.autocrlf set.
+* text=auto
+
+# Explicitly declare text files we want to always be normalized and converted
+# to native line endings on checkout.
+*.c text
+*.h text
+*.mo text
+*.mos text
+*.order text
+
+# Denote all files that are truly binary and should not be modified.
+*.png binary
+*.jpg binary
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..ad2d65e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,21 @@
+*.mat
+
+*~
+
+buildlog.txt
+
+dsfinal.txt
+
+dsin.txt
+
+dslog.txt
+
+dsmodelext1.c
+
+dsmodel.c
+
+dymosim.exe
+
+dymosim.exp
+
+dymosim.lib
diff --git a/NeuralNetwork 1.0/Icons/NN-Symbol.png b/NeuralNetwork 1.0/Icons/NN-Symbol.png
new file mode 100644
index 0000000..fe5398d
Binary files /dev/null and b/NeuralNetwork 1.0/Icons/NN-Symbol.png differ
diff --git a/NeuralNetwork 1.0/Icons/NNlinear-Symbol.png b/NeuralNetwork 1.0/Icons/NNlinear-Symbol.png
new file mode 100644
index 0000000..121f50c
Binary files /dev/null and b/NeuralNetwork 1.0/Icons/NNlinear-Symbol.png differ
diff --git a/NeuralNetwork 1.0/Icons/NNradial-Symbol.png b/NeuralNetwork 1.0/Icons/NNradial-Symbol.png
new file mode 100644
index 0000000..7590302
Binary files /dev/null and b/NeuralNetwork 1.0/Icons/NNradial-Symbol.png differ
diff --git a/NeuralNetwork 1.0/Utilities/extractData.m b/NeuralNetwork 1.0/Utilities/extractData.m
new file mode 100644
index 0000000..70d6ca1
--- /dev/null
+++ b/NeuralNetwork 1.0/Utilities/extractData.m
@@ -0,0 +1,131 @@
+%EXTRACTDATA Create parameter code for the Dymola NeuralNetwork Library.
+%
+% EXTRACTDATA(FILENAME,LAYERNAME,MATRIXWEIGHT,MATRIXBIAS,ACTFUN,RECURRENT)
+% creates a file named FILENAME containing the parameter code for a
+% NeuralNetwork layer.
+%
+% If the NeuralNetworkLayer block is used directly, LAYERNAME has to be
+% equal to 'NoLayerName'; otherwise it has to be equal to the name of the
+% internal layer the parameters refer to.
+%
+% If the script is used for a recurrent layer, the parameter RECURRENT has
+% to be equal to the number of non-recurrent inputs to the layer. For
+% example, if the layer has 1 non-recurrent input and 5 neurons, then the
+% total number of inputs to the layer will be 6, but RECURRENT has to be
+% equal to 1.
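+%
+% Example (illustrative sketch, not from the original file): assuming a
+% feed-forward network NET trained with the MATLAB Neural Network Toolbox,
+% where NET.IW{1} and NET.b{1} hold the first layer's weight matrix and
+% bias vector, the parameter code for a TanSig layer named 'Layer1' could
+% be generated with:
+%
+%    extractData('layer1.txt','Layer1',NET.IW{1},NET.b{1},'tan');
+%
+% which writes code of the form:
+%    Layer1_numNeurons = ...,
+%    Layer1_numInputs = ...,
+%    Layer1_weightTable = [...],
+%    Layer1_biasTable = [...],
+%    Layer1_NeuronActivationFunction=NeuralNetwork.Types.ActivationFunction.TanSig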
+%
+% ACTFUN can be equal to:
+%    'lin' for PureLin NeuralNetwork Activation Function
+%    'tan' for TanSig NeuralNetwork Activation Function
+%    'log' for LogSig NeuralNetwork Activation Function
+%    'rad' for RadBas NeuralNetwork Activation Function
+%    'noFun' if the activation function is not to be specified
+%
+% Made by Fabio Codecà (http://www.elet.polimi.it/people/codeca)
+
+
+
+function extractData(fileName,layerName,matrixWeight,matrixBias,actFun,recurrent)
+
+if (strcmp('NoLayerName',layerName))
+    layerName='';
+else
+    layerName=strcat(layerName,'_');
+end
+
+fid = fopen(fileName,'w');
+Y = [matrixWeight];
+Z = [matrixBias];
+[numRow, numCol] = size(Y);
+
+if nargin == 5
+    numInput = numCol;
+else
+    numInput = recurrent;
+end
+
+fprintf(fid,strcat(layerName,'numNeurons = ',num2str(numRow),', \n'));
+fprintf(fid,strcat(layerName,'numInputs = ',num2str(numInput),', \n'));
+
+fprintf(fid,strcat(layerName,'weightTable = ['));
+% j indexes rows (neurons)
+% i indexes columns (inputs)
+for j=1:numRow
+    if (numCol==1)
+        if (numRow == 1)
+            fprintf(fid,'%8.8f ], \n',Y(j,1));
+        elseif (j == 1)
+            fprintf(fid,'%8.8f ; \n',Y(j,1));
+        elseif (j < numRow)
+            fprintf(fid,'\t %8.8f ; \n',Y(j,1));
+        else
+            fprintf(fid,'\t %8.8f ],\n',Y(j,1));
+        end
+    else
+        for i=1:numCol
+            if (j > 1 && i == 1)
+                fprintf(fid,'\t %8.8f ,',Y(j,i));
+            elseif (i==numCol)
+                if (j < numRow)
+                    fprintf(fid,'%8.8f ; \n',Y(j,i));
+                else
+                    fprintf(fid,'%8.8f ],\n',Y(j,i));
+                end
+            else
+                fprintf(fid,'%8.8f ,',Y(j,i));
+            end
+        end
+    end
+end
+
+% The bias is always written as a single column.
+numCol=1;
+fprintf(fid,strcat(layerName,'biasTable = ['));
+for j=1:numRow
+    if (numCol==1)
+        if (numRow == 1)
+            fprintf(fid,'%8.8f ]',Z(j,1));
+        elseif (j == 1)
+            fprintf(fid,'%8.8f ; \n',Z(j,1));
+        elseif (j < numRow)
+            fprintf(fid,'\t %8.8f ; \n',Z(j,1));
+        else
+            fprintf(fid,'\t %8.8f ]',Z(j,1));
+        end
+    else
+        for i=1:numCol
+            if (j > 1 && i == 1)
+                fprintf(fid,'\t %8.8f ,',Z(j,i));
+            elseif (i==numCol)
+                if (j < numRow)
+                    fprintf(fid,'%8.8f ; \n',Z(j,i));
+                else
+                    fprintf(fid,'%8.8f ]',Z(j,i));
+                end
+            else
+                fprintf(fid,'%8.8f ,',Z(j,i));
+            end
+        end
+    end
+end
+
+
+printFun=1;
+
+if (strcmp(actFun,'lin'))
+    fun='PureLin';
+elseif (strcmp(actFun,'tan'))
+    fun='TanSig';
+elseif (strcmp(actFun,'log'))
+    fun='LogSig';
+elseif (strcmp(actFun,'rad'))
+    fun='RadBas';
+elseif (strcmp(actFun,'noFun'))
+    printFun = 0;
+else
+    error('Activation Function Name is incorrect');
+end
+
+if(printFun)
+    fprintf(fid,strcat(', \n',layerName,'NeuronActivationFunction=NeuralNetwork.Types.ActivationFunction.',fun));
+end
+
+fclose(fid);
diff --git a/NeuralNetwork 1.0/package.mo b/NeuralNetwork 1.0/package.mo
new file mode 100644
index 0000000..a5c91fe
--- /dev/null
+++ b/NeuralNetwork 1.0/package.mo
@@ -0,0 +1,1714 @@
+package NeuralNetwork
+  annotation (uses(Modelica(version="2.2")), Documentation(info="
+
+This is the main package of the NeuralNetwork library.
+
+
+Release Notes:
+
+
+"));
+
+  package BaseClasses
+    "The basic element of the NeuralNetwork library is modeled"
+    block NeuralNetworkLayer
+      "This is the basic model for a neural network layer"
+
+      parameter Integer numNeurons=1
+        "It specifies the number of neurons which compose the layer"
+        annotation(Dialog(group="Layer Data Definition"));
+      parameter Integer numInputs=1
+        "It specifies the number of inputs of the layer"
+        annotation(Dialog(group="Layer Data Definition"));
+      parameter Real weightTable[:,:] = [0, 0; 0, 0]
+        "It is the weight table of the layer" annotation(Dialog(group="Layer Data Definition"));
+      parameter Real biasTable[:,:] = [0, 0]
+        "It is the bias table of the layer" annotation(Dialog(group="Layer Data Definition"));
+      parameter NeuralNetwork.Types.ActivationFunction.Temp
+        NeuronActivationFunction = NeuralNetwork.Types.ActivationFunction.TanSig
+        "It is the activation function of the layer"
+        annotation(Dialog(group="Layer Data Definition"));
+
+    protected
+      extends Modelica.Blocks.Interfaces.MIMO(final nin=numInputs,final nout=numNeurons);
+
+    equation
+      // The output is the selected activation function applied to weightTable*u + bias
+      if (NeuronActivationFunction == NeuralNetwork.Types.ActivationFunction.PureLin) then
+        y = weightTable * u + biasTable[:,1];
+      elseif (NeuronActivationFunction == NeuralNetwork.Types.ActivationFunction.TanSig) then
+        y = Modelica.Math.tanh(weightTable * u + biasTable[:,1]);
+      elseif (NeuronActivationFunction == NeuralNetwork.Types.ActivationFunction.LogSig) then
+        y = NeuralNetwork.Utilities.LogSig(weightTable * u + biasTable[:,1]);
+      elseif (NeuronActivationFunction == NeuralNetwork.Types.ActivationFunction.RadBas) then
+        // Radial basis: the distance between input and weight rows, scaled by the bias
+        y = vector(NeuralNetwork.Utilities.RadBas(matrix(NeuralNetwork.Utilities.ElementWiseProduct(matrix(NeuralNetwork.Utilities.Dist(weightTable,matrix(u))),matrix(biasTable)))));
+      end if;
+
+      annotation (Icon(Bitmap(extent=[-100,94; 100,-98], name="Icons/NN-Symbol.png")),
+        Diagram(Text(
+            extent=[-102,118; 102,62],
+            style(color=3, rgbcolor={0,0,255}),
+            string="NeuralNetworkLayer"),
+          Bitmap(extent=[-112,78; 114,-96], name="Icons/NN-Symbol.png"),
+          Rectangle(extent=[-100,100; 100,-100], style(
+              color=3,
+              rgbcolor={0,0,255},
+              thickness=4))),
+        DymolaStoredErrors,
+        Documentation(info="
+
+
+This block models a Neural Network Layer.
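+
+The output of the layer is computed as y = f(weightTable*u + biasTable[:,1]),
+where f is the activation function selected through the
+NeuronActivationFunction parameter: the identity for PureLin, tanh for TanSig,
+the logistic function for LogSig; for RadBas each output is a radial basis
+function of the distance between the input vector and the corresponding row
+of the weight table, scaled by the bias.
+
+A minimal usage sketch (hypothetical parameter values; a TanSig layer with
+two neurons and two inputs):
+
+  NeuralNetwork.BaseClasses.NeuralNetworkLayer layer(
+    numNeurons=2,
+    numInputs=2,
+    weightTable=[0.5, -0.3; 0.1, 0.8],
+    biasTable=[0.1; -0.2],
+    NeuronActivationFunction=NeuralNetwork.Types.ActivationFunction.TanSig);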
+
+
+A Neural Network Layer is specified by the following parameters:
+