From 35e0a4cfe32448b65d57a579b00f718492cc9a25 Mon Sep 17 00:00:00 2001
From: Ping He
Date: Sat, 6 Apr 2024 17:11:52 -0500
Subject: [PATCH] Added radialBasisFunc and ReLU for regModel (#619)

* Added the ReLU activation func.

* Added the radialBasisFunc for regModel.
---
 src/adjoint/DARegression/DARegression.C | 71 ++++++++++++++++++++++++-
 src/adjoint/DARegression/DARegression.H |  6 +++
 2 files changed, 75 insertions(+), 2 deletions(-)

diff --git a/src/adjoint/DARegression/DARegression.C b/src/adjoint/DARegression/DARegression.C
index a6adfc78..fac14465 100755
--- a/src/adjoint/DARegression/DARegression.C
+++ b/src/adjoint/DARegression/DARegression.C
@@ -49,6 +49,18 @@ DARegression::DARegression(
     {
         regSubDict.readEntry("hiddenLayerNeurons", hiddenLayerNeurons_);
         regSubDict.readEntry("activationFunction", activationFunction_);
+        if (activationFunction_ == "ReLU")
+        {
+            leakyCoeff_ = regSubDict.lookupOrDefault<scalar>("leakyCoeff", 0.0);
+        }
+    }
+    else if (modelType_ == "radialBasisFunction")
+    {
+        nRBFs_ = regSubDict.getLabel("nRBFs");
+    }
+    else
+    {
+        FatalErrorIn("") << "modelType_: " << modelType_ << " not supported. Options are: neuralNetwork and radialBasisFunction" << abort(FatalError);
     }
 
     // initialize parameters and give it large values
@@ -323,9 +335,16 @@ label DARegression::compute()
             {
                 layerVals[layerI][neuronI] = (1 - exp(-2 * layerVals[layerI][neuronI])) / (1 + exp(-2 * layerVals[layerI][neuronI]));
             }
+            else if (activationFunction_ == "ReLU")
+            {
+                if (layerVals[layerI][neuronI] < 0)
+                {
+                    layerVals[layerI][neuronI] = leakyCoeff_ * layerVals[layerI][neuronI];
+                }
+            }
             else
             {
-                FatalErrorIn("") << "activationFunction not valid. Options are: sigmoid and tanh" << abort(FatalError);
+                FatalErrorIn("") << "activationFunction not valid. Options are: sigmoid, tanh, and ReLU" << abort(FatalError);
             }
         }
     }
@@ -350,9 +369,44 @@ label DARegression::compute()
 
         outputField.correctBoundaryConditions();
     }
+    else if (modelType_ == "radialBasisFunction")
+    {
+        List<List<scalar>> inputFields;
+        inputFields.setSize(inputNames_.size());
+
+        this->calcInput(inputFields);
+
+        label nInputs = inputNames_.size();
+
+        // parameters per RBF basis: a (mean, std) pair for each input, followed by one weight
+        label dP = 2 * nInputs + 1;
+
+        forAll(mesh_.cells(), cellI)
+        {
+            scalar outputVal = 0.0;
+            for (label i = 0; i < nRBFs_; i++)
+            {
+                scalar expCoeff = 0.0;
+                for (label j = 0; j < nInputs; j++)
+                {
+                    scalar A = (inputFields[j][cellI] - parameters_[dP * i + 2 * j]) * (inputFields[j][cellI] - parameters_[dP * i + 2 * j]);
+                    scalar B = 2 * parameters_[dP * i + 2 * j + 1] * parameters_[dP * i + 2 * j + 1];
+                    expCoeff += A / B;
+                }
+                outputVal += parameters_[dP * i + dP - 1] * exp(-expCoeff); // weight is the last of RBF i's dP parameters
+            }
+
+            outputField[cellI] = outputScale_ * (outputVal + outputShift_);
+        }
+
+        // check if the output values are valid otherwise fix/bound them
+        fail = this->checkOutput(outputField);
+
+        outputField.correctBoundaryConditions();
+    }
     else
     {
-        FatalErrorIn("") << "modelType_: " << modelType_ << " not supported. Options are: neuralNetwork" << abort(FatalError);
+        FatalErrorIn("") << "modelType_: " << modelType_ << " not supported. Options are: neuralNetwork and radialBasisFunction" << abort(FatalError);
     }
 
     return fail;
@@ -397,6 +451,19 @@ label DARegression::nParameters()
 
         return nParameters;
     }
+    else if (modelType_ == "radialBasisFunction")
+    {
+        label nInputs = inputNames_.size();
+
+        // each RBF has a weight, nInputs mean, and nInputs std
+        label nParameters = nRBFs_ * (2 * nInputs + 1);
+
+        return nParameters;
+    }
+    else
+    {
+        FatalErrorIn("") << "modelType_: " << modelType_ << " not supported. Options are: neuralNetwork and radialBasisFunction" << abort(FatalError);
+    }
 }
 
 label DARegression::checkOutput(volScalarField& outputField)
diff --git a/src/adjoint/DARegression/DARegression.H b/src/adjoint/DARegression/DARegression.H
index 7c8b4df4..f3b11969 100755
--- a/src/adjoint/DARegression/DARegression.H
+++ b/src/adjoint/DARegression/DARegression.H
@@ -81,6 +81,9 @@ protected:
     /// neural network activation function
     word activationFunction_;
 
+    /// if the ReLU activation function is used we can prescribe a potentially leaky coefficient
+    scalar leakyCoeff_ = 0.0;
+
     /// the upper bound for the output
     scalar outputUpperBound_;
 
@@ -93,6 +96,9 @@
     /// default output values
     scalar defaultOutputValue_;
 
+    /// number of radial basis function
+    label nRBFs_;
+
 public:
     /// Constructors
     DARegression(