path: root/Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/MLP.cpp
author    Sven Eisenhauer <sven@sven-eisenhauer.net>    2023-11-10 15:11:48 +0100
committer Sven Eisenhauer <sven@sven-eisenhauer.net>    2023-11-10 15:11:48 +0100
commit    33613a85afc4b1481367fbe92a17ee59c240250b (patch)
tree      670b842326116b376b505ec2263878912fca97e2 /Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/MLP.cpp
add new repo (HEAD, master)
Diffstat (limited to 'Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/MLP.cpp')
-rw-r--r--  Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/MLP.cpp  312
1 file changed, 312 insertions, 0 deletions
diff --git a/Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/MLP.cpp b/Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/MLP.cpp
new file mode 100644
index 0000000..e7abe3b
--- /dev/null
+++ b/Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/MLP.cpp
@@ -0,0 +1,312 @@
+/*
+ * MLP.cpp
+ *
+ * Created on: 09.06.2011
+ * Author: sven
+ */
+#include <boost/foreach.hpp>
+#include <cstdlib>   // exit()
+#include <fstream>   // ofstream used in writeWeightsToFile()
+#include <iostream>
+#include <iomanip>
+#include <cmath>
+#include <limits>
+#include <sstream>
+#include "MLP.h"
+
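+// Neuron indices are laid out in one flat range: index 0 is the bias
+// neuron, [1, mStartHidden) are the input neurons, [mStartHidden,
+// mStartOutput) the hidden neurons and [mStartOutput, mNoNeuron) the
+// output neurons. Weights are stored as a full mNoNeuron x mNoNeuron
+// matrix; entries for connections that do not exist stay zero.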
+MLP::MLP(const MLPConfig& mlpConf)
+:mStartHidden(1 + mlpConf.getNumInputNeurons()), mStartOutput(mStartHidden + mlpConf.getNumHiddenNeurons())
+,mNoNeuron(mStartOutput + mlpConf.getNumOutputNeurons())
+,mActivity(mNoNeuron), mDelta(mNoNeuron)
+,mMomentum(mlpConf.getMomentum())
+,mLernrate(mlpConf.getLernrate()), mConfig(mlpConf), mTrainSuccess(false), mDoStop(false)
+,mOldError(numeric_limits<double>::infinity()) // was left uninitialized, but train() reads it in its first comparison
+{
+ mWeights = mlpConf.getWeights();
+ for (uint32_t i = 0; i < mNoNeuron ; i++) {
+ mDeltaWeights.push_back(vector<double>(mNoNeuron));
+ mOldUpdate.push_back(vector<double>(mNoNeuron));
+ }
+ // Bias neuron is always on
+ mActivity.at(0) = 1.0;
+}
+
+MLP::~MLP()
+{
+
+}
+
+void MLP::dump()
+{
+ int width = 12;
+ int prec = 8;
+ cout << "============ MLP config==========" << endl;
+ cout << "start hidden: " << mStartHidden << endl;
+ cout << "start output: " << mStartOutput << endl;
+ cout << "no neuron: " << mNoNeuron << endl;
+
+ cout << "Weights:" << endl;
+ BOOST_FOREACH(const vector<double>& i, mWeights)
+ {
+ BOOST_FOREACH(double w, i)
+ {
+ cout << setw(width) << fixed << setprecision(prec) << w;
+ }
+ cout << endl;
+ }
+ cout << "Deltaweights:" << endl;
+ BOOST_FOREACH(const vector<double>& i, mDeltaWeights)
+ {
+ BOOST_FOREACH(double w, i)
+ {
+ cout << setw(width) << setprecision(prec) << w;
+ }
+ cout << endl;
+ }
+ cout << "OldUpdate:" << endl;
+ BOOST_FOREACH(const vector<double>& i, mOldUpdate)
+ {
+ BOOST_FOREACH(double w, i)
+ {
+ cout << setw(width) << setprecision(prec) << w;
+ }
+ cout << endl;
+ }
+ cout << "Activity:" << endl;
+ BOOST_FOREACH(double w, mActivity)
+ {
+ cout << setw(width) << setprecision(prec) << w;
+ }
+ cout << endl;
+ cout << "=================================" << endl;
+}
+
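+// Logistic activation sigmoid(a) = 1 / (1 + exp(-a)). Its derivative
+// sigmoid(a) * (1 - sigmoid(a)) shows up again in back_propagate().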
+double MLP::sigmoid(const double& a)
+{
+ return 1.0 / (1.0 + exp(-a));
+}
+
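+// Forward pass: copy the pattern onto the input neurons, then compute
+// the sigmoid-squashed weighted sums layer by layer (bias + inputs ->
+// hidden, bias + hidden -> output). Results end up in mActivity.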
+void MLP::propagate(const Pattern& pattern)
+{
+ uint32_t i,j;
+ double activation = 0.0;
+ if (pattern.size() != (mStartHidden - 1) ) {
+ cerr << "Pattern does not match input neurons: " << pattern.size() << " != " << (mStartHidden - 1) << endl;
+ exit(1);
+ }
+ for (i=1 ; i < mStartHidden ; i++) {
+ mActivity.at(i) = pattern.at(i - 1);
+ }
+ for (i=mStartHidden ; i<mStartOutput ; i++) {
+ activation = mActivity.at(0) * mWeights.at(0).at(i);
+ for (j=1 ; j<mStartHidden ; j++) {
+ activation += mActivity.at(j) * mWeights.at(j).at(i);
+ }
+ mActivity.at(i) = sigmoid(activation);
+ }
+ for (i=mStartOutput ; i < mNoNeuron ; i++) {
+ activation = mActivity.at(0) * mWeights.at(0).at(i);
+ for (j=mStartHidden ; j<mStartOutput ; j++) {
+ activation += mActivity.at(j) * mWeights.at(j).at(i);
+ }
+ mActivity.at(i) = sigmoid(activation);
+ }
+}
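+
+// Backward pass for one pattern: compute the output-layer deltas from
+// the squared-error derivative, propagate them back to the hidden
+// layer, and accumulate the weight gradients in mDeltaWeights. They
+// are only applied to mWeights in update_weight().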
+void MLP::back_propagate(const Target& target)
+{
+ uint32_t i,j;
+ // injected error: output-layer deltas, -(target - activity) times the sigmoid derivative
+ for (i=mStartOutput ; i<mNoNeuron ; i++ ) {
+ mDelta.at(i) = (-1.0) * (target.at( i-mStartOutput ) - mActivity.at(i) );
+ mDelta.at(i) *= mActivity.at(i) * ( 1.0 - mActivity.at(i));
+ }
+
+ // implicit error: hidden-layer deltas, back-propagated through the outgoing weights
+ for (i=mStartHidden ; i<mStartOutput ; i++) {
+ mDelta.at(i) = 0.0;
+ for (j=mStartOutput ; j<mNoNeuron ; j++) {
+ mDelta.at(i) += mWeights.at(i).at(j) * mDelta.at(j);
+ }
+ mDelta.at(i) *= mActivity.at(i) * ( 1.0 - mActivity.at(i) );
+ }
+
+ // bias weight deltas
+ for (j=mStartHidden ; j<mNoNeuron ; j++) {
+ mDeltaWeights.at(0).at(j) += (mActivity.at(0) * mDelta.at(j));
+ }
+
+ // input to hidden weight deltas
+ for (i=1 ; i<mStartHidden ; i++) {
+ for(j=mStartHidden;j < mStartOutput;j++) {
+ mDeltaWeights.at(i).at(j) += (mActivity.at(i) * mDelta.at(j));
+ }
+ }
+
+ // hidden to output weight deltas
+ for(i=mStartHidden;i < mStartOutput;i++) {
+ for(j=mStartOutput;j < mNoNeuron;j++) {
+ mDeltaWeights.at(i).at(j) += (mActivity.at(i) * mDelta.at(j));
+ }
+ }
+}
+
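+// Gradient-descent step with momentum:
+//   update = -mLernrate * dE/dw + mMomentum * previous_update
+// The accumulated gradients are cleared afterwards, so the method
+// works for both single-pattern and batch updates.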
+void MLP::update_weight()
+{
+ uint32_t i,j;
+ double update;
+ mOldWeights = mWeights;
+ for(i=0;i < mNoNeuron;i++) {
+ for(j=0;j < mNoNeuron;j++) {
+ update = (-1.) * mLernrate * mDeltaWeights.at(i).at(j);
+ update += mMomentum * mOldUpdate.at(i).at(j);
+ mWeights.at(i).at(j) += update;
+ mOldUpdate.at(i).at(j) = update;
+ mDeltaWeights.at(i).at(j) = 0.0;
+ }
+ }
+}
+
+void MLP::reset_delta()
+{
+ for (uint32_t i=0 ; i < mNoNeuron ; i++) {
+ mDelta.at(i) = 0.0;
+ }
+}
+
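+// Training loop: propagate/back-propagate every pattern, update the
+// weights either per pattern (UPDATE_MODE_SINGLE) or once per cycle
+// (UPDATE_MODE_BATCH), and validate after each cycle. Training stops
+// early when the validation error drops below the configured
+// threshold, when it starts rising again at an acceptably low level,
+// or when stop() is called from another thread.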
+void MLP::train(const Traindata & traindata, const Traindata& valdata, const uint32_t numCycles, const std::string& outputfilename)
+{
+ Traindata::const_iterator it;
+ double error = numeric_limits<double>::infinity();
+ for (uint32_t i = 0; i < numCycles ; i++) {
+ it = traindata.begin();
+ for( ; it != traindata.end() ; it++) {
+ propagate((*it).mPattern);
+ back_propagate((*it).mTarget);
+ if (mConfig.getUpdateMode() == MLPConfig::UPDATE_MODE_SINGLE) {
+ update_weight();
+ reset_delta();
+ }
+ if (isStop()) {
+ ostringstream oss;
+ oss << "mlp_lastconfig_" << mOldError << ".txt";
+ writeWeightsToFile(oss.str(),mOldWeights);
+ return;
+ }
+ }
+ if (mConfig.getUpdateMode() == MLPConfig::UPDATE_MODE_BATCH) {
+ update_weight();
+ reset_delta();
+ }
+ error = validate(valdata);
+ if (i>0) {
+ cout << "Error: " << error << " cycles: " << (i+1) << endl;
+ // validation error fell below the configured threshold: done
+ if (error < mConfig.getErrorThreshold()) {
+ writeWeightsToFile(outputfilename,mWeights);
+ mTrainSuccess = true;
+ return;
+ }
+ // error starts rising again while already acceptably low: keep the previous (better) weights
+ if ( (error < mConfig.getConfigAcceptanceErrorThreshold() ) && (mOldError < error) ) {
+ cout << "Olderror: " << mOldError << " Error: " << error << " cycles: " << (i+1) << endl;
+ writeWeightsToFile(outputfilename,mOldWeights);
+ mTrainSuccess = true;
+ return;
+ }
+ mOldError = error;
+ }
+ }
+ if (!mTrainSuccess) {
+ ostringstream oss;
+ oss << "mlp_lastconfig_" << error << ".txt";
+ writeWeightsToFile(oss.str(),mWeights);
+ }
+}
+
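+// Validation error: sum of squared differences between target and
+// output activity over the whole validation set. Returns infinity for
+// an empty set so that training never terminates on it by accident.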
+double MLP::validate(const Traindata& valdata)
+{
+ double error = 0.0;
+ if (valdata.empty()) {
+ return numeric_limits<double>::infinity();
+ }
+ uint32_t j;
+ Traindata::const_iterator it = valdata.begin();
+ for( ; it != valdata.end() ; it++) {
+ propagate((*it).mPattern);
+ for(j=mStartOutput;j < mNoNeuron;j++)
+ {
+ error += pow(((*it).mTarget.at(j - mStartOutput) - mActivity.at(j)),2);
+ }
+ }
+ return error;
+}
+
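+// Serializes the network topology and weights in a simple text
+// format: one section listing the neuron indices per layer, followed
+// by "from to weight" triples for the bias, input->hidden and
+// hidden->output connections.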
+void MLP::writeWeightsToFile(const string& filename, const Weights& weights)
+{
+ ofstream outFile(filename.c_str(), ios::out);
+ if (!outFile) {
+ cerr << "Cannot open weight file for writing: " << filename << endl;
+ return;
+ }
+ uint32_t i,j;
+
+ outFile << "Bias: 0" << endl;
+ outFile << "Input:";
+ for (i=1; i<mStartHidden ; i++) {
+ outFile << " " << i;
+ }
+ outFile << endl;
+
+ outFile << "Hidden:";
+ for (i=mStartHidden; i<mStartOutput ; i++) {
+ outFile << " " << i;
+ }
+ outFile << endl;
+
+ outFile << "Output:";
+ for (i=mStartOutput; i<mNoNeuron ; i++) {
+ outFile << " " << i;
+ }
+ outFile << endl;
+
+ outFile << "Threshold" << endl;
+ for ( i=mStartHidden ; i < mNoNeuron ; i++ ) {
+ outFile << "0 " << i << " " << scientific << weights.at(0).at(i) << endl;
+ }
+ outFile << "Input -> Hidden" << endl;
+ for (i=1; i<mStartHidden; i++) {
+ for (j=mStartHidden; j<mStartOutput; j++) {
+ outFile << i << " " << j << " " << scientific << weights.at(i).at(j) << endl;
+ }
+ }
+ outFile << "Hidden -> Output" << endl;
+ for (i=mStartHidden; i<mStartOutput; i++) {
+ for (j=mStartOutput; j<mNoNeuron; j++) {
+ outFile << i << " " << j << " " << scientific << weights.at(i).at(j) << endl;
+ }
+ }
+ outFile.close();
+}
+
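+// Convenience overload: runs the forward pass and appends the output
+// activations to result (the vector is not cleared here, so pass an
+// empty one).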
+void MLP::propagate(const Pattern &pattern, Output &result)
+{
+ propagate(pattern);
+ for (uint32_t i=mStartOutput; i<mNoNeuron ; i++) {
+ result.push_back(mActivity.at(i));
+ }
+}
+
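+// stop()/isStop() guard the mDoStop flag with a mutex so that a
+// running training loop can be cancelled safely from another thread.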
+void MLP::stop()
+{
+ //cout << __FUNCTION__ << " locking mutex" << endl;
+ mMutex.lock();
+ mDoStop = true;
+ mMutex.unlock();
+ //cout << __FUNCTION__ << " unlocking mutex" << endl;
+}
+
+bool MLP::isStop()
+{
+ bool res;
+ //cout << __FUNCTION__ << " locking mutex" << endl;
+ mMutex.lock();
+ res = mDoStop;
+ mMutex.unlock();
+ //cout << __FUNCTION__ << " unlocking mutex" << endl;
+ return res;
+}
\ No newline at end of file