summaryrefslogtreecommitdiffstats
path: root/Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/main.cpp
diff options
context:
space:
mode:
authorSven Eisenhauer <sven@sven-eisenhauer.net>2023-11-10 15:11:48 +0100
committerSven Eisenhauer <sven@sven-eisenhauer.net>2023-11-10 15:11:48 +0100
commit33613a85afc4b1481367fbe92a17ee59c240250b (patch)
tree670b842326116b376b505ec2263878912fca97e2 /Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/main.cpp
downloadStudium-master.tar.gz
Studium-master.tar.bz2
add new repoHEADmaster
Diffstat (limited to 'Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/main.cpp')
-rw-r--r--Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/main.cpp401
1 files changed, 401 insertions, 0 deletions
diff --git a/Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/main.cpp b/Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/main.cpp
new file mode 100644
index 0000000..6980f5f
--- /dev/null
+++ b/Master/Modellbildung_und_Simulation/Aufgabenblatt3/MLP/src/main.cpp
@@ -0,0 +1,401 @@
+/*
+ * main.cpp
+ *
+ * Created on: 09.06.2011
+ * Author: sven
+ */
+
#include "MLPConfig.h"
#include "MLP.h"

#include <boost/foreach.hpp>
#include <boost/program_options.hpp>
#include <boost/tokenizer.hpp>

#include <cmath>
#include <csignal>
#include <cstdint>   // uint32_t/int32_t/uint8_t (previously only available transitively)
#include <cstdlib>
#include <fstream>   // std::ifstream / std::ofstream (previously only available transitively)
#include <iostream>
#include <sstream>   // std::istringstream used by toDouble() (previously only available transitively)
#include <string>
#include <vector>
+
+namespace po = boost::program_options;
+
/* Data and configuration file names used by the individual applications
 * (XOR demo, PCA-feature MLP, raw-feature MLP). */
const char* MLP_XOR_CONFIGFILE = "Init_MLP.txt";
const char* MLP_PCA_TRAINFILE = "train_pca";
const char* MLP_PCA_TESTFILE = "test_pca";
const char* MLP_RAW_TRAINFILE = "train_raw";
const char* MLP_RAW_TESTFILE = "test_raw";
const char* MLP_PCA_CONFIGFILE = "mlp_pca_config.txt";
const char* MLP_RAW_CONFIGFILE = "mlp_raw_config.txt";

const uint32_t VALIDATION_DATA_RATIO = 20; /* every x-th value will become a validation pattern */
const double EPSILON = 0.001; /* tolerance when comparing thresholded outputs against 0/1 targets */
+
/* Forward declarations of the per-application entry points. */
void mlp_xor(MLPConfig& config);
void mlp_train_pca(MLPConfig& config);
void mlp_train_raw(MLPConfig& config);
void mlp_test_pca(MLPConfig& config);
void mlp_test_raw(MLPConfig& config);
void parseTrainData(const char*,Traindata&,Traindata&);
void signal_handler(int signal);
/* Points at the MLP currently being trained so the SIGINT handler can ask it
 * to stop; NULL whenever no training is in progress. */
static MLP* mlpPtr = NULL;
+
+void signal_handler(int signal)
+{
+ std::cout << "Terminating..." << std::endl;
+ if (mlpPtr) {
+ mlpPtr->stop();
+ }
+ mlpPtr = NULL;
+}
+
/**
 * Converts a string to a double via stream extraction.
 *
 * @param t text to convert (leading whitespace is skipped by the stream)
 * @return the parsed value, or 0.0 if @p t does not start with a number
 *
 * BUGFIX: `res` was previously uninitialized; if extraction failed (pre-C++11
 * semantics leave the target untouched) an indeterminate value was returned.
 */
double toDouble(const std::string& t)
{
    double res = 0.0; // deterministic fallback when extraction fails
    std::istringstream ss(t);
    ss >> res;
    return res;
}
+
+void parseTrainData(const char* filename, Traindata& td, Traindata& vd)
+{
+ typedef boost::char_separator<char> TrainSepChar;
+ typedef tokenizer<TrainSepChar> TrainTokenizer;
+ TrainSepChar sepCol(":");
+ TrainSepChar sepSpace(" ");
+ ifstream inFile(filename);
+ if (!inFile.is_open()) {
+ std::cerr << "error opening data file " << filename << std::endl;
+ exit(1);
+ }
+ std::string line;
+ uint32_t counter = 0;
+ while(inFile.good())
+ {
+ getline(inFile,line);
+ if (line.empty()) {
+ continue;
+ }
+ TrainTokenizer lineTok(line,sepCol);
+ std::vector<std::string> lineToks;
+ BOOST_FOREACH(string t, lineTok)
+ {
+ lineToks.push_back(t);
+ }
+ Trainingpair tp;
+ TrainTokenizer targetTok(lineToks.at(0),sepSpace);
+ BOOST_FOREACH(string t,targetTok)
+ {
+ tp.mTarget.push_back(toDouble(t));
+ }
+ TrainTokenizer patternTok(lineToks.at(1),sepSpace);
+ BOOST_FOREACH(string t,patternTok)
+ {
+ if (t.find(";") != std::string::npos) {
+ continue;
+ }
+ tp.mPattern.push_back(toDouble(t));
+ }
+ if ( (counter % VALIDATION_DATA_RATIO) != 0 ) {
+ td.push_back(tp);
+ } else {
+ vd.push_back(tp);
+ }
+ counter++;
+ }
+ inFile.close();
+}
+
+void mlp_train_pca(MLPConfig& config)
+{
+ config.setNumInputNeurons(90U);
+ config.setNumHiddenNeurons(90U);
+ config.setNumOutputNeurons(2U);
+ config.initWeights(true);
+ MLP mlp(config);
+ mlpPtr = &mlp;
+ Traindata td;
+ Traindata vd;
+ parseTrainData(MLP_PCA_TRAINFILE,td,vd);
+ mlp.train(td,vd,config.getNumTrainingCycles(),MLP_PCA_CONFIGFILE);
+}
+
+void mlp_test_pca(MLPConfig& config)
+{
+ config.parseConfigfile(MLP_PCA_CONFIGFILE);
+ if (!config.isValid()) {
+ std::cerr << "error parsing config file " << MLP_PCA_CONFIGFILE << std::endl;
+ exit(1);
+ }
+ MLP mlp(config);
+ Traindata td;
+ Output output;
+ parseTrainData(MLP_PCA_TESTFILE,td,td);
+ uint32_t c;
+ uint32_t num = 1;
+ uint32_t errorCount = 0;
+ double out,target;
+ double res[2];
+ BOOST_FOREACH(Trainingpair tp, td) {
+ output.clear();
+ mlp.propagate(tp.mPattern,output);
+ bool errorFound = false;
+ for (c = 0; c < 2; c++) {
+ out = output.at(c);
+ if (out >= 0.5) {
+ res[c] = 1.0;
+ } else {
+ res[c] = 0.0;
+ }
+ }
+ for (c = 0; c < 2; c++) {
+ target = tp.mTarget.at(c);
+ if (std::abs(res[c] - target) > EPSILON) {
+ cout << num << " PCA Testerror: " << res[c] << " != " << target << endl;
+ errorFound = true;
+ } else {
+ cout << num << " PCA OK: " << res[c] << " == " << target << endl;
+ }
+ }
+ if (errorFound) {
+ errorCount++;
+ }
+ num++;
+ }
+ cout << "PCA: " << errorCount << " errors in " << td.size() << " testpatterns" << endl;
+}
+
+void mlp_train_raw(MLPConfig& config)
+{
+ config.setNumInputNeurons(6U*151U);
+ config.setNumHiddenNeurons(6U*151U);
+ config.setNumOutputNeurons(2U);
+ config.initWeights(true);
+ MLP mlp(config);
+ mlpPtr = &mlp;
+ Traindata td;
+ Traindata vd;
+ parseTrainData(MLP_RAW_TRAINFILE,td,vd);
+ mlp.train(td,vd,config.getNumTrainingCycles(),MLP_RAW_CONFIGFILE);
+}
+
+void mlp_test_raw(MLPConfig& config)
+{
+ config.parseConfigfile(MLP_RAW_CONFIGFILE);
+ if (!config.isValid()) {
+ std::cerr << "error parsing config file " << MLP_RAW_CONFIGFILE << std::endl;
+ exit(1);
+ }
+ MLP mlp(config);
+ Traindata td;
+ Output output;
+ parseTrainData(MLP_RAW_TESTFILE,td,td);
+ uint32_t c;
+ uint32_t num = 1;
+ uint32_t errorCount = 0;
+ double out,target;
+ double res[2];
+ BOOST_FOREACH(Trainingpair tp, td) {
+ output.clear();
+ mlp.propagate(tp.mPattern,output);
+ bool errorFound = false;
+ for (c = 0; c < 2; c++) {
+ out = output.at(c);
+ if (out >= 0.5) {
+ res[c] = 1.0;
+ } else {
+ res[c] = 0.0;
+ }
+ }
+ for (c = 0; c < 2; c++) {
+ target = tp.mTarget.at(c);
+ if (std::abs(res[c] - target) > EPSILON) {
+ cout << num << " RAW Testerror: " << res[c] << " != " << target << endl;
+ errorFound = true;
+ } else {
+ cout << num << " RAW OK: " << res[c] << " == " << target << endl;
+ }
+ }
+ if (errorFound) {
+ errorCount++;
+ }
+ num++;
+ }
+ cout << "RAW: " << errorCount << " errors in " << td.size() << " testpatterns" << endl;
+}
+
+void mlp_xor(MLPConfig& config) {
+ MLP mlp(config);
+ uint8_t numPatterns = 4;
+ Traindata td;
+ const double patterns[][2] = {
+ {0.0 , 0.0},
+ {0.0 , 1.0},
+ {1.0 , 0.0},
+ {1.0 , 1.0}
+ };
+ const double targets[][1] = {
+ {0},
+ {1},
+ {1},
+ {0}
+ };
+
+ for (uint8_t i = 0; i < numPatterns ; i++) {
+ Trainingpair tp;
+ tp.mPattern.assign(patterns[i],patterns[i]+2);
+ tp.mTarget.assign(targets[i],targets[i]+1);
+ td.push_back(tp);
+ }
+
+ mlp.train(td,td,config.getNumTrainingCycles(),"mlp_xor_weights.txt");
+
+ // test with other data (Aufgabe 3)
+ int32_t i,j;
+ int32_t minval = -40;
+ int32_t maxval = 50;
+ double x,y;
+ Output result;
+ Pattern test_pat;
+ ofstream csvFile;
+ const char* sep = " ";
+ csvFile.open("mlp_xor_out.dat",ios::out);
+ for (i = minval ; i < maxval ; i++)
+ {
+ x = ((double) i) / 10.0;
+ for (j = minval ; j < maxval ; j++) {
+ y = ((double) j) / 10.0;
+ test_pat.clear();
+ test_pat.push_back(x);
+ test_pat.push_back(y);
+ result.clear();
+ mlp.propagate(test_pat,result);
+ csvFile << x << sep << y;
+ BOOST_FOREACH(double z,result)
+ {
+ csvFile << sep << z;
+ }
+ csvFile << endl;
+ }
+ csvFile << endl;
+ }
+ csvFile.close();
+ system("gnuplot -persist mlp_xor.plt");
+}
+
+int main(int argc, char* argv[])
+{
+ signal(SIGINT,&signal_handler);
+
+ MLPConfig config;
+
+ enum APP {
+ APP_XOR,APP_TRAIN_RAW,APP_TRAIN_PCA,APP_TEST_RAW,APP_TEST_PCA
+ };
+
+ // default settings
+ config.setLernrate(0.8);
+ config.setUpdateMode(MLPConfig::UPDATE_MODE_BATCH);
+ config.setMomentum(0.9);
+ config.setErrorThreshold(0.01);
+ config.setConfigAcceptanceErrorThreshold(0.1);
+ config.setNumTrainingCycles(20000U);
+ APP app = APP_XOR;
+
+ try {
+ po::options_description desc("Allowed options");
+ desc.add_options()
+ ("help", "produce help message")
+ ("trainingcycles", po::value<uint32_t>(), "set number of training cycles. default 20000")
+ ("lernrate", po::value<double>(), "set lernrate. default 0.8")
+ ("momentum", po::value<double>(), "set momentum. default 0.9")
+ ("maxerror", po::value<double>(), "set maximum error that stops training. default 0.01")
+ ("acceptanceerror", po::value<double>(), "set error after which the configuration is accepted if error increases. default 0.1")
+ ("updatemode", po::value<std::string>(), "set update mode <\"batch\"|\"single\">. default batch")
+ ("app", po::value<std::string>(), "application <\"xor\"|\"trainraw\"|\"trainpca\"|\"testraw\"|\"testpca\">. default xor")
+ ;
+
+ po::variables_map vm;
+ po::store(po::parse_command_line(argc, argv, desc), vm);
+ po::notify(vm);
+
+ if (vm.count("help")) {
+ cout << desc << "\n";
+ return 1;
+ }
+ if (vm.count("trainingcycles")) {
+ uint32_t trainingcycles = vm["trainingcycles"].as<uint32_t>();
+ config.setNumTrainingCycles(trainingcycles);
+ }
+ if (vm.count("lernrate")) {
+ double lernrate = vm["lernrate"].as<double>();
+ config.setLernrate(lernrate);
+ }
+ if (vm.count("momentum")) {
+ double momentum = vm["momentum"].as<double>();
+ config.setMomentum(momentum);
+ }
+ if (vm.count("maxerror")) {
+ double maxerror = vm["maxerror"].as<double>();
+ config.setErrorThreshold(maxerror);
+ }
+ if (vm.count("momentum")) {
+ double acceptanceerror = vm["acceptanceerror"].as<double>();
+ config.setConfigAcceptanceErrorThreshold(acceptanceerror);
+ }
+ if (vm.count("updatemode")) {
+ std::string updatemode = vm["updatemode"].as<std::string>();
+ if (updatemode.find("batch") != std::string::npos) {
+ config.setUpdateMode(MLPConfig::UPDATE_MODE_BATCH);
+ } else if(updatemode.find("single") != std::string::npos) {
+ config.setUpdateMode(MLPConfig::UPDATE_MODE_SINGLE);
+ }
+ }
+ if (vm.count("app")) {
+ std::string appOpt = vm["app"].as<std::string>();
+ if (appOpt.find("xor") != std::string::npos) {
+ app = APP_XOR;
+ } else if (appOpt.find("trainraw") != std::string::npos) {
+ app = APP_TRAIN_RAW;
+ } else if (appOpt.find("trainpca") != std::string::npos) {
+ app = APP_TRAIN_PCA;
+ } else if (appOpt.find("testraw") != std::string::npos) {
+ app = APP_TEST_RAW;
+ } else if (appOpt.find("testpca") != std::string::npos) {
+ app = APP_TEST_PCA;
+ }
+ }
+ }
+ catch(std::exception& e) {
+ cerr << "error: " << e.what() << "\n";
+ return 1;
+ }
+ catch(...) {
+ cerr << "Exception of unknown type!\n";
+ }
+ switch(app) {
+ case APP_XOR:
+ config.parseConfigfile(MLP_XOR_CONFIGFILE);
+ if (!config.isValid()) {
+ std::cerr << "error parsing config file " << MLP_XOR_CONFIGFILE << std::endl;
+ exit(1);
+ }
+ mlp_xor(config);
+ break;
+ case APP_TRAIN_PCA:
+ mlp_train_pca(config);
+ break;
+ case APP_TRAIN_RAW:
+ mlp_train_raw(config);
+ break;
+ case APP_TEST_PCA:
+ mlp_test_pca(config);
+ break;
+ case APP_TEST_RAW:
+ mlp_test_raw(config);
+ break;
+ default:
+ break;
+ }
+ return 0;
+}