GTK+ IOStream  Beta
GTK+ IOStream adds C++ iostream operators to GTK+ — now with extra abilities such as network serialisation.
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
NeuralNetworkTest.C
Go to the documentation of this file.
1 /* Copyright 2000-2013 Matt Flax <flatmax@flatmax.org>
2  This file is part of GTK+ IOStream class set
3 
4  GTK+ IOStream is free software; you can redistribute it and/or modify
5  it under the terms of the GNU General Public License as published by
6  the Free Software Foundation; either version 2 of the License, or
7  (at your option) any later version.
8 
9  GTK+ IOStream is distributed in the hope that it will be useful,
10  but WITHOUT ANY WARRANTY; without even the implied warranty of
11  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12  GNU General Public License for more details.
13 
14  You should have received a copy of the GNU General Public License
15  along with GTK+ IOStream. If not, see <http://www.gnu.org/licenses/>
16  */
17 
18 #include <Eigen/Dense>
19 #include <fstream>
20 #include <iostream>
21 using namespace std;
22 
23 #include "NeuralNetwork.H"
24 
25 /* Function to read double data from file.
26 First number is the number of rows.
27 Second number is the number of columns.
28 The rest are matrix data.
29 */
30 Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> loadFromFile(string fileName){
31  Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> matrix;
32  ifstream input(fileName.c_str(), ios::binary); // open the file
33  if (input){
34  double r, c;
35  input.read( reinterpret_cast<char*>( &r), sizeof(r));
36  input.read( reinterpret_cast<char*>( &c), sizeof(c));
37 // cout<<"rows "<<r<<" cols "<<c<<endl;
38  matrix.resize(r,c);
39  for (int i=0; i<c; i++)
40  for (int j=0; j<r; j++)
41  input.read( reinterpret_cast<char*>( &matrix(j,i)), sizeof(double));
42 // cout<<matrix<<endl;
43  if (matrix.rows()==1)
44  matrix.transposeInPlace();
45  }
46  return matrix;
47 }
48 
49 int main(int argc, char *argv[]){
50  // To construct a network, we load some weights and biases from file and create a layer using them
51  // This is done for each layer.
52  // Finally a NeuralNetwork is created to execute each layer
53 
54  // load the input layer weights and biases
55  Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> weights=loadFromFile(string("testVectors/inputWeights.dat"));
56  Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> bias=loadFromFile(string("testVectors/inputBias.dat"));
57 
58  // Begin constructing the neural network topology
59  vector<NeuralLayer<double> *> networkLayers;
60  networkLayers.push_back(new TanhLayer<double>(weights, bias));
61 
62  // load the hidden layer weights and biases
63  weights=loadFromFile(string("testVectors/hiddenWeights.dat"));
64  bias=loadFromFile(string("testVectors/hiddenBias.dat"));
65  networkLayers.push_back(new TanhLayer<double>(weights, bias));
66 
67  // load the output layer weights and biases
68  weights=loadFromFile(string("testVectors/outputWeights.dat"));
69  bias=loadFromFile(string("testVectors/outputBias.dat"));
70 
71  networkLayers.push_back(new SigmoidLayer<double>(weights, bias));
72 
73  // setup some input
74  Eigen::Matrix<double, Eigen::Dynamic, 1> input(10,1);
75  input<<0.8333,0.8333,0.8333,0.8333,0.8333,0.6871,0.5833,0.4371,0.3333,0.4000;
76 
78  nn.activate(networkLayers, input);
79 //cout<<"layer 0 output"<<endl;
80 // cout<<networkLayers[0]->output<<endl;
81 //cout<<"layer 1 output"<<endl;
82 // cout<<networkLayers[1]->output<<endl;
83 //cout<<"layer 2 output"<<endl;
84 // cout<<networkLayers[2]->output<<endl;
85 
86  Eigen::Matrix<double, Eigen::Dynamic, 1> outputExpected(9,1);
87  outputExpected<<0.2039,0.5875,0.2798,0.6588,0.5064,0.5675,0.3414,0.6927,0.3164;
88 
89  cout<<"difference = "<<networkLayers[2]->output-outputExpected<<endl;
90 
91  // clean up
92  for (vector<NeuralLayer<double> *>::iterator nl=networkLayers.begin(); nl!=networkLayers.end(); ++nl)
93  delete (*nl);
94  return 0;
95 }