Shark example 3: evolving neural net solving the XOR problem is a Shark example that lets a neural network evolve by random mutation to solve the XOR problem.
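
In a nutshell, the approach is a simple hill climb: create a randomly initialized feed-forward network, repeatedly copy it, add random noise to all weights of the copy, and keep the copy only when it rates better on the four XOR input patterns. The sketch below shows that loop without Shark, using a hand-rolled 2-2-1 network with logistic activations; all names in it (`Net`, `Predict`, `Rate`) are illustrative and not part of the Shark API.

```c++
#include <cmath>
#include <cstdlib>
#include <iostream>

//Random number in [0.0,1.0], as in the Shark example below
double GetRandomUniform()
{
  return static_cast<double>(std::rand()) / static_cast<double>(RAND_MAX);
}

//A hand-rolled 2-2-1 feed-forward network with logistic activations;
//the nine weights are two hidden neurons (two weights and a bias each)
//plus one output neuron (two weights and a bias)
struct Net
{
  double w[9];

  Net()
  {
    //Start from random weights in [-1.0,+1.0]
    for (int i=0; i!=9; ++i) w[i] = (GetRandomUniform() * 2.0) - 1.0;
  }

  static double Sigmoid(const double x) { return 1.0 / (1.0 + std::exp(-x)); }

  double Predict(const double a, const double b) const
  {
    const double h0 = Sigmoid((w[0] * a) + (w[1] * b) + w[2]);
    const double h1 = Sigmoid((w[3] * a) + (w[4] * b) + w[5]);
    return Sigmoid((w[6] * h0) + (w[7] * h1) + w[8]);
  }
};

//Same scoring idea as Rate_xor_success below: start at 4.0 and
//subtract the absolute error on each of the four XOR patterns
double Rate(const Net& n)
{
  return 4.0
    - std::fabs(0.0 - n.Predict(0.0,0.0))
    - std::fabs(1.0 - n.Predict(1.0,0.0))
    - std::fabs(1.0 - n.Predict(0.0,1.0))
    - std::fabs(0.0 - n.Predict(1.0,1.0));
}

int main()
{
  Net best;
  double best_rating = Rate(best);
  for (int t=0; t!=100000; ++t)
  {
    //Copy the best network, shift every weight by a random
    //offset in [-1.0,+1.0] and keep the copy only if it rates better
    Net copy(best);
    for (int i=0; i!=9; ++i)
    {
      copy.w[i] += (GetRandomUniform() * 2.0) - 1.0;
    }
    const double rating = Rate(copy);
    if (rating > best_rating)
    {
      best = copy;
      best_rating = rating;
    }
  }
  std::cout << "Best rating: " << best_rating << '\n';
}
```

The Shark version below does the same, but lets `FFNet` manage the network topology and weights.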

 * Operating system: Ubuntu 10.04 LTS Lucid Lynx
 * IDE: Qt Creator 2.0.0
 * Project type: console application
 * Compiler: G++ 4.4.1
 * Libraries used:
   * Boost
   * Shark

Qt project file:

```
#-------------------------------------------------
#
# Project created by QtCreator 2010-08-15T23:08:25
#
#-------------------------------------------------

QT += core
QT -= gui

TARGET = CppSharkExample3
CONFIG += console
CONFIG -= app_bundle

LIBS += -L/usr/local/lib -lshark

TEMPLATE = app

SOURCES += main.cpp
```
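
Assuming Shark's headers and library are installed under `/usr/local`, as the `LIBS` line suggests, the project should also build from a plain terminal by running `qmake` on this project file followed by `make`; adjust the library path if Shark is installed elsewhere.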

main.cpp

```c++
#include <cassert>
#include <cmath>
#include <cstdlib>
#include <iostream>
#include <vector>

#include <boost/foreach.hpp>

#include <Array/Array.h>
#include <ReClaM/FFNet.h>
#include <ReClaM/createConnectionMatrix.h>

//From http://www.richelbilderbeek.nl/CppGetRandomUniform.htm
double GetRandomUniform()
{
  return static_cast<double>(std::rand())/static_cast<double>(RAND_MAX);
}

///NeuralNet is a derived class of FFNet
///to gain access to some protected methods of FFNet
struct NeuralNet : public FFNet
{
  NeuralNet(
    const int n_inputs,
    const int n_outputs,
    const Array<int> connection_matrix)
  : FFNet(n_inputs,n_outputs,connection_matrix) {}

  NeuralNet(const NeuralNet& n)
  : FFNet(n) {}

  void Activate(const Array<double> &in)
  {
    this->activate(in);
  }

  unsigned int GetNumberOfNeurons()
  {
    return this->numberOfNeurons;
  }

  ///Add a random offset in the range [-m,+m] to every weight
  void mutate(const double m)
  {
    Array<double> weights = this->getWeights();
    BOOST_FOREACH(double& x,weights)
    {
      x+= (GetRandomUniform() * (2.0 * m)) - m;
    }
    this->weightMatrix = weights;
  }
};

NeuralNet CreateNet(
  const int n_inputs,
  const int n_hidden,
  const int n_outputs,
  const double init_weight_min,
  const double init_weight_max)
{
  //Create neural net connection matrix
  Array<int> connection_matrix;
  createConnectionMatrix(connection_matrix,n_inputs, n_hidden, n_outputs);
  //Create the feed-forward neural network
  NeuralNet n(n_inputs, n_outputs, connection_matrix);
  n.initWeights(init_weight_min,init_weight_max);
  return n;
}

///Score a network on the XOR problem: a perfect network scores 4.0,
///as the absolute error on each of the four input patterns is subtracted
double Rate_xor_success(NeuralNet& n)
{
  double rating = 4.0;
  const unsigned int output_neuron_index = n.GetNumberOfNeurons() - 1;
  {
    std::vector<double> v(2);
    v[0] = 0.0;
    v[1] = 0.0;
    Array<double> inputs(v);
    n.Activate(inputs);
    const double output = n.outputValue(output_neuron_index);
    rating -= std::fabs(0.0 - output);
  }
  {
    std::vector<double> v(2);
    v[0] = 1.0;
    v[1] = 0.0;
    Array<double> inputs(v);
    n.Activate(inputs);
    const double output = n.outputValue(output_neuron_index);
    rating -= std::fabs(1.0 - output);
  }
  {
    std::vector<double> v(2);
    v[0] = 0.0;
    v[1] = 1.0;
    Array<double> inputs(v);
    n.Activate(inputs);
    const double output = n.outputValue(output_neuron_index);
    rating -= std::fabs(1.0 - output);
  }
  {
    std::vector<double> v(2);
    v[0] = 1.0;
    v[1] = 1.0;
    Array<double> inputs(v);
    n.Activate(inputs);
    const double output = n.outputValue(output_neuron_index);
    rating -= std::fabs(0.0 - output);
  }
  return rating;
}

int main()
{
  NeuralNet best_net = CreateNet(2,2,1,-1.0,1.1);
  double best_result = Rate_xor_success(best_net);
  for (int t=0; t!=1000000; ++t)
  {
    //Copy the best network found so far, mutate the copy
    //and keep it only if it rates better
    NeuralNet copy(best_net);
    copy.mutate(10.0);
    double result = Rate_xor_success(copy);
    if (result > best_result)
    {
      best_net = copy;
      best_result = result;
      std::cout << "Better result (t=" << t << "): "
        << Rate_xor_success(best_net) << std::endl;
    }
  }
}
```
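
In short: `Rate_xor_success` starts the rating at 4.0 and subtracts the absolute difference between the network's output and the desired XOR answer for each of the four input patterns, so 4.0 is a perfect score. A (hypothetical) network answering 0.1, 0.9, 0.8 and 0.2 on the patterns (0,0), (1,0), (0,1) and (1,1) would, for example, rate 4.0 - 0.1 - 0.1 - 0.2 - 0.2 = 3.4. The loop in `main` hill-climbs: it copies the best network found so far, shifts every weight by up to ±10.0 and keeps the mutant only when it rates better.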

Screen output

```
Better result (t=0): 2
Better result (t=1): 2.00023
Better result (t=3): 2.38233
Better result (t=23): 2.9239
Better result (t=46): 2.92604
Better result (t=72): 2.96079
Better result (t=113): 2.96166
Better result (t=151): 2.97768
Better result (t=676): 2.98292
Better result (t=1687): 2.98896
Better result (t=2255): 2.98969
Better result (t=2542): 2.99526
Better result (t=2760): 3.28787
Better result (t=10811): 3.56417
Better result (t=28436): 3.6376
Better result (t=79855): 3.66048
Better result (t=98464): 3.68368
Better result (t=145369): 3.83807
Better result (t=731553): 3.91487
```
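
A perfect XOR network would reach the maximum rating of 4.0. This run plateaus at about 3.91 after a million mutations, corresponding to a mean absolute error of roughly (4.0 - 3.91) / 4 ≈ 0.02 per input pattern.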