Generates neural net instances through genetic algorithm for use in task learning by m3pi. Reward determined by detection of black areas on arena floor, or by amount of area explored in limited time. Due to memory size, limited # of nets.

Dependencies:   Ping m3pimaze mbed

Committer:
parkbanks
Date:
Thu Apr 23 20:16:51 2015 +0000
Revision:
0:ac93d9db8dbd
Initial commit, bot fully working.

Who changed what in which revision?

User | Revision | Line number | New contents of line
parkbanks 0:ac93d9db8dbd 1 #include "CNeuralNet.h"
parkbanks 0:ac93d9db8dbd 2
parkbanks 0:ac93d9db8dbd 3
parkbanks 0:ac93d9db8dbd 4 // Neuron Methods
parkbanks 0:ac93d9db8dbd 5 SNeuron::SNeuron(int NumInputs): m_NumInputs(NumInputs+1)
parkbanks 0:ac93d9db8dbd 6 {
parkbanks 0:ac93d9db8dbd 7 //we need an additional weight for the bias hence the +1
parkbanks 0:ac93d9db8dbd 8 for (int i=0; i<NumInputs+1; ++i)
parkbanks 0:ac93d9db8dbd 9 {
parkbanks 0:ac93d9db8dbd 10 //set up the weights with an initial random value
parkbanks 0:ac93d9db8dbd 11 m_vecWeight.push_back(RandomClamped());
parkbanks 0:ac93d9db8dbd 12 }
parkbanks 0:ac93d9db8dbd 13 }
parkbanks 0:ac93d9db8dbd 14
parkbanks 0:ac93d9db8dbd 15
parkbanks 0:ac93d9db8dbd 16 // Neuron Layer Methods
parkbanks 0:ac93d9db8dbd 17 // Calls SNeuron constructor for each neuron in each layer
parkbanks 0:ac93d9db8dbd 18 SNeuronLayer::SNeuronLayer(int NumNeurons, int NumInputsPerNeuron): m_NumNeurons(NumNeurons)
parkbanks 0:ac93d9db8dbd 19 {
parkbanks 0:ac93d9db8dbd 20 for (int i=0; i<NumNeurons; ++i)
parkbanks 0:ac93d9db8dbd 21
parkbanks 0:ac93d9db8dbd 22 m_vecNeurons.push_back(SNeuron(NumInputsPerNeuron));
parkbanks 0:ac93d9db8dbd 23 }
parkbanks 0:ac93d9db8dbd 24
parkbanks 0:ac93d9db8dbd 25
parkbanks 0:ac93d9db8dbd 26 // CNeuralNet Methods
parkbanks 0:ac93d9db8dbd 27 // creates a ANN based on given values
parkbanks 0:ac93d9db8dbd 28 // add function so values are taken from main
parkbanks 0:ac93d9db8dbd 29 CNeuralNet::CNeuralNet()
parkbanks 0:ac93d9db8dbd 30 {
parkbanks 0:ac93d9db8dbd 31 m_NumInputs = 2;
parkbanks 0:ac93d9db8dbd 32 m_NumOutputs = 2;
parkbanks 0:ac93d9db8dbd 33 m_NumHiddenLayers = 0;
parkbanks 0:ac93d9db8dbd 34 m_NeuronsPerHiddenLyr = 0;
parkbanks 0:ac93d9db8dbd 35 CreateNet();
parkbanks 0:ac93d9db8dbd 36 }
parkbanks 0:ac93d9db8dbd 37
parkbanks 0:ac93d9db8dbd 38 // Builds ANN with random weights from -1 to 1.
parkbanks 0:ac93d9db8dbd 39 void CNeuralNet::CreateNet()
parkbanks 0:ac93d9db8dbd 40 {
parkbanks 0:ac93d9db8dbd 41 //network layers
parkbanks 0:ac93d9db8dbd 42 if (m_NumHiddenLayers > 0)
parkbanks 0:ac93d9db8dbd 43 {
parkbanks 0:ac93d9db8dbd 44 //first hidden layer
parkbanks 0:ac93d9db8dbd 45 m_vecLayers.push_back(SNeuronLayer(m_NeuronsPerHiddenLyr, m_NumInputs));
parkbanks 0:ac93d9db8dbd 46
parkbanks 0:ac93d9db8dbd 47 for (int i=0; i<m_NumHiddenLayers-1; ++i)
parkbanks 0:ac93d9db8dbd 48 {
parkbanks 0:ac93d9db8dbd 49 m_vecLayers.push_back(SNeuronLayer(m_NeuronsPerHiddenLyr, m_NeuronsPerHiddenLyr));
parkbanks 0:ac93d9db8dbd 50 }
parkbanks 0:ac93d9db8dbd 51 //output layer
parkbanks 0:ac93d9db8dbd 52 m_vecLayers.push_back(SNeuronLayer(m_NumOutputs, m_NeuronsPerHiddenLyr));
parkbanks 0:ac93d9db8dbd 53 }
parkbanks 0:ac93d9db8dbd 54 else{
parkbanks 0:ac93d9db8dbd 55 m_vecLayers.push_back(SNeuronLayer(m_NumOutputs, m_NumInputs));
parkbanks 0:ac93d9db8dbd 56 }
parkbanks 0:ac93d9db8dbd 57 }
parkbanks 0:ac93d9db8dbd 58
parkbanks 0:ac93d9db8dbd 59 // returns vector of weights
parkbanks 0:ac93d9db8dbd 60 vector<float> CNeuralNet::GetWeights() const
parkbanks 0:ac93d9db8dbd 61 {
parkbanks 0:ac93d9db8dbd 62 //this will hold the weights
parkbanks 0:ac93d9db8dbd 63 vector<float> weights;
parkbanks 0:ac93d9db8dbd 64
parkbanks 0:ac93d9db8dbd 65 for (int i=0; i<m_NumHiddenLayers + 1; ++i){
parkbanks 0:ac93d9db8dbd 66 for (int j=0; j<m_vecLayers[i].m_NumNeurons; ++j){
parkbanks 0:ac93d9db8dbd 67 for (int k=0; k<m_vecLayers[i].m_vecNeurons[j].m_NumInputs; ++k){
parkbanks 0:ac93d9db8dbd 68 weights.push_back(m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k]);
parkbanks 0:ac93d9db8dbd 69 }
parkbanks 0:ac93d9db8dbd 70 }
parkbanks 0:ac93d9db8dbd 71 }
parkbanks 0:ac93d9db8dbd 72 return weights;
parkbanks 0:ac93d9db8dbd 73 }
parkbanks 0:ac93d9db8dbd 74
parkbanks 0:ac93d9db8dbd 75 // replaces weights with values given in float vector
parkbanks 0:ac93d9db8dbd 76 void CNeuralNet::PutWeights(vector<float> &weights)
parkbanks 0:ac93d9db8dbd 77 {
parkbanks 0:ac93d9db8dbd 78 int cWeight = 0;
parkbanks 0:ac93d9db8dbd 79 for (int i=0; i<m_NumHiddenLayers + 1; ++i){
parkbanks 0:ac93d9db8dbd 80 for (int j=0; j<m_vecLayers[i].m_NumNeurons; ++j){
parkbanks 0:ac93d9db8dbd 81 for (int k=0; k<m_vecLayers[i].m_vecNeurons[j].m_NumInputs; ++k){
parkbanks 0:ac93d9db8dbd 82 m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k] = weights[cWeight++];
parkbanks 0:ac93d9db8dbd 83 }
parkbanks 0:ac93d9db8dbd 84 }
parkbanks 0:ac93d9db8dbd 85 }
parkbanks 0:ac93d9db8dbd 86 return;
parkbanks 0:ac93d9db8dbd 87 }
parkbanks 0:ac93d9db8dbd 88
parkbanks 0:ac93d9db8dbd 89 // returns # of weights needed for net
parkbanks 0:ac93d9db8dbd 90 int CNeuralNet::GetNumberOfWeights() const
parkbanks 0:ac93d9db8dbd 91 {
parkbanks 0:ac93d9db8dbd 92 int weights = 0;
parkbanks 0:ac93d9db8dbd 93 for (int i=0; i<m_NumHiddenLayers + 1; ++i){
parkbanks 0:ac93d9db8dbd 94 for (int j=0; j<m_vecLayers[i].m_NumNeurons; ++j){
parkbanks 0:ac93d9db8dbd 95 for (int k=0; k<m_vecLayers[i].m_vecNeurons[j].m_NumInputs; ++k){
parkbanks 0:ac93d9db8dbd 96 weights++;
parkbanks 0:ac93d9db8dbd 97 }
parkbanks 0:ac93d9db8dbd 98 }
parkbanks 0:ac93d9db8dbd 99 }
parkbanks 0:ac93d9db8dbd 100 return weights;
parkbanks 0:ac93d9db8dbd 101 }
parkbanks 0:ac93d9db8dbd 102
parkbanks 0:ac93d9db8dbd 103 // Calculates output values from inputs
parkbanks 0:ac93d9db8dbd 104 vector<float> CNeuralNet::Update(vector<float> &inputs)
parkbanks 0:ac93d9db8dbd 105 {
parkbanks 0:ac93d9db8dbd 106 //stores the resultant outputs from each layer
parkbanks 0:ac93d9db8dbd 107 vector<float> outputs;
parkbanks 0:ac93d9db8dbd 108 int cWeight = 0;
parkbanks 0:ac93d9db8dbd 109
parkbanks 0:ac93d9db8dbd 110 //layer
parkbanks 0:ac93d9db8dbd 111 for (int i=0; i<m_NumHiddenLayers + 1; ++i){
parkbanks 0:ac93d9db8dbd 112 if ( i > 0 ){
parkbanks 0:ac93d9db8dbd 113 inputs = outputs;
parkbanks 0:ac93d9db8dbd 114 }
parkbanks 0:ac93d9db8dbd 115 outputs.clear();
parkbanks 0:ac93d9db8dbd 116 cWeight = 0;
parkbanks 0:ac93d9db8dbd 117
parkbanks 0:ac93d9db8dbd 118 //for each neuron sum inputs*weights and feed to sigmoid for weight
parkbanks 0:ac93d9db8dbd 119 for (int j=0; j<m_vecLayers[i].m_NumNeurons; ++j){
parkbanks 0:ac93d9db8dbd 120 float netinput = 0;
parkbanks 0:ac93d9db8dbd 121 int NumInputs = m_vecLayers[i].m_vecNeurons[j].m_NumInputs;
parkbanks 0:ac93d9db8dbd 122
parkbanks 0:ac93d9db8dbd 123 //for each weight
parkbanks 0:ac93d9db8dbd 124 for (int k=0; k<NumInputs - 1; ++k){
parkbanks 0:ac93d9db8dbd 125 //sum weights*inputs
parkbanks 0:ac93d9db8dbd 126 netinput += m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k] * inputs[cWeight++];
parkbanks 0:ac93d9db8dbd 127 }
parkbanks 0:ac93d9db8dbd 128
parkbanks 0:ac93d9db8dbd 129 //add in bias
parkbanks 0:ac93d9db8dbd 130 netinput += m_vecLayers[i].m_vecNeurons[j].m_vecWeight[NumInputs-1] * -1; //bias is -1
parkbanks 0:ac93d9db8dbd 131
parkbanks 0:ac93d9db8dbd 132 //store outputs from each layer, feed combined activation through sigmoid
parkbanks 0:ac93d9db8dbd 133 outputs.push_back(Sigmoid(netinput, 1));//activation response of 1
parkbanks 0:ac93d9db8dbd 134 cWeight = 0;
parkbanks 0:ac93d9db8dbd 135 }
parkbanks 0:ac93d9db8dbd 136 }
parkbanks 0:ac93d9db8dbd 137 return outputs;
parkbanks 0:ac93d9db8dbd 138 }
parkbanks 0:ac93d9db8dbd 139
parkbanks 0:ac93d9db8dbd 140 // Sigmoid function
parkbanks 0:ac93d9db8dbd 141 float CNeuralNet::Sigmoid(float netinput, float response){
parkbanks 0:ac93d9db8dbd 142 return ( 1 / ( 1 + exp(-netinput / response)));
parkbanks 0:ac93d9db8dbd 143 }
parkbanks 0:ac93d9db8dbd 144