Genetic.cpp
#include "pch.h"
#include "Network.hpp"
#include <random>
// Initialize the seed
std::random_device rd;
// Initialize the random distribution
std::uniform_real_distribution<float> urd(-1.f, 1.f);
// Initialize the random generator
std::default_random_engine dre(rd());
namespace NeuralNetwork {
// Opérateurs génétiques
// ---------------------
void Network::mutation(float mutation_rate)
{
for (auto& layer : m_layers_)
{
for (auto& weights : layer)
{
for (auto& weight : weights)
{
if (abs(urd(dre)) < mutation_rate * 1000)
{
weight = urd(dre);
}
}
}
}
}
Network Network::operator*(const Network& other) const
{
vector<vector<vector<float>>> new_network(m_layers_.size());
for (size_t i = 0; i < m_layers_.size(); ++i)
{
new_network[i] = vector<vector<float>>(m_layers_[i].size());
for (size_t j = 0; j < m_layers_[i].size(); ++j)
{
new_network[i][j] = vector<float>(m_layers_[i][j].size());
for (size_t k = 0; k < m_layers_[i][j].size(); ++k)
{
new_network[i][j][k] = (urd(dre) < 0.f) ? m_layers_[i][j][k] : other.m_layers_[i][j][k];
}
}
}
Network network(new_network, m_rec_);
return network;
}
Network Network::reproduce(const Network& parent1, const Network& parent2, float mutation_rate)
{
Network child = parent1 * parent2;
child.mutation(mutation_rate);
return child;
}
}
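
For context, a minimal sketch of how these operators could drive one generation of a genetic algorithm is shown below. It is an assumption-laden illustration, not part of Genetic.cpp: the next_generation function and the fitness vector are hypothetical, the sketch assumes reproduce is declared static in Network.hpp (its definition above uses no instance state), and it assumes Network is copyable, as suggested by the by-value returns.

// Hypothetical usage sketch of the genetic operators above.
// next_generation() and the fitness scores are placeholders; they are not
// part of this file and would come from the rest of the project.
#include <cstddef>
#include <utility>
#include <vector>
#include "Network.hpp"

using NeuralNetwork::Network;

std::vector<Network> next_generation(const std::vector<Network>& population,
                                     const std::vector<float>& fitness,
                                     float mutation_rate)
{
    // Simple truncation selection: find the two fittest individuals.
    // Assumes the population holds at least two networks.
    std::size_t best = 0, second = 1;
    if (fitness[second] > fitness[best]) std::swap(best, second);
    for (std::size_t i = 2; i < fitness.size(); ++i)
    {
        if (fitness[i] > fitness[best])        { second = best; best = i; }
        else if (fitness[i] > fitness[second]) { second = i; }
    }

    // Every child is a crossover of the two best parents, then mutated.
    std::vector<Network> children;
    children.reserve(population.size());
    for (std::size_t i = 0; i < population.size(); ++i)
    {
        children.push_back(Network::reproduce(population[best], population[second], mutation_rate));
    }
    return children;
}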