refactor and added cma_es with half example
Showing 9 changed files with 760 additions and 187 deletions.
@@ -0,0 +1,13 @@
use cmaes::fmax;
use neuroevolution::cma_es::*;
use neuroevolution::network::*;

fn main() {
    const N: usize = 2;

    let solution = fmax(half::<N>, vec![0.; N * 2], 1.);
    let network: Network<N,2> = get_network(&solution.point);
    let fitness = solution.value;
    println!("half Fitness: {}", fitness);
    println!("{}", network);
}
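A note on the search space in this example: each of the N neurons carries D = 2 parameters (a bias and one angle, both kept in [0, 1] and rescaled inside the network), so fmax optimizes over N * 2 values starting from the zero vector with step size 1. The half function in the cma_es module returns f64::NEG_INFINITY for any candidate outside [0, 1], which keeps the CMA-ES search inside that box.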

@@ -0,0 +1,27 @@
use neuroevolution::one_plus_one_na::*;
use neuroevolution::network::Network;

fn main() {
    let mut network = Network::<2, 2>::new();

    network.optimize(half, 3000);
    println!("half Fitness: {}", half(&network));
    print!("{}", network);

    network.optimize(quarter, 3000);
    println!("quarter Fitness: {}", quarter(&network));
    print!("{}", network);

    network.optimize(two_quarters, 3000);
    println!("two_quarters Fitness: {}", two_quarters(&network));
    print!("{}", network);

    network.optimize(square, 3000);
    println!("square Fitness: {}", square(&network));
    print!("{}", network);

    let mut network = Network::<4, 3>::new();
    network.optimize(cube, 3000);
    println!("cube Fitness: {}", cube(&network));
    print!("{}", network);
}

@@ -0,0 +1,26 @@
use std::f64::consts::PI;
use cmaes::DVector;
use crate::network::*;

const UNIT_CIRCLE_STEPS: u32 = 1000;

pub fn half<const N: usize>(x: &DVector<f64>) -> f64 {
    if x.iter().all(|&x| x >= 0. && x <= 1.) {
        let network: Network<N,2> = get_network(x);
        half_aux(&network)
    } else {
        f64::NEG_INFINITY
    }
}

pub fn half_aux<const N: usize>(network: &Network<N,2>) -> f64 {
    let mut sum = 0;
    for i in 0..UNIT_CIRCLE_STEPS {
        let angle = 2. * PI * i as f64 / UNIT_CIRCLE_STEPS as f64;
        let output = network.evaluate(&[1., angle]);
        if output && angle < PI || !output && angle > PI {
            sum += 1;
        }
    }
    sum as f64 / UNIT_CIRCLE_STEPS as f64
}
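Since half_aux is public, the scoring loop above can also be exercised on its own. Below is a minimal sketch, not part of this commit, assuming the crate is named neuroevolution as in the examples above:

use neuroevolution::cma_es::half_aux;
use neuroevolution::network::Network;

fn main() {
    // A freshly initialised network with 2 neurons over [r, theta] inputs;
    // its parameters are random, so the score will typically sit near chance level.
    let network = Network::<2, 2>::new();
    println!("untrained half fitness: {}", half_aux(&network));
}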

@@ -1 +1,3 @@
pub mod one_plus_one_na;
pub mod cma_es;
pub mod network;

@@ -1,112 +1,2 @@
use neuroevolution::one_plus_one_na::*;
use std::f64::consts::PI;

const UNIT_CIRCLE_STEPS: u32 = 1000;

fn half<const N: usize>(network: &Network<N, 2>) -> f64 {
    let mut sum = 0;
    for i in 0..UNIT_CIRCLE_STEPS {
        let angle = 2. * PI * i as f64 / UNIT_CIRCLE_STEPS as f64;
        let output = network.evaluate(&[1., angle]);
        if output && angle < PI || !output && angle > PI {
            sum += 1;
        }
    }
    sum as f64 / UNIT_CIRCLE_STEPS as f64
}

fn quarter<const N: usize>(network: &Network<N, 2>) -> f64 {
    let mut sum = 0;
    for i in 0..UNIT_CIRCLE_STEPS {
        let angle = 2. * PI * i as f64 / UNIT_CIRCLE_STEPS as f64;
        let output = network.evaluate(&[1., angle]);
        if output && angle < PI / 2. || !output && angle > PI / 2. {
            sum += 1;
        }
    }
    sum as f64 / UNIT_CIRCLE_STEPS as f64
}

fn two_quarters<const N: usize>(network: &Network<N, 2>) -> f64 {
    let mut sum = 0;
    for i in 0..UNIT_CIRCLE_STEPS {
        let angle = 2. * PI * i as f64 / UNIT_CIRCLE_STEPS as f64;
        let output = network.evaluate(&[1., angle]);
        if output && (angle < PI / 2. || angle > PI && angle < 3. * PI / 2.)
            || !output && (angle > PI / 2. && angle < PI || angle > 3. * PI / 2.) {
            sum += 1;
        }
    }
    sum as f64 / UNIT_CIRCLE_STEPS as f64
}

fn square<const N: usize>(network: &Network<N, 2>) -> f64 {
    let points_with_labels = [
        (1., PI / 4., true),
        (1., 3. * PI / 4., false),
        (1., 5. * PI / 4., true),
        (1., 7. * PI / 4., false),
    ];

    points_with_labels
        .iter()
        .map(|&(r, theta, label)| {
            let output = network.evaluate(&[r, theta]);
            if output && label || !output && !label {
                1.
            } else {
                0.
            }
        })
        .sum::<f64>() / 4.
}

fn cube<const N: usize>(network: &Network<N, 3>) -> f64 {
    let points_with_labels = [
        (1., PI / 4., PI / 4., true),
        (1., 3. * PI / 4., PI / 4., false),
        (1., 5. * PI / 4., PI / 4., true),
        (1., 7. * PI / 4., PI / 4., false),
        (1., PI / 4., 3. * PI / 4., true),
        (1., 3. * PI / 4., 3. * PI / 4., false),
        (1., 5. * PI / 4., 3. * PI / 4., true),
        (1., 7. * PI / 4., 3. * PI / 4., false),
    ];

    points_with_labels
        .iter()
        .map(|&(r, theta, phi, label)| {
            let output = network.evaluate(&[r, theta, phi]);
            if output && label || !output && !label {
                1.
            } else {
                0.
            }
        })
        .sum::<f64>() / 8.
}

fn main() {
    let mut network = Network::<2, 2>::new();

    network.optimize(half, 3000);
    println!("half Fitness: {}", half(&network));
    print!("{}", network);

    network.optimize(quarter, 3000);
    println!("quarter Fitness: {}", quarter(&network));
    print!("{}", network);

    network.optimize(two_quarters, 3000);
    println!("two_quarters Fitness: {}", two_quarters(&network));
    print!("{}", network);

    network.optimize(square, 3000);
    println!("square Fitness: {}", square(&network));
    print!("{}", network);

    let mut network = Network::<4, 3>::new();
    network.optimize(cube, 3000);
    println!("cube Fitness: {}", cube(&network));
    print!("{}", network);
}

@@ -0,0 +1,111 @@
use std::fmt;
use std::f64::consts::PI;
use rand::prelude::*;
use rand_distr::{Exp, Distribution};
use cmaes::DVector;

#[derive(Debug, Clone)]
pub struct Network<const N: usize, const D: usize> {
    parameters: [[f64;D];N],
    output_layer: fn(&[bool; N]) -> bool,
}

fn generate_random_array<const N: usize, const D: usize>() -> [[f64;D];N] {
    let mut array = [[0.;D];N];
    for i in 0..N {
        for j in 0..D {
            array[i][j] = random::<f64>();
        }
    }
    array
}

fn polar_dot_product<const D: usize>(v1: &[f64; D], v2: &[f64; D]) -> f64 {
    let (r1, angles1) = v1.split_first().unwrap();
    let (r2, angles2) = v2.split_first().unwrap();

    let norm_product = r1 * r2;
    let angle_difference_cosine: f64 = angles1
        .iter()
        .zip(angles2.iter())
        .map(|(&theta1, &theta2)| (theta1 - theta2).cos())
        .product();

    norm_product * angle_difference_cosine
}

impl<const N: usize, const D: usize> std::fmt::Display for Network<N, D> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for (index, param) in self.parameters.iter().enumerate() {
            write!(f, "Neuron {}: [", index)?;
            for (i, &value) in param.iter().enumerate() {
                if i == 0 {
                    write!(f, "{:.2}", 2. * value - 1.)?;
                } else {
                    write!(f, "{:.2}", value * 2. * PI)?;
                }
                if i != param.len() - 1 {
                    write!(f, ", ")?;
                }
            }
            write!(f, "]\n")?;
        }
        Ok(())
    }
}

impl<const N: usize, const D: usize> Network<N, D> {
    pub fn new() -> Self {
        Self {
            parameters: generate_random_array::<N, D>(),
            output_layer: |inputs: &[bool; N]| inputs.iter().any(|&x| x), // OR
        }
    }

    pub fn optimize(&mut self, evaluation_function: fn(&Network<N, D>) -> f64, n_iters: u32) {
        let exp = Exp::new(1.).unwrap();

        for _ in 0..n_iters {
            let mut new_network = self.clone();
            for i in 0..N {
                for j in 0..D {
                    if random::<f64>() < 1. / (2. * N as f64) {
                        let sign = if random::<f64>() < 0.5 { 1. } else { -1. };
                        new_network.parameters[i][j] += sign * exp.sample(&mut thread_rng());
                        new_network.parameters[i][j] -= new_network.parameters[i][j].floor();
                    }
                }
            }

            if evaluation_function(&new_network) > evaluation_function(self) {
                *self = new_network;
            }
        }
    }

    pub fn evaluate(&self, input: &[f64; D]) -> bool {
        let mut hidden = [false; N];
        for i in 0..N {
            let mut normal = [0.;D];
            normal[0] = 1.;
            for j in 1..D {
                normal[j] = self.parameters[i][j] * 2. * PI;
            }
            hidden[i] = polar_dot_product(input, &normal) - (2. * self.parameters[i][0] - 1.) > 0.;
        }
        (self.output_layer)(&hidden)
    }
}

pub fn get_network<const N: usize, const D: usize>(x: &DVector<f64>) -> Network<N, D> {
    let mut parameters = [[0.;D];N];
    for i in 0..N {
        for j in 0..D {
            parameters[i][j] = x[i * D + j];
        }
    }
    Network {
        parameters,
        output_layer: |hidden: &[bool; N]| hidden.iter().any(|&x| x),
    }
}
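For completeness, here is a minimal sketch of driving the (1+1) mutation loop in optimize with a custom fitness function. It is not part of this commit, and the fitness function is purely illustrative:

use neuroevolution::network::Network;

// Hypothetical fitness: reward networks that answer `true` for the point
// at angle 0 on the unit circle and `false` for the point at angle PI.
fn toy_fitness(network: &Network<2, 2>) -> f64 {
    let mut score = 0.;
    if network.evaluate(&[1., 0.]) { score += 0.5; }
    if !network.evaluate(&[1., std::f64::consts::PI]) { score += 0.5; }
    score
}

fn main() {
    let mut network = Network::<2, 2>::new();
    network.optimize(toy_fitness, 100);
    println!("toy fitness after 100 iterations: {}", toy_fitness(&network));
    print!("{}", network);
}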