Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions crates/lgp-cli/src/experiment_runner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,7 @@ fn run_iris(
n_generations: config.hyperparameters.n_generations,
n_trials: config.hyperparameters.n_trials,
seed: Some(seed),
n_threads: config.hyperparameters.n_threads,
program_parameters: build_program_params(config),
};

Expand All @@ -173,6 +174,7 @@ fn run_cart_pole_lgp(
n_generations: config.hyperparameters.n_generations,
n_trials: config.hyperparameters.n_trials,
seed: Some(seed),
n_threads: config.hyperparameters.n_threads,
program_parameters: build_program_params(config),
};

Expand Down Expand Up @@ -208,6 +210,7 @@ fn run_cart_pole_q(
n_generations: config.hyperparameters.n_generations,
n_trials: config.hyperparameters.n_trials,
seed: Some(seed),
n_threads: config.hyperparameters.n_threads,
program_parameters: q_program_params,
};

Expand All @@ -229,6 +232,7 @@ fn run_mountain_car_lgp(
n_generations: config.hyperparameters.n_generations,
n_trials: config.hyperparameters.n_trials,
seed: Some(seed),
n_threads: config.hyperparameters.n_threads,
program_parameters: build_program_params(config),
};

Expand Down Expand Up @@ -264,6 +268,7 @@ fn run_mountain_car_q(
n_generations: config.hyperparameters.n_generations,
n_trials: config.hyperparameters.n_trials,
seed: Some(seed),
n_threads: config.hyperparameters.n_threads,
program_parameters: q_program_params,
};

Expand Down
4 changes: 4 additions & 0 deletions crates/lgp/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -42,3 +42,7 @@ criterion = "0.4.0"
name = "performance_after_training"
harness = false
required-features = ["gym"]

[[bench]]
name = "parallel_fitness"
harness = false
124 changes: 124 additions & 0 deletions crates/lgp/benches/parallel_fitness.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
use std::iter::repeat_with;

use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use itertools::Itertools;
use lgp::{
core::{
engines::{
core_engine::Core, fitness_engine::Fitness, generate_engine::Generate,
reset_engine::Reset, status_engine::Status,
},
instruction::InstructionGeneratorParameters,
program::ProgramGeneratorParameters,
},
problems::iris::{IrisEngine, IrisState},
};
use rayon::prelude::*;

/// Sequential fitness evaluation (pre-parallelization baseline).
/// Sequential fitness evaluation (pre-parallelization baseline).
///
/// Scores every individual against every trial, substituting
/// `default_fitness` for any non-finite (NaN / infinite) score, and
/// records the per-individual average via the status engine.
fn eval_fitness_sequential<C: Core>(
    population: &mut Vec<C::Individual>,
    trials: &[C::State],
    default_fitness: f64,
) {
    let trial_count = trials.len();
    for member in population.iter_mut() {
        let mut accumulated = 0.0;
        for trial in trials {
            // Score against a fresh copy so the shared trial set stays pristine.
            let mut scratch = trial.clone();
            C::Reset::reset(member);
            C::Reset::reset(&mut scratch);
            let raw = C::Fitness::eval_fitness(member, &mut scratch);
            accumulated += if raw.is_finite() { raw } else { default_fitness };
        }
        C::Status::set_fitness(member, accumulated / trial_count as f64);
    }
}

/// Parallel fitness evaluation (current implementation).
/// Parallel fitness evaluation (current implementation).
///
/// Same contract as the sequential baseline — non-finite scores fall
/// back to `default_fitness` and each individual receives its average
/// score — but individuals are distributed across rayon's thread pool.
fn eval_fitness_parallel<C: Core>(
    population: &mut Vec<C::Individual>,
    trials: &[C::State],
    default_fitness: f64,
) {
    let trial_count = trials.len() as f64;
    population.par_iter_mut().for_each(|member| {
        let mut accumulated = 0.0;
        for trial in trials {
            // Each worker clones the trial it is about to mutate.
            let mut scratch = trial.clone();
            C::Reset::reset(member);
            C::Reset::reset(&mut scratch);
            let raw = C::Fitness::eval_fitness(member, &mut scratch);
            accumulated += if raw.is_finite() { raw } else { default_fitness };
        }
        C::Status::set_fitness(member, accumulated / trial_count);
    });
}

/// Benchmarks sequential vs. rayon-parallel fitness evaluation on the
/// Iris problem across several population sizes, so the crossover point
/// where parallelism pays off can be read from the criterion report.
fn parallel_vs_sequential(c: &mut Criterion) {
    let mut group = c.benchmark_group("fitness_evaluation");

    let trial_count = 5;
    let fallback_fitness = 0.0;

    let trials: Vec<IrisState> = repeat_with(|| <IrisEngine as Core>::Generate::generate(()))
        .take(trial_count)
        .collect_vec();

    // Hoisted out of the loop: `ProgramParameters` is `Copy` per the
    // `Core` trait bound, so each `generate` call gets its own copy.
    let program_params = ProgramGeneratorParameters {
        max_instructions: 100,
        instruction_generator_parameters: InstructionGeneratorParameters {
            n_extras: 1,
            external_factor: 10.0,
            n_actions: 3,
            n_inputs: 4,
        },
    };

    for pop_size in [50, 100, 200, 500] {
        // A single seed population per size; each measurement clones it
        // so every iteration starts from identical, unevaluated programs.
        let seed_population: Vec<_> =
            repeat_with(|| <IrisEngine as Core>::Generate::generate(program_params))
                .take(pop_size)
                .collect();

        group.bench_with_input(
            BenchmarkId::new("sequential", pop_size),
            &pop_size,
            |b, _| {
                b.iter_batched(
                    || seed_population.clone(),
                    |mut pop| {
                        eval_fitness_sequential::<IrisEngine>(&mut pop, &trials, fallback_fitness)
                    },
                    criterion::BatchSize::SmallInput,
                )
            },
        );

        group.bench_with_input(
            BenchmarkId::new("parallel", pop_size),
            &pop_size,
            |b, _| {
                b.iter_batched(
                    || seed_population.clone(),
                    |mut pop| {
                        eval_fitness_parallel::<IrisEngine>(&mut pop, &trials, fallback_fitness)
                    },
                    criterion::BatchSize::SmallInput,
                )
            },
        );
    }

    group.finish();
}

criterion_group!(benches, parallel_vs_sequential);
criterion_main!(benches);
9 changes: 9 additions & 0 deletions crates/lgp/src/core/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,10 @@ pub struct ExperimentParams {
#[arg(long)]
pub seed: Option<u64>,

/// Number of threads for parallel evaluation (defaults to all available cores)
#[arg(long)]
pub n_threads: Option<usize>,

/// Fitness assigned to invalid programs (overridden per environment if not set)
#[arg(long)]
pub default_fitness: Option<f64>,
Expand Down Expand Up @@ -236,6 +240,7 @@ impl ExperimentParams {
n_generations: self.n_generations,
n_trials: self.n_trials,
seed: self.seed,
n_threads: self.n_threads,
program_parameters: self.build_program_params(),
};
run_experiment!(hyperparameters);
Expand All @@ -252,6 +257,7 @@ impl ExperimentParams {
n_generations: self.n_generations,
n_trials: self.n_trials,
seed: self.seed,
n_threads: self.n_threads,
program_parameters: self.build_q_program_params(),
};
ResetEngine::reset(&mut hyperparameters.program_parameters.consts);
Expand All @@ -269,6 +275,7 @@ impl ExperimentParams {
n_generations: self.n_generations,
n_trials: self.n_trials,
seed: self.seed,
n_threads: self.n_threads,
program_parameters: self.build_program_params(),
};
run_experiment!(hyperparameters);
Expand All @@ -285,6 +292,7 @@ impl ExperimentParams {
n_generations: self.n_generations,
n_trials: self.n_trials,
seed: self.seed,
n_threads: self.n_threads,
program_parameters: self.build_q_program_params(),
};
ResetEngine::reset(&mut hyperparameters.program_parameters.consts);
Expand All @@ -300,6 +308,7 @@ impl ExperimentParams {
n_generations: self.n_generations,
n_trials: self.n_trials,
seed: self.seed,
n_threads: self.n_threads,
program_parameters: self.build_program_params(),
};
run_experiment!(hyperparameters);
Expand Down
57 changes: 33 additions & 24 deletions crates/lgp/src/core/engines/core_engine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ use clap::{Args, Parser};
use derivative::Derivative;
use itertools::Itertools;
use rand::{seq::IteratorRandom, Rng};
use rayon::prelude::*;

use crate::{
core::{
Expand Down Expand Up @@ -53,6 +54,9 @@ where
#[builder(default = "None")]
#[arg(long)]
pub seed: Option<u64>,
#[builder(default = "None")]
#[arg(long)]
pub n_threads: Option<usize>,
#[command(flatten)]
pub program_parameters: C::ProgramParameters,
}
Expand All @@ -78,7 +82,8 @@ where
gap = hp.gap,
mutation_percent = hp.mutation_percent,
crossover_percent = hp.crossover_percent,
seed = ?hp.seed
seed = ?hp.seed,
n_threads = ?hp.n_threads
))]
pub fn new(hp: HyperParameters<C>) -> Self {
debug!("Initializing evolution engine");
Expand Down Expand Up @@ -116,11 +121,7 @@ where

let mut population = self.next_population.clone();

C::eval_fitness(
&mut population,
&mut self.trials,
self.params.default_fitness,
);
C::eval_fitness(&mut population, &self.trials, self.params.default_fitness);
C::rank(&mut population);

assert!(population.iter().all(C::Status::evaluated));
Expand Down Expand Up @@ -182,14 +183,22 @@ where
{
/// Builds an iterator-style evolution engine from these hyperparameters.
///
/// Side effects before construction: reseeds via `update_seed(self.seed)`
/// (presumably the crate's shared RNG — confirm against `update_seed`),
/// and, when `n_threads` is set, attempts to size rayon's *global*
/// thread pool.
pub fn build_engine(&self) -> CoreIter<T> {
    update_seed(self.seed);

    if let Some(n_threads) = self.n_threads {
        // `build_global` errors if the global pool was already
        // initialized (e.g. on a second `build_engine` call), and
        // `.ok()` deliberately discards that error: only the first
        // configuration takes effect.
        // NOTE(review): a later call with a different `n_threads` is
        // silently a no-op — confirm that best-effort behavior is
        // intended rather than surfacing a warning.
        rayon::ThreadPoolBuilder::new()
            .num_threads(n_threads)
            .build_global()
            .ok();
    }

    CoreIter::new(self.clone())
}
}

pub trait Core {
type Individual: Ord + Clone + Send + Sync + Serialize + DeserializeOwned;
type ProgramParameters: Copy + Send + Sync + Clone + Serialize + DeserializeOwned + Args;
type State: State;
type State: State + Clone + Send + Sync;
type FitnessMarker;
type Generate: Generate<Self::ProgramParameters, Self::Individual> + Generate<(), Self::State>;
type Fitness: Fitness<Self::Individual, Self::State, Self::FitnessMarker>;
Expand All @@ -210,27 +219,27 @@ pub trait Core {

fn eval_fitness(
population: &mut Vec<Self::Individual>,
trials: &mut Vec<Self::State>,
trials: &[Self::State],
default_fitness: f64,
) {
for individual in population.iter_mut() {
let mut scores = trials
.iter_mut()
.map(|trial| {
let n_trials = trials.len();
population.par_iter_mut().for_each(|individual| {
let total: f64 = trials
.iter()
.cloned()
.map(|mut trial| {
Self::Reset::reset(individual);
Self::Reset::reset(trial);
Self::Fitness::eval_fitness(individual, trial)
Self::Reset::reset(&mut trial);
let score = Self::Fitness::eval_fitness(individual, &mut trial);
if score.is_finite() {
score
} else {
default_fitness
}
})
.collect_vec();

let n_trials = scores.len();
scores = scores
.into_iter()
.map(|s| if !s.is_finite() { default_fitness } else { s })
.collect_vec();
let average = scores.into_iter().sum::<f64>() / n_trials as f64;
Self::Status::set_fitness(individual, average);
}
.sum();
Self::Status::set_fitness(individual, total / n_trials as f64);
});
}

fn rank(population: &mut Vec<Self::Individual>) {
Expand Down
3 changes: 3 additions & 0 deletions crates/lgp/src/core/experiment_config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,9 @@ pub struct HyperParams {
/// Serialized as a string to support values > i64::MAX in TOML format.
#[serde(default, with = "optional_u64_as_string")]
pub seed: Option<u64>,
/// Number of threads for parallel evaluation. If None, uses all available cores.
#[serde(default)]
pub n_threads: Option<usize>,
pub program: ProgramConfig,
}

Expand Down
Loading
Loading