Add criterion based benchmarks #356

Open · wants to merge 16 commits into main
3 changes: 3 additions & 0 deletions Cargo.toml
@@ -8,3 +8,6 @@ members = [
exclude = [
"media/book/tests",
]

[profile.bench]
debug = true
126 changes: 126 additions & 0 deletions argmin/Cargo.toml
@@ -45,6 +45,7 @@ ndarray = { version = "0.15", features = ["serde-1"] }
ndarray-linalg = { version = "0.16", features = ["netlib"] }
argmin-math = { path = "../argmin-math" }
serde = { version = "1.0", features = ["derive", "rc"] }
criterion = { version = "0.4", features = ["html_reports"] }

[features]
default = ["slog-logger", "serde1"]
@@ -184,3 +185,128 @@ required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
[[example]]
name = "writers"
required-features = ["argmin-math/ndarray_latest-serde", "slog-logger", "serde1"]

[[bench]]
name = "backtracking"
harness = false
required-features = []

[[bench]]
name = "bfgs"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "brentroot"
harness = false
required-features = []

[[bench]]
name = "brentopt"
harness = false
required-features = []

[[bench]]
name = "conjugategradient"
harness = false
required-features = []

[[bench]]
name = "dfp"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "gaussnewton"
harness = false
required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde"]

[[bench]]
name = "gaussnewton_linesearch"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "goldensectionsearch"
harness = false
required-features = []

[[bench]]
name = "hagerzhang"
harness = false
required-features = []

[[bench]]
name = "landweber"
harness = false
required-features = []

[[bench]]
name = "lbfgs"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "lbfgs2d"
harness = false
required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde"]

[[bench]]
name = "morethuente"
harness = false
required-features = []

[[bench]]
name = "neldermead"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "newton"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "newton_cg"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "nonlinear_cg"
harness = false
required-features = []

[[bench]]
name = "owl_qn"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "particleswarm"
harness = false
required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde"]

[[bench]]
name = "simulatedannealing"
harness = false
required-features = []

[[bench]]
name = "sr1"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "sr1_trustregion"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]

[[bench]]
name = "steepestdescent"
harness = false
required-features = []

[[bench]]
name = "trustregion_nd"
harness = false
required-features = ["argmin-math/ndarray_latest-serde"]
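
Every [[bench]] target above sets harness = false because criterion provides its own benchmark main through the criterion_main! macro, and each target name maps to a file under argmin/benches/. As a rough sketch of the shape those files share (the setup_and_solve helper and the "solver_name" label are placeholders for illustration, not part of this PR):

use criterion::{criterion_group, criterion_main, Criterion};

// Placeholder for the problem/solver setup done in each concrete benchmark file.
fn setup_and_solve() {
    // build the problem, configure the solver, run the Executor
}

fn criterion_benchmark(c: &mut Criterion) {
    // Register a timed benchmark; criterion handles warm-up, sampling and reporting.
    c.bench_function("solver_name", |b| b.iter(setup_and_solve));
}

// These macros supply `main`, which is why `harness = false` is required in Cargo.toml.
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);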
72 changes: 72 additions & 0 deletions argmin/benches/backtracking.rs
@@ -0,0 +1,72 @@
// Copyright 2018-2022 argmin developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.

use criterion::{criterion_group, criterion_main, Criterion};

use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch};
use argmin::solver::linesearch::{condition::ArmijoCondition, BacktrackingLineSearch};
use argmin_testfunctions::{sphere, sphere_derivative};

struct Sphere {}

impl CostFunction for Sphere {
    type Param = Vec<f64>;
    type Output = f64;

    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
        Ok(sphere(param))
    }
}

impl Gradient for Sphere {
    type Param = Vec<f64>;
    type Gradient = Vec<f64>;

    fn gradient(&self, param: &Self::Param) -> Result<Self::Gradient, Error> {
        Ok(sphere_derivative(param))
    }
}
fn run() -> Result<(), Error> {
    // Define initial parameter vector
    let init_param: Vec<f64> = vec![0.7, 0.0];
    // Define problem
    let operator = Sphere {};
    // Set up the Armijo (sufficient decrease) condition
    let cond = ArmijoCondition::new(0.5)?;
    // Set up line search method
    let mut solver = BacktrackingLineSearch::new(cond).rho(0.9)?;
    // Set search direction
    solver.search_direction(vec![-1.0, 0.0]);
    // Set initial step length
    solver.initial_step_length(1.0)?;

    let init_cost = operator.cost(&init_param)?;
    let init_grad = operator.gradient(&init_param)?;

    // Run solver
    let _res = Executor::new(operator, solver)
        // .add_observer(SlogLogger::term(), ObserverMode::Always)
        .configure(|state| {
            state
                .param(init_param)
                .gradient(init_grad)
                .cost(init_cost)
                .max_iters(10)
        })
        .run()?;
    Ok(())
}

fn criterion_benchmark(c: &mut Criterion) {
    c.bench_function("Backtracking", |b| {
        b.iter(|| run().expect("Benchmark should run without errors"))
    });
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
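
For reference (standard line search theory, not part of the diff): with the parameters above, ArmijoCondition::new(0.5) accepts a step length alpha along the search direction d once the sufficient decrease inequality holds, and the backtracking factor rho(0.9) shrinks alpha until it does:

f(x + \alpha d) \le f(x) + c\,\alpha\,\nabla f(x)^{\mathsf{T}} d, \qquad c = 0.5
\alpha \leftarrow \rho\,\alpha, \qquad \rho = 0.9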
176 changes: 176 additions & 0 deletions argmin/benches/bfgs.rs
@@ -0,0 +1,176 @@
// Copyright 2018-2022 argmin developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.

use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};

use argmin::core::{CostFunction, Error, Executor, Gradient};
use argmin::solver::linesearch::MoreThuenteLineSearch;
use argmin::solver::quasinewton::BFGS;
use argmin_testfunctions::rosenbrock;
use finitediff::FiniteDiff;
use ndarray::{Array1, Array2};

struct RosenbrockVec {
    a: f64,
    b: f64,
}

struct RosenbrockNdarray {
    a: f64,
    b: f64,
}

impl CostFunction for RosenbrockVec {
    type Param = Vec<f64>;
    type Output = f64;

    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
        Ok(rosenbrock(p, self.a, self.b))
    }
}

impl Gradient for RosenbrockVec {
    type Param = Vec<f64>;
    type Gradient = Vec<f64>;

    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
        Ok((*p).forward_diff(&|x| rosenbrock(&x, self.a, self.b)))
    }
}

impl CostFunction for RosenbrockNdarray {
    type Param = Array1<f64>;
    type Output = f64;

    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
    }
}

impl Gradient for RosenbrockNdarray {
    type Param = Array1<f64>;
    type Gradient = Array1<f64>;

    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
    }
}

fn run_vec(
    a: f64,
    b: f64,
    init_param: &[f64],
    c1: f64,
    c2: f64,
    iterations: u64,
) -> Result<(), Error> {
    // Define cost function
    let cost = RosenbrockVec { a, b };
    // Define initial parameter vector
    let init_param: Vec<f64> = Vec::from(init_param);
    // Build an identity matrix of matching size as the initial inverse Hessian
    let mut init_hessian = Vec::<Vec<f64>>::new();
    for i in 0..init_param.len() {
        let mut row = Vec::new();
        for j in 0..init_param.len() {
            if i == j {
                row.push(1.0);
            } else {
                row.push(0.0);
            }
        }
        init_hessian.push(row);
    }
    // Set up a line search
    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
    // Set up solver
    let solver = BFGS::new(linesearch);

    // Run solver
    let _res = Executor::new(cost, solver)
        .configure(|state| {
            state
                .param(init_param)
                .inv_hessian(init_hessian)
                .max_iters(iterations)
        })
        .run()?;
    Ok(())
}

fn run_ndarray(
    a: f64,
    b: f64,
    init_param: &[f64],
    c1: f64,
    c2: f64,
    iterations: u64,
) -> Result<(), Error> {
    // Define cost function
    let cost = RosenbrockNdarray { a, b };
    // Define initial parameter vector
    let init_param: Array1<f64> = Array1::from_vec(Vec::from(init_param));
    // Use an identity matrix as the initial inverse Hessian
    let init_hessian: Array2<f64> = Array2::eye(init_param.len());
    // Set up a line search
    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
    // Set up solver
    let solver = BFGS::new(linesearch);

    // Run solver
    let _res = Executor::new(cost, solver)
        .configure(|state| {
            state
                .param(init_param)
                .inv_hessian(init_hessian)
                .max_iters(iterations)
        })
        .run()?;
    Ok(())
}

fn criterion_benchmark(c: &mut Criterion) {
    let a = 1.0;
    let b = 100.0;
    let init_param = vec![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0];
    let c1 = 1e-4;
    let c2 = 0.9;
    let iterations: u64 = 60;
    let mut group = c.benchmark_group("BFGS");
    for i in 2..init_param.len() {
        // WARN: Vec version immediately fails with
        // Condition violated: `MoreThuenteLineSearch`: Search direction must be a descent direction.
        //
        // group.bench_with_input(BenchmarkId::new("Vec", i), &i, |bencher, i| {
        //     bencher.iter(|| {
        //         run_vec(
        //             black_box(a),
        //             black_box(b),
        //             black_box(&init_param[0..*i]),
        //             black_box(c1),
        //             black_box(c2),
        //             black_box(iterations),
        //         ).expect("Benchmark should run without errors")
        //     })
        // });
        group.bench_with_input(BenchmarkId::new("ndarray", i), &i, |bencher, i| {
            bencher.iter(|| {
                run_ndarray(
                    black_box(a),
                    black_box(b),
                    black_box(&init_param[0..*i]),
                    black_box(c1),
                    black_box(c2),
                    black_box(iterations),
                )
                .expect("Benchmark should run without errors")
            })
        });
    }
    group.finish();
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
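
For context (standard definition, not introduced by this PR): argmin_testfunctions::rosenbrock evaluates the multidimensional Rosenbrock function, which this benchmark minimizes with a = 1 and b = 100 over prefixes of the initial parameter vector of increasing length:

f(x) = \sum_{i=1}^{n-1} \left[ b\,(x_{i+1} - x_i^2)^2 + (a - x_i)^2 \right]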