From c5341cb5fa3aab3e83cee19cd853abfbc1efd10f Mon Sep 17 00:00:00 2001
From: Gwilym Inzani
Date: Wed, 10 May 2023 16:01:16 +0100
Subject: [PATCH] Add benchmarks

---
 agb-hashmap/Cargo.toml       |   1 +
 agb-hashmap/benches/bench.rs | 318 +++++++++++++++++++++++++++++++++++
 2 files changed, 319 insertions(+)
 create mode 100644 agb-hashmap/benches/bench.rs

diff --git a/agb-hashmap/Cargo.toml b/agb-hashmap/Cargo.toml
index bf7fc6de..c464959a 100644
--- a/agb-hashmap/Cargo.toml
+++ b/agb-hashmap/Cargo.toml
@@ -11,6 +11,7 @@ rustc-hash = { version = "1", default-features = false }
 
 [dev-dependencies]
 rand = { version = "0.8", default-features = false, features = ["small_rng"] }
+lazy_static = "1.4"
 
 [profile.dev]
 opt-level = 3
diff --git a/agb-hashmap/benches/bench.rs b/agb-hashmap/benches/bench.rs
new file mode 100644
index 00000000..c93a8481
--- /dev/null
+++ b/agb-hashmap/benches/bench.rs
@@ -0,0 +1,318 @@
// These benchmarks were taken from hashbrown. They are impossible to run
// on the target GBA hardware, but hopefully running them on something like a
// Raspberry Pi Zero will give comparable results.

// This benchmark suite contains some benchmarks along a set of dimensions:
//   Int key distribution: low bit heavy, top bit heavy, and random.
//   Task: basic functionality: insert, insert_erase, lookup, lookup_fail, iter
#![feature(test)]

extern crate test;

use test::{black_box, Bencher};

use agb_hashmap::HashMap;
use std::sync::atomic::{self, AtomicUsize};

const SIZE: usize = 1000;

type StdHashMap<K, V> = std::collections::hash_map::HashMap<K, V>;

// A random key iterator.
#[derive(Clone, Copy)]
struct RandomKeys {
    state: usize,
}

impl RandomKeys {
    fn new() -> Self {
        RandomKeys { state: 0 }
    }
}

impl Iterator for RandomKeys {
    type Item = usize;
    fn next(&mut self) -> Option<usize> {
        // Add 1 then multiply by some 32-bit prime.
        self.state = self.state.wrapping_add(1).wrapping_mul(3_787_392_781);
        Some(self.state)
    }
}

// Just an arbitrary side effect so the maps don't short-circuit to the non-dropping path
// when dropping maps/entries (most real-world usages likely have drop in the key or value)
lazy_static::lazy_static! {
    static ref SIDE_EFFECT: AtomicUsize = AtomicUsize::new(0);
}

#[derive(Clone)]
struct DropType(usize);
impl Drop for DropType {
    fn drop(&mut self) {
        SIDE_EFFECT.fetch_add(self.0, atomic::Ordering::SeqCst);
    }
}

macro_rules! bench_suite {
    ($bench_macro:ident, $bench_agb_hashmap_serial:ident, $bench_std_serial:ident,
     $bench_agb_hashmap_highbits:ident, $bench_std_highbits:ident,
     $bench_agb_hashmap_random:ident, $bench_std_random:ident) => {
        $bench_macro!($bench_agb_hashmap_serial, HashMap, 0..);
        $bench_macro!($bench_std_serial, StdHashMap, 0..);
        $bench_macro!(
            $bench_agb_hashmap_highbits,
            HashMap,
            (0..).map(usize::swap_bytes)
        );
        $bench_macro!(
            $bench_std_highbits,
            StdHashMap,
            (0..).map(usize::swap_bytes)
        );
        $bench_macro!($bench_agb_hashmap_random, HashMap, RandomKeys::new());
        $bench_macro!($bench_std_random, StdHashMap, RandomKeys::new());
    };
}

macro_rules!
bench_insert { + ($name:ident, $maptype:ident, $keydist:expr) => { + #[bench] + fn $name(b: &mut Bencher) { + let mut m = $maptype::with_capacity(SIZE); + b.iter(|| { + m.clear(); + for i in ($keydist).take(SIZE) { + m.insert(i, (DropType(i), [i; 20])); + } + black_box(&mut m); + }); + eprintln!("{}", SIDE_EFFECT.load(atomic::Ordering::SeqCst)); + } + }; +} + +bench_suite!( + bench_insert, + insert_agb_hashmap_serial, + insert_std_serial, + insert_agb_hashmap_highbits, + insert_std_highbits, + insert_agb_hashmap_random, + insert_std_random +); + +macro_rules! bench_grow_insert { + ($name:ident, $maptype:ident, $keydist:expr) => { + #[bench] + fn $name(b: &mut Bencher) { + b.iter(|| { + let mut m = $maptype::default(); + for i in ($keydist).take(SIZE) { + m.insert(i, DropType(i)); + } + black_box(&mut m); + }) + } + }; +} + +bench_suite!( + bench_grow_insert, + grow_insert_agb_hashmap_serial, + grow_insert_std_serial, + grow_insert_agb_hashmap_highbits, + grow_insert_std_highbits, + grow_insert_agb_hashmap_random, + grow_insert_std_random +); + +macro_rules! bench_insert_erase { + ($name:ident, $maptype:ident, $keydist:expr) => { + #[bench] + fn $name(b: &mut Bencher) { + let mut base = $maptype::default(); + for i in ($keydist).take(SIZE) { + base.insert(i, DropType(i)); + } + let skip = $keydist.skip(SIZE); + b.iter(|| { + let mut m = base.clone(); + let mut add_iter = skip.clone(); + let mut remove_iter = $keydist; + // While keeping the size constant, + // replace the first keydist with the second. + for (add, remove) in (&mut add_iter).zip(&mut remove_iter).take(SIZE) { + m.insert(add, DropType(add)); + black_box(m.remove(&remove)); + } + black_box(m); + }); + eprintln!("{}", SIDE_EFFECT.load(atomic::Ordering::SeqCst)); + } + }; +} + +bench_suite!( + bench_insert_erase, + insert_erase_agb_hashmap_serial, + insert_erase_std_serial, + insert_erase_agb_hashmap_highbits, + insert_erase_std_highbits, + insert_erase_agb_hashmap_random, + insert_erase_std_random +); + +macro_rules! bench_lookup { + ($name:ident, $maptype:ident, $keydist:expr) => { + #[bench] + fn $name(b: &mut Bencher) { + let mut m = $maptype::default(); + for i in $keydist.take(SIZE) { + m.insert(i, DropType(i)); + } + + b.iter(|| { + for i in $keydist.take(SIZE) { + black_box(m.get(&i)); + } + }); + eprintln!("{}", SIDE_EFFECT.load(atomic::Ordering::SeqCst)); + } + }; +} + +bench_suite!( + bench_lookup, + lookup_agb_hashmap_serial, + lookup_std_serial, + lookup_agb_hashmap_highbits, + lookup_std_highbits, + lookup_agb_hashmap_random, + lookup_std_random +); + +macro_rules! bench_lookup_fail { + ($name:ident, $maptype:ident, $keydist:expr) => { + #[bench] + fn $name(b: &mut Bencher) { + let mut m = $maptype::default(); + let mut iter = $keydist; + for i in (&mut iter).take(SIZE) { + m.insert(i, DropType(i)); + } + + b.iter(|| { + for i in (&mut iter).take(SIZE) { + black_box(m.get(&i)); + } + }) + } + }; +} + +bench_suite!( + bench_lookup_fail, + lookup_fail_agb_hashmap_serial, + lookup_fail_std_serial, + lookup_fail_agb_hashmap_highbits, + lookup_fail_std_highbits, + lookup_fail_agb_hashmap_random, + lookup_fail_std_random +); + +macro_rules! 
bench_iter { + ($name:ident, $maptype:ident, $keydist:expr) => { + #[bench] + fn $name(b: &mut Bencher) { + let mut m = $maptype::default(); + for i in ($keydist).take(SIZE) { + m.insert(i, DropType(i)); + } + + b.iter(|| { + for i in &m { + black_box(i); + } + }) + } + }; +} + +bench_suite!( + bench_iter, + iter_agb_hashmap_serial, + iter_std_serial, + iter_agb_hashmap_highbits, + iter_std_highbits, + iter_agb_hashmap_random, + iter_std_random +); + +macro_rules! clone_bench { + ($maptype:ident) => { + use super::DropType; + use test::{black_box, Bencher}; + + #[bench] + fn clone_small(b: &mut Bencher) { + let mut m = $maptype::new(); + for i in 0..10 { + m.insert(i, DropType(i)); + } + + b.iter(|| { + black_box(m.clone()); + }) + } + + #[bench] + fn clone_from_small(b: &mut Bencher) { + let mut m = $maptype::new(); + let mut m2 = $maptype::new(); + for i in 0..10 { + m.insert(i, DropType(i)); + } + + b.iter(|| { + m2.clone_from(&m); + black_box(&mut m2); + }) + } + + #[bench] + fn clone_large(b: &mut Bencher) { + let mut m = $maptype::new(); + for i in 0..1000 { + m.insert(i, DropType(i)); + } + + b.iter(|| { + black_box(m.clone()); + }) + } + + #[bench] + fn clone_from_large(b: &mut Bencher) { + let mut m = $maptype::new(); + let mut m2 = $maptype::new(); + for i in 0..1000 { + m.insert(i, DropType(i)); + } + + b.iter(|| { + m2.clone_from(&m); + black_box(&mut m2); + }) + } + }; +} + +mod agb_hashmap_clone_benches { + use agb_hashmap::HashMap; + clone_bench!(HashMap); +} + +mod std_hashmap_clone_benches { + use std::collections::hash_map::HashMap; + clone_bench!(HashMap); +}
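
As a usage sketch, assuming a nightly toolchain is installed (the #![feature(test)] attribute makes these benches nightly-only) and that the suite is run from the agb-hashmap directory, the exact invocation is not part of the patch but would look something like:

    cargo +nightly bench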