Optimize has_duplicates() for short slices

This commit is contained in:
Greg Fitzgerald 2019-01-07 09:38:19 -07:00
parent 24963e547c
commit 6000df9779
2 changed files with 24 additions and 5 deletions

14
benches/runtime.rs Normal file
View File

@ -0,0 +1,14 @@
#![feature(test)]
extern crate test;
use solana::runtime::*;
use test::Bencher;
#[bench]
fn bench_has_duplicates(bencher: &mut Bencher) {
bencher.iter(|| {
let data = test::black_box([1, 2, 3]);
assert!(!has_duplicates(&data));
})
}

View File

@ -5,8 +5,6 @@ use solana_sdk::pubkey::Pubkey;
use solana_sdk::system_program;
use solana_sdk::transaction::Transaction;
use solana_system_program;
use std::collections::HashSet;
use std::iter::FromIterator;
/// Reasons the runtime might have rejected a transaction.
#[derive(Debug, PartialEq, Eq, Clone)]
@ -118,9 +116,16 @@ fn execute_instruction(
}
/// Return true if the slice has any duplicate elements
fn has_duplicates<T: Eq + std::hash::Hash>(xs: &[T]) -> bool {
let xs_set: HashSet<&T> = HashSet::from_iter(xs.iter());
xs.len() != xs_set.len()
pub fn has_duplicates<T: PartialEq>(xs: &[T]) -> bool {
// Note: This is an O(n^2) algorithm, but requires no heap allocations. The benchmark
// `bench_has_duplicates` in benches/runtime.rs shows that this implementation is
// ~50 times faster than using HashSet for very short slices.
for i in 1..xs.len() {
if xs[i..].contains(&xs[i - 1]) {
return true;
}
}
false
}
/// Get mut references to a subset of elements.