Use criterion for benchmarks
It's very nice! Finally there is a way to add benchmarks to a project
and to see performance as part of its quality, thanks to criterion's regression information.
Byron committed Feb 1, 2020
1 parent 0b036d6 commit 2be30e9
Showing 3 changed files with 79 additions and 84 deletions.
10 changes: 9 additions & 1 deletion Cargo.toml
@@ -8,4 +8,12 @@ description = "A zero-copy json-lexer, filters and serializer."
 repository = "https://github.com/Byron/json-tools"
 keywords = ["json", "tools", "lexer"]
 documentation = "https://docs.rs/crate/json-tools"
-exclude = ["tests", "benches", "rustfmt.toml"]
+include = ["src/**/*.rs", "Cargo.toml"]
+
+[dev-dependencies]
+criterion = "0.3"
+
+[[bench]]
+name = "usage"
+path = "benches/usage.rs"
+harness = false
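The `harness = false` line is what makes this work: it keeps cargo from compiling the default libtest harness into the bench target, so criterion's `criterion_main!` macro can supply `main` itself. As a minimal sketch of the shape such a target takes (the `fibonacci` workload is a placeholder for illustration, not part of json-tools):

```rust
use criterion::{black_box, criterion_group, criterion_main, Criterion};

// Placeholder workload; any function to be measured goes here.
fn fibonacci(n: u64) -> u64 {
    (1..n).fold((0, 1), |(a, b), _| (b, a + b)).1
}

fn bench_fib(c: &mut Criterion) {
    // `black_box` keeps the optimizer from deleting the computation.
    c.bench_function("fib 20", |b| b.iter(|| fibonacci(black_box(20))));
}

// With `harness = false`, these macros generate the `main` function.
criterion_group!(benches, bench_fib);
criterion_main!(benches);
```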
4 changes: 4 additions & 0 deletions README.md
@@ -41,6 +41,10 @@ for token in Lexer::new(r#"{ "face": "😂" }"#.bytes(), BufferType::Span) {
 This library may possibly never grow bigger than the two features originally mentioned, as it was created
 as a workaround to missing features in [`serde`](https://github.com/serde-rs/serde).
 
+# Manual
+
+Run tests with `cargo test` and benchmarks with `cargo bench` (works on stable).
+
 ## License
 
 Licensed under either of
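Criterion's runner also accepts a name filter, so `cargo bench -- Lexer` should run only the benchmarks whose ID contains `Lexer`, and each run is compared against the saved results of the previous one; that comparison is the regression information the commit message refers to.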
149 changes: 66 additions & 83 deletions benches/usage.rs
@@ -1,7 +1,4 @@
-#![feature(test)]
-
-extern crate json_tools;
-extern crate test;
+use criterion::*;
 
 use json_tools::{Buffer, BufferType, FilterTypedKeyValuePairs, Lexer, Span, Token, TokenReader, TokenType};
 use std::io;
@@ -167,86 +164,72 @@ impl Iterator for KeyValueProducer
     }
 }
 
-#[bench]
-fn span_lexer_throughput(b: &mut test::Bencher) {
-    b.iter(|| {
-        let it = Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span);
-        for t in it {
-            test::black_box(t);
-        }
-    });
-    b.bytes = NULL_RIDDEN.len() as u64;
-}
-
-#[bench]
-fn span_lexer_span_token_reader_throughput(b: &mut test::Bencher) {
-    b.iter(|| {
-        let mut r = TokenReader::new(Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span), Some(NULL_RIDDEN));
-        io::copy(&mut r, &mut io::sink()).ok();
-    });
-    b.bytes = NULL_RIDDEN.len() as u64;
-}
-
-#[bench]
-fn span_lexer_bytes_token_reader_throughput(b: &mut test::Bencher) {
-    b.iter(|| {
-        let mut r = TokenReader::new(Lexer::new(NULL_RIDDEN.bytes(), BufferType::Bytes(128)), None);
-        io::copy(&mut r, &mut io::sink()).ok();
-    });
-    b.bytes = NULL_RIDDEN.len() as u64;
-}
-
-#[bench]
-fn bytes_token_producer_bytes_token_reader_throughput(b: &mut test::Bencher) {
-    let mut ncb = 0u64;
-    b.iter(|| {
-        let mut r = TokenReader::new(KeyValueProducer::new(BufferType::Bytes(0)).take(30000), None);
-        ncb = io::copy(&mut r, &mut io::sink()).unwrap();
-    });
-    b.bytes = ncb;
-}
-
-#[bench]
-fn span_token_producer_bytes_token_reader_throughput(b: &mut test::Bencher) {
-    let mut ncb = 0u64;
-    b.iter(|| {
-        let mut r = TokenReader::new(KeyValueProducer::new(BufferType::Span).take(30000), Some(KEY_VALUE_SRC));
-        ncb = io::copy(&mut r, &mut io::sink()).unwrap();
-    });
-    b.bytes = ncb;
-}
-
-#[bench]
-fn bytes_lexer_throughput(b: &mut test::Bencher) {
-    b.iter(|| {
-        let it = Lexer::new(NULL_RIDDEN.bytes(), BufferType::Bytes(128));
-        for t in it {
-            test::black_box(t);
-        }
-    });
-    b.bytes = NULL_RIDDEN.len() as u64;
-}
-
-#[bench]
-fn span_filter_null_throughput(b: &mut test::Bencher) {
-    b.iter(|| {
-        let f = FilterTypedKeyValuePairs::new(Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span), TokenType::Null);
-        for t in f {
-            test::black_box(t);
-        }
-    });
-    b.bytes = NULL_RIDDEN.len() as u64;
-}
-
-#[bench]
-fn span_lexer_throughput_with_cursor(b: &mut test::Bencher) {
+fn span_lexer_throughput_with_cursor(c: &mut Criterion) {
     use std::io::{Cursor, Read};
 
-    b.iter(|| {
-        let it = Lexer::new(Cursor::new(NULL_RIDDEN.as_bytes()).bytes().filter_map(|r| r.ok()), BufferType::Span);
-        for t in it {
-            test::black_box(t);
-        }
-    });
-    b.bytes = NULL_RIDDEN.len() as u64;
+    let num_elements = 30000usize;
+    c.benchmark_group("TokenReader")
+        .throughput(Throughput::Elements(num_elements as u64))
+        .bench_function("bytes token producer bytes token reader", |b| {
+            b.iter(|| {
+                let mut r = TokenReader::new(KeyValueProducer::new(BufferType::Bytes(0)).take(30000), None);
+                io::copy(&mut r, &mut io::sink()).unwrap();
+            });
+        })
+        .bench_function("span token producer bytes token reader", |b| {
+            b.iter(|| {
+                let mut r = TokenReader::new(KeyValueProducer::new(BufferType::Span).take(num_elements), Some(KEY_VALUE_SRC));
+                io::copy(&mut r, &mut io::sink()).unwrap();
+            });
+        });
+    c.benchmark_group("Lexer")
+        .throughput(Throughput::Bytes(NULL_RIDDEN.len() as u64))
+        .bench_function("bytes lexer", |b| {
+            b.iter(|| {
+                let it = Lexer::new(NULL_RIDDEN.bytes(), BufferType::Bytes(128));
+                for t in it {
+                    black_box(t);
+                }
+            });
+        })
+        .bench_function("span filter null", |b| {
+            b.iter(|| {
+                let f = FilterTypedKeyValuePairs::new(Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span), TokenType::Null);
+                for t in f {
+                    black_box(t);
+                }
+            });
+        })
+        .bench_function("span lexer with cursor", |b| {
+            b.iter(|| {
+                let it = Lexer::new(Cursor::new(NULL_RIDDEN.as_bytes()).bytes().filter_map(|r| r.ok()), BufferType::Span);
+                for t in it {
+                    black_box(t);
+                }
+            })
+        })
+        .bench_function("span lexer bytes token reader", |b| {
+            b.iter(|| {
+                let mut r = TokenReader::new(Lexer::new(NULL_RIDDEN.bytes(), BufferType::Bytes(128)), None);
+                io::copy(&mut r, &mut io::sink()).ok();
+            })
+        })
+        .bench_function("span lexer span token reader", |b| {
+            b.iter(|| {
+                let mut r = TokenReader::new(Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span), Some(NULL_RIDDEN));
+                io::copy(&mut r, &mut io::sink()).ok();
+            })
+        })
+        .bench_function("span lexer", |b| {
+            b.iter(|| {
+                let it = Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span);
+                for t in it {
+                    black_box(t);
+                }
+            })
+        });
 }
+
+criterion_group!(benches, span_lexer_throughput_with_cursor);
+criterion_main!(benches);
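The migration above is largely mechanical: each `#[bench]` body moves into a `bench_function` closure, `test::black_box` becomes criterion's `black_box`, and the old `b.bytes = ...` assignment turns into a `Throughput` attached to a benchmark group. A minimal sketch of that mapping in isolation, with an invented `INPUT` fixture standing in for `NULL_RIDDEN`:

```rust
use criterion::{black_box, criterion_group, criterion_main, Criterion, Throughput};

// Invented fixture for illustration; the real benchmarks use NULL_RIDDEN.
static INPUT: &str = r#"{ "key": null, "other": null }"#;

fn bench_throughput(c: &mut Criterion) {
    let mut group = c.benchmark_group("example");
    // Equivalent of the old harness's `b.bytes = INPUT.len() as u64`:
    // criterion reports throughput alongside the timing numbers.
    group.throughput(Throughput::Bytes(INPUT.len() as u64));
    group.bench_function("count bytes", |b| b.iter(|| black_box(INPUT).bytes().count()));
    group.finish();
}

criterion_group!(benches, bench_throughput);
criterion_main!(benches);
```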
