Commit: Apply latest rustfmt
Byron committed Dec 13, 2017
1 parent 1235550 commit 0750473
Showing 8 changed files with 231 additions and 204 deletions.
93 changes: 51 additions & 42 deletions benches/usage.rs
@@ -3,7 +3,7 @@
 extern crate json_tools;
 extern crate test;
 
-use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType, TokenReader, Token, Buffer, Span};
+use json_tools::{Buffer, BufferType, FilterTypedKeyValuePairs, Lexer, Span, Token, TokenReader, TokenType};
 use std::io;
 
 const NULL_RIDDEN: &'static str = r##"
@@ -124,33 +124,30 @@ struct KeyValueProducer {
 impl KeyValueProducer {
     fn new(bt: BufferType) -> KeyValueProducer {
         KeyValueProducer {
-            buf: [Token {
-                      kind: TokenType::String,
-                      buf: match bt {
-                          BufferType::Bytes(_) => Buffer::MultiByte(KEY_VALUE_SRC[0..5].into()),
-                          BufferType::Span => Buffer::Span(Span { first: 0, end: 5 }),
-                      },
-                  },
-                  Token {
-                      kind: TokenType::Colon,
-                      buf: Buffer::Span(Span::default()),
-                  },
-                  Token {
-                      kind: TokenType::String,
-                      buf: match bt {
-                          BufferType::Bytes(_) => Buffer::MultiByte(KEY_VALUE_SRC[6..25].into()),
-                          BufferType::Span => {
-                              Buffer::Span(Span {
-                                  first: 6,
-                                  end: 25,
-                              })
-                          }
-                      },
-                  },
-                  Token {
-                      kind: TokenType::Comma,
-                      buf: Buffer::Span(Span::default()),
-                  }],
+            buf: [
+                Token {
+                    kind: TokenType::String,
+                    buf: match bt {
+                        BufferType::Bytes(_) => Buffer::MultiByte(KEY_VALUE_SRC[0..5].into()),
+                        BufferType::Span => Buffer::Span(Span { first: 0, end: 5 }),
+                    },
+                },
+                Token {
+                    kind: TokenType::Colon,
+                    buf: Buffer::Span(Span::default()),
+                },
+                Token {
+                    kind: TokenType::String,
+                    buf: match bt {
+                        BufferType::Bytes(_) => Buffer::MultiByte(KEY_VALUE_SRC[6..25].into()),
+                        BufferType::Span => Buffer::Span(Span { first: 6, end: 25 }),
+                    },
+                },
+                Token {
+                    kind: TokenType::Comma,
+                    buf: Buffer::Span(Span::default()),
+                },
+            ],
             cur: 0,
         }
     }
@@ -184,8 +181,10 @@ fn span_lexer_throughput(b: &mut test::Bencher) {
 #[bench]
 fn span_lexer_span_token_reader_throughput(b: &mut test::Bencher) {
     b.iter(|| {
-        let mut r = TokenReader::new(Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span),
-                                     Some(NULL_RIDDEN));
+        let mut r = TokenReader::new(
+            Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span),
+            Some(NULL_RIDDEN),
+        );
         io::copy(&mut r, &mut io::sink()).ok();
     });
     b.bytes = NULL_RIDDEN.len() as u64;
@@ -194,8 +193,10 @@ fn span_lexer_span_token_reader_throughput(b: &mut test::Bencher) {
 #[bench]
 fn span_lexer_bytes_token_reader_throughput(b: &mut test::Bencher) {
     b.iter(|| {
-        let mut r = TokenReader::new(Lexer::new(NULL_RIDDEN.bytes(), BufferType::Bytes(128)),
-                                     None);
+        let mut r = TokenReader::new(
+            Lexer::new(NULL_RIDDEN.bytes(), BufferType::Bytes(128)),
+            None,
+        );
         io::copy(&mut r, &mut io::sink()).ok();
     });
     b.bytes = NULL_RIDDEN.len() as u64;
@@ -205,8 +206,10 @@ fn span_lexer_bytes_token_reader_throughput(b: &mut test::Bencher) {
 fn bytes_token_producer_bytes_token_reader_throughput(b: &mut test::Bencher) {
     let mut ncb = 0u64;
     b.iter(|| {
-        let mut r = TokenReader::new(KeyValueProducer::new(BufferType::Bytes(0)).take(30000),
-                                     None);
+        let mut r = TokenReader::new(
+            KeyValueProducer::new(BufferType::Bytes(0)).take(30000),
+            None,
+        );
         ncb = io::copy(&mut r, &mut io::sink()).unwrap();
     });
     b.bytes = ncb;
@@ -216,8 +219,10 @@ fn bytes_token_producer_bytes_token_reader_throughput(b: &mut test::Bencher) {
 fn span_token_producer_bytes_token_reader_throughput(b: &mut test::Bencher) {
     let mut ncb = 0u64;
     b.iter(|| {
-        let mut r = TokenReader::new(KeyValueProducer::new(BufferType::Span).take(30000),
-                                     Some(KEY_VALUE_SRC));
+        let mut r = TokenReader::new(
+            KeyValueProducer::new(BufferType::Span).take(30000),
+            Some(KEY_VALUE_SRC),
+        );
         ncb = io::copy(&mut r, &mut io::sink()).unwrap();
     });
     b.bytes = ncb;
@@ -234,27 +239,31 @@ fn bytes_lexer_throughput(b: &mut test::Bencher) {
     b.bytes = NULL_RIDDEN.len() as u64;
 }
 
-
 #[bench]
 fn span_filter_null_throughput(b: &mut test::Bencher) {
     b.iter(|| {
-        let f = FilterTypedKeyValuePairs::new(Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span),
-                                              TokenType::Null);
+        let f = FilterTypedKeyValuePairs::new(
+            Lexer::new(NULL_RIDDEN.bytes(), BufferType::Span),
+            TokenType::Null,
+        );
         for t in f {
             test::black_box(t);
         }
     });
     b.bytes = NULL_RIDDEN.len() as u64;
 }
 
-
 #[bench]
 fn span_lexer_throughput_with_cursor(b: &mut test::Bencher) {
     use std::io::{Cursor, Read};
 
     b.iter(|| {
-        let it = Lexer::new(Cursor::new(NULL_RIDDEN.as_bytes()).bytes().filter_map(|r| r.ok()),
-                            BufferType::Span);
+        let it = Lexer::new(
+            Cursor::new(NULL_RIDDEN.as_bytes())
+                .bytes()
+                .filter_map(|r| r.ok()),
+            BufferType::Span,
+        );
         for t in it {
             test::black_box(t);
        }
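Outside the `#[bench]` harness, the pattern these benchmarks measure is simply "lex, then stream the tokens back out as bytes". A minimal sketch, assuming only the json_tools API visible in this diff (Lexer, BufferType, TokenReader); the sample JSON string and `main` are illustrative, not from the repository:

```rust
extern crate json_tools;

use json_tools::{BufferType, Lexer, TokenReader};
use std::io;

fn main() {
    // Hypothetical input; the benches use the NULL_RIDDEN constant instead.
    let src = r#"{"key": null, "count": 42}"#;
    // Span tokens store only offsets, so the reader needs the original
    // source to resolve them back into bytes.
    let mut r = TokenReader::new(Lexer::new(src.bytes(), BufferType::Span), Some(src));
    let n = io::copy(&mut r, &mut io::sink()).unwrap();
    println!("re-serialized {} bytes", n);
}
```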
9 changes: 5 additions & 4 deletions src/iter_ext.rs
@@ -1,5 +1,4 @@
-use super::{Token, FilterTypedKeyValuePairs, TokenType, TokenReader};
-
+use super::{FilterTypedKeyValuePairs, Token, TokenReader, TokenType};
 
 /// Applies convenience constructors to all `Iterator<Item=Token>` types
 pub trait IteratorExt: Iterator<Item = Token> {
@@ -8,7 +7,8 @@ pub trait IteratorExt: Iterator<Item = Token> {
     ///
     /// It is useful, for example, to get rid of `null` values on a lexical level.
     fn filter_key_value_by_type(self, token_type: TokenType) -> FilterTypedKeyValuePairs<Self>
-        where Self: Sized
+    where
+        Self: Sized,
     {
         FilterTypedKeyValuePairs::new(self, token_type)
     }
@@ -21,7 +21,8 @@ pub trait IteratorExt: Iterator<Item = Token> {
     /// serializing tokens, as they can refer to their original
     /// `&str` slice.
     fn reader<'a>(self, source: Option<&'a str>) -> TokenReader<Self>
-        where Self: Sized
+    where
+        Self: Sized,
    {
         TokenReader::new(self, source)
     }
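These two trait methods chain naturally. A sketch of the intended call style, assuming the blanket impl the trait doc implies and that `IteratorExt` is importable from the crate root (the input string and expected output are illustrative):

```rust
extern crate json_tools;

use json_tools::{BufferType, IteratorExt, Lexer, TokenType};
use std::io::Read;

fn main() {
    let src = r#"{"name": "x", "unused": null}"#;
    let mut out = String::new();
    Lexer::new(src.bytes(), BufferType::Span)
        .filter_key_value_by_type(TokenType::Null) // drop `"unused": null`
        .reader(Some(src)) // span tokens need the original source
        .read_to_string(&mut out)
        .unwrap();
    // Inter-token whitespace is not preserved; expect something like {"name":"x"}.
    println!("{}", out);
}
```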
9 changes: 5 additions & 4 deletions src/key_value_filter.rs
@@ -40,7 +40,8 @@ impl<I: IntoIterator<Item = Token>> FilterTypedKeyValuePairs<I> {
 }
 
 impl<I> Iterator for FilterTypedKeyValuePairs<I>
-    where I: IntoIterator<Item = Token>
+where
+    I: IntoIterator<Item = Token>,
 {
     type Item = Token;
 
@@ -136,9 +137,9 @@ impl<I> Iterator for FilterTypedKeyValuePairs<I>
                             }
                         }
                         _ => return first_token(&mut self.buf, first_str_candidate),
-                    }// end match token kind (string?)
-                }// end inner str candidate LOOP
-            }// end have token
+                    } // end match token kind (string?)
+                } // end inner str candidate LOOP
+            } // end have token
             None => None,
         }
     }
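Constructed directly, the filter is a plain token-to-token adapter, exactly as the span_filter_null_throughput bench uses it. A small sketch assuming the API shown in this diff; the input string and the zero-null assertion are illustrative:

```rust
extern crate json_tools;

use json_tools::{BufferType, FilterTypedKeyValuePairs, Lexer, TokenType};

fn main() {
    let src = r#"{"a": null, "b": 1}"#;
    let filtered = FilterTypedKeyValuePairs::new(
        Lexer::new(src.bytes(), BufferType::Span),
        TokenType::Null,
    );
    // The whole `"a": null` pair (key, colon, value, separator) should be
    // dropped on the lexical level, so no Null token survives.
    let mut nulls = 0;
    for token in filtered {
        if let TokenType::Null = token.kind {
            nulls += 1;
        }
    }
    assert_eq!(nulls, 0);
}
```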
56 changes: 25 additions & 31 deletions src/lexer.rs
@@ -112,7 +112,8 @@ pub enum BufferType {
 }
 
 impl<I> Lexer<I>
-    where I: IntoIterator<Item = u8>
+where
+    I: IntoIterator<Item = u8>,
 {
     /// Returns a new Lexer from a given byte iterator.
     pub fn new(chars: I, buffer_type: BufferType) -> Lexer<I> {
@@ -170,7 +171,8 @@ enum Mode {
 }
 
 impl<I> Iterator for Lexer<I>
-    where I: IntoIterator<Item = u8>
+where
+    I: IntoIterator<Item = u8>,
 {
     type Item = Token;
 
@@ -229,21 +231,19 @@ impl<I> Iterator for Lexer<I>
                        continue;
                    }
                }
-                Mode::Number => {
-                    match c {
-                        b'0'...b'9' | b'-' | b'.' => {
-                            if let Some(ref mut v) = buf {
-                                v.push(c);
-                            }
-                            continue;
-                        }
-                        _ => {
-                            t = Some(TokenType::Number);
-                            self.put_back(c);
-                            break;
-                        }
-                    }
-                }
+                Mode::Number => match c {
+                    b'0'...b'9' | b'-' | b'.' => {
+                        if let Some(ref mut v) = buf {
+                            v.push(c);
+                        }
+                        continue;
+                    }
+                    _ => {
+                        t = Some(TokenType::Number);
+                        self.put_back(c);
+                        break;
+                    }
+                },
                 Mode::True(ref mut b, ref mut i) => {
                     b[*i] = c;
                     if *i == 3 {
@@ -343,10 +343,10 @@ impl<I> Iterator for Lexer<I>
                            break;
                        }
                        _ => {}
-                    }// end single byte match
-                }// end case SlowPath
-            }// end match state
-        }// end for each byte
+                    } // end single byte match
+                } // end case SlowPath
+            } // end match state
+        } // end for each byte
 
         match t {
             None => None,
@@ -355,19 +355,13 @@ impl<I> Iterator for Lexer<I>
                    None
                } else {
                    let buf = match (&t, buf) {
-                        (&TokenType::String, Some(b)) |
-                        (&TokenType::Number, Some(b)) => Buffer::MultiByte(b),
-                        _ => {
-                            Buffer::Span(Span {
-                                first: first,
-                                end: self.cursor,
-                            })
-                        }
+                        (&TokenType::String, Some(b)) | (&TokenType::Number, Some(b)) => Buffer::MultiByte(b),
+                        _ => Buffer::Span(Span {
+                            first: first,
+                            end: self.cursor,
+                        }),
                    };
-                    Some(Token {
-                        kind: t,
-                        buf: buf,
-                    })
+                    Some(Token { kind: t, buf: buf })
                }
            }
        }
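The Span/MultiByte split these hunks reformat is the lexer's central design choice: a token either owns its bytes (`Buffer::MultiByte`) or merely points into the input (`Buffer::Span`). A sketch of consuming span tokens, assuming `Span::first`/`end` are byte offsets as the reader's slicing suggests; the input is made up:

```rust
extern crate json_tools;

use json_tools::{Buffer, BufferType, Lexer, TokenType};

fn main() {
    let src = r#"{"answer": 42}"#;
    for token in Lexer::new(src.bytes(), BufferType::Span) {
        // With BufferType::Span, tokens carry offsets instead of copies,
        // so the original source can be sliced to recover their text.
        if let Buffer::Span(ref s) = token.buf {
            let text = &src[s.first as usize..s.end as usize];
            if let TokenType::Number = token.kind {
                println!("number at {}..{}: {}", s.first, s.end, text);
            }
        }
    }
}
```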
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -4,7 +4,7 @@ mod key_value_filter;
 mod reader;
 mod iter_ext;
 
-pub use lexer::{Lexer, Token, TokenType, Span, BufferType, Buffer};
+pub use lexer::{Buffer, BufferType, Lexer, Span, Token, TokenType};
 pub use key_value_filter::FilterTypedKeyValuePairs;
 pub use reader::TokenReader;
 pub use iter_ext::IteratorExt;
24 changes: 10 additions & 14 deletions src/reader.rs
@@ -2,7 +2,7 @@ use std::io::{Read, Result, Write};
 use std::cmp;
 use std::ptr;
 
-use super::{Token, TokenType, Buffer};
+use super::{Buffer, Token, TokenType};
 
 fn copy_memory(src: &[u8], dst: &mut [u8]) {
     let len_src = src.len();
@@ -72,17 +72,13 @@ impl<'a, I: IntoIterator<Item = Token>> Read for TokenReader<'a, I> {
                None => return Ok(buf.len() - bl),
                Some(t) => {
                    let bytes: &[u8] = match t.kind {
-                        TokenType::String | TokenType::Number => {
-                            match t.buf {
-                                Buffer::MultiByte(ref b) => &b,
-                                Buffer::Span(ref s) => {
-                                    match self.src {
-                                        Some(b) => b[s.first as usize..s.end as usize].as_bytes(),
-                                        None => panic!("Must set source if tokens don't provide byter buffers"),
-                                    }
-                                }
-                            }
-                        }
+                        TokenType::String | TokenType::Number => match t.buf {
+                            Buffer::MultiByte(ref b) => &b,
+                            Buffer::Span(ref s) => match self.src {
+                                Some(b) => b[s.first as usize..s.end as usize].as_bytes(),
+                                None => panic!("Must set source if tokens don't provide byter buffers"),
+                            },
+                        },
                        TokenType::Invalid => "".as_bytes(),
                        _ => t.kind.as_ref().as_bytes(),
                    };
@@ -100,8 +96,8 @@ impl<'a, I: IntoIterator<Item = Token>> Read for TokenReader<'a, I> {
                        return Ok(buf.len());
                    }
                }
-            }// match iter.next()
-        }// end while there are bytes to produce
+            } // match iter.next()
+        } // end while there are bytes to produce
        unreachable!();
    }
 }
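Because TokenReader implements `Read`, re-serializing a token stream is ordinary I/O. A sketch assuming Bytes-mode tokens, which own their text so no source string is needed; the input string and the `Bytes(64)` size hint are illustrative (the benches use `Bytes(128)` and `Bytes(0)`):

```rust
extern crate json_tools;

use json_tools::{BufferType, Lexer, TokenReader};
use std::io::Read;

fn main() {
    let src = r#"{ "a" : 1, "b" : null }"#;
    let mut out = Vec::new();
    // With BufferType::Bytes, String/Number tokens carry their own bytes,
    // so the reader's source argument can be None.
    TokenReader::new(Lexer::new(src.bytes(), BufferType::Bytes(64)), None)
        .read_to_end(&mut out)
        .unwrap();
    // Token text is preserved; inter-token whitespace is not.
    println!("{}", String::from_utf8_lossy(&out));
}
```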