diff --git a/src/iter_ext.rs b/src/iter_ext.rs new file mode 100644 index 0000000..add5373 --- /dev/null +++ b/src/iter_ext.rs @@ -0,0 +1,29 @@ +use super::{Token, FilterTypedKeyValuePairs, TokenType, TokenReader}; + + +/// Applies convenience constructors to all `Iterator` types +pub trait IteratorExt: Iterator<Item=Token> { + + /// Returns an Iterator which filters key=value pairs, if `value.kind` matches + /// the given `token_type`. + /// + /// It is useful, for example, to get rid of `null` values on a lexical level. + fn filter_key_value_by_type(self, token_type: TokenType) -> FilterTypedKeyValuePairs<Self> + where Self: Sized { + FilterTypedKeyValuePairs::new(self, token_type) + } + + /// Returns a `TokenReader` to produce a byte stream from `Token` instances + /// + /// # Arguments + /// * `source` - an optional, original string from which the tokens were + /// generated. This offers the best performance when + /// serializing tokens, as they can refer to their original + /// `&str` slice. + fn reader<'a>(self, source: Option<&'a str>) -> TokenReader<'a, Self> + where Self: Sized { + TokenReader::new(self, source) + } +} + +impl<T: Iterator<Item=Token>> IteratorExt for T {} diff --git a/src/lib.rs b/src/lib.rs index bfccaf7..b15e113 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -3,7 +3,9 @@ mod lexer; mod key_value_filter; mod reader; +mod iter_ext; pub use lexer::{Lexer, Token, TokenType, Span, BufferType, Buffer}; pub use key_value_filter::{FilterTypedKeyValuePairs}; pub use reader::{TokenReader}; +pub use iter_ext::IteratorExt; \ No newline at end of file diff --git a/tests/filters.rs b/tests/filters.rs index bc1db61..37e7da3 100644 --- a/tests/filters.rs +++ b/tests/filters.rs @@ -1,7 +1,8 @@ extern crate json_tools; use std::io::{Read, Cursor, self}; -use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType, TokenReader}; +use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType, TokenReader, + IteratorExt}; #[test] fn filter_null_values() { @@ -36,7 +37,9 @@ fn 
filter_null_values() { let mut buf: Vec<u8> = Vec::new(); let mut byte = [0u8]; - let mut r = TokenReader::new(new_filter(bt.clone()), Some(src)); + let mut r = Lexer::new(src.bytes(), bt.clone()) + .filter_key_value_by_type(TokenType::Null) + .reader(Some(src)); while let Ok(l) = r.read(&mut byte) { if l == 0 {