Skip to content

Commit

Permalink
feat(iterator-ext): more fun with Token-Iterators
Browse files Browse the repository at this point in the history
* attaches constructor utilities to all `Iterator<Item=Token>` types to make
  them easier to use. This is similar to the (former) standard-library
  `IteratorExt`, which provided methods such as `chain()` and `nth()`.

Closes #3
  • Loading branch information
Byron committed May 9, 2015
1 parent 0d125f2 commit 15dc5c5
Show file tree
Hide file tree
Showing 3 changed files with 36 additions and 2 deletions.
29 changes: 29 additions & 0 deletions src/iter_ext.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
use super::{Token, FilterTypedKeyValuePairs, TokenType, TokenReader};


/// Applies convenience constructors to all `Iterator<Item=Token>` types.
///
/// Implemented as an extension trait with default methods; a blanket impl
/// makes these available on every iterator yielding `Token`s.
pub trait IteratorExt: Iterator<Item=Token> {

    /// Returns an Iterator which filters key=value pairs, if `value.kind` matches
    /// the given `token_type`.
    ///
    /// It is useful, for example, to get rid of `null` values on a lexical level.
    ///
    /// # Arguments
    /// * `token_type` - the value kind whose key=value pairs should be removed
    ///   from the stream.
    fn filter_key_value_by_type(self, token_type: TokenType) -> FilterTypedKeyValuePairs<Self>
        where Self: Sized {
        FilterTypedKeyValuePairs::new(self, token_type)
    }

    /// Returns a `TokenReader` to produce a byte stream from `Token` instances.
    ///
    /// # Arguments
    /// * `source` - an optional, original string from which the tokens were
    ///   generated. This offers the best performance when serializing tokens,
    ///   as they can refer to their original `&str` slice.
    //
    // NOTE: the previously explicit `<'a>` lifetime was removed — with a single
    // input lifetime on `source`, elision ties it to the (already elided)
    // lifetime in the `TokenReader<Self>` return type, which is equivalent.
    fn reader(self, source: Option<&str>) -> TokenReader<Self>
        where Self: Sized {
        TokenReader::new(self, source)
    }
}

impl<T: Iterator<Item=Token>> IteratorExt for T {}
2 changes: 2 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@
mod lexer;
mod key_value_filter;
mod reader;
// Extension trait adding convenience constructors to `Iterator<Item=Token>`.
mod iter_ext;

pub use lexer::{Lexer, Token, TokenType, Span, BufferType, Buffer};
pub use key_value_filter::{FilterTypedKeyValuePairs};
pub use reader::{TokenReader};
// Re-exported so users can call `.filter_key_value_by_type()` and `.reader()`
// directly on any token iterator after `use json_tools::IteratorExt;`.
pub use iter_ext::IteratorExt;
7 changes: 5 additions & 2 deletions tests/filters.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
extern crate json_tools;

use std::io::{Read, Cursor, self};
use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType, TokenReader};
use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType, TokenReader,
IteratorExt};

#[test]
fn filter_null_values() {
Expand Down Expand Up @@ -36,7 +37,9 @@ fn filter_null_values() {

let mut buf: Vec<u8> = Vec::new();
let mut byte = [0u8];
let mut r = TokenReader::new(new_filter(bt.clone()), Some(src));
let mut r = Lexer::new(src.bytes(), bt.clone())
.filter_key_value_by_type(TokenType::Null)
.reader(Some(src));

while let Ok(l) = r.read(&mut byte) {
if l == 0 {
Expand Down

0 comments on commit 15dc5c5

Please sign in to comment.