Skip to content

Commit

Permalink
feat(token-reader): infrastructure setup
Browse files Browse the repository at this point in the history
* added `TokenReader` type with basic API
* improved existing filter tests to verify `TokenReader` operation

Next: implementation
  • Loading branch information
Byron committed May 7, 2015
1 parent 431f051 commit 96dac09
Show file tree
Hide file tree
Showing 4 changed files with 69 additions and 14 deletions.
9 changes: 6 additions & 3 deletions src/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,7 @@ pub enum Buffer {
}

/// The type of `Buffer` you want in each `Token`
#[derive(Debug, PartialEq, Clone)]
pub enum BufferType {
/// Use a `Buffer::MultiByte` where appropriate. Initialize it with the
/// given capacity (to obtain higher performance when pushing characters)
Expand Down Expand Up @@ -257,8 +258,9 @@ impl<I> Iterator for Lexer<I>
state = Mode::String(false);
if let Some(ref mut v) = buf {
v.push(c);
} else {
set_cursor(self.cursor);
}
set_cursor(self.cursor);
},
b'n' => {
state = Mode::Null([c, b'x', b'x', b'x'], 1);
Expand All @@ -267,11 +269,12 @@ impl<I> Iterator for Lexer<I>
b'0' ... b'9'
|b'-'
|b'.'=> {
state = Mode::Number;
if let Some(ref mut v) = buf {
v.push(c);
} else {
set_cursor(self.cursor);
}
state = Mode::Number;
set_cursor(self.cursor);
},
b't' => {
state = Mode::True([c, b'x', b'x', b'x'], 1);
Expand Down
2 changes: 2 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
mod lexer;
mod key_value_filter;
mod reader;

pub use lexer::{Lexer, Token, TokenType, Span, BufferType, Buffer};
pub use key_value_filter::{FilterTypedKeyValuePairs};
pub use reader::{TokenReader};
23 changes: 23 additions & 0 deletions src/reader.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
use std::io::{Read, Result, self};

use super::{Token, TokenType};

/// An adapter that serializes a stream of `Token`s back into a byte
/// stream via the `std::io::Read` trait.
pub struct TokenReader<'a, I: Iterator<Item=Token>> {
    /// The underlying iterator yielding the tokens to be serialized.
    iter: I,
    /// The original source text, if available — presumably the string the
    /// tokens' spans index into, allowing span-buffered tokens to be
    /// written back out. TODO(review): confirm once `read` is implemented.
    src: Option<&'a str>,
}

impl<'a, I: Iterator<Item=Token>> TokenReader<'a, I> {
    /// Returns a new `TokenReader` wrapping the given token iterator.
    ///
    /// `source` is the original text the tokens were lexed from, if the
    /// caller still has it; pass `None` when only owned token buffers
    /// are available.
    pub fn new(iter: I, source: Option<&'a str>) -> TokenReader<'a, I> {
        TokenReader {
            src: source,
            iter: iter,
        }
    }
}

impl<'a, I: Iterator<Item=Token>> Read for TokenReader<'a, I> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
Err(io::Error::last_os_error())
}
}
49 changes: 38 additions & 11 deletions tests/filters.rs
Original file line number Diff line number Diff line change
@@ -1,19 +1,46 @@
extern crate json_tools;

use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType};
use std::io::Read;

use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType, TokenReader};

#[test]
fn filter_null_values() {
    // Each case is (input JSON, expected output after filtering,
    // raw token count, token count after removing null-valued pairs).
    for &(src, want, count, fcount) in
        &[(r#"{ "s":null, "s":true, "s":null }"#,
           r#"{"s":true}"#, 13, 5),
          (r#"{"s " : null, "s":null, "s":null }"#,
           r#"{}"#, 13, 2),
          (r#"{"s":true, "s":null, "s":null }"#,
           r#"{"s":true}"#, 13, 5),
          (r#"{"s":true, "s":null "s":null }"#,
           r#"{"s":true}"#, 12, 5), // invalid is fine
          (r#"{"s":true,,,, "s":null, "s":null }"#,
           r#"{"s":true,,,}"#, 16, 8),
          (r#"{"s":null, "s":null, "s":true }"#,
           r#"{"s":true}"#, 13, 5),
          (r#"{"s":true, "s":null, "s":true }"#,
           r#"{"s":true,"s":true}"#, 13, 9),
          (r#"{"s":true, "s":null "s":true }"#,
           r#"{"s":true"s":true}"#, 12, 8),] {
        // Sanity-check the raw lexer token count first.
        assert_eq!(Lexer::new(src.bytes(), BufferType::Span).count(), count);
        let new_filter = |bt| {
            FilterTypedKeyValuePairs::new(Lexer::new(src.bytes(), bt), TokenType::Null)
        };
        assert_eq!(new_filter(BufferType::Span).count(), fcount);

        // Round-trip the filtered tokens through a TokenReader one byte
        // at a time and compare against the expected serialized output.
        for bt in &[BufferType::Bytes(128), BufferType::Span] {
            let mut buf: Vec<u8> = Vec::new();
            let mut byte = [0u8];
            let mut r = TokenReader::new(new_filter(bt.clone()), Some(src));

            while let Ok(l) = r.read(&mut byte) {
                if l == 0 {
                    break
                }
                buf.push(byte[0]);
            }
            assert_eq!(&String::from_utf8(buf).unwrap(), want);
        }
    }
}

0 comments on commit 96dac09

Please sign in to comment.