diff --git a/src/lexer.rs b/src/lexer.rs
index d8e536e..8865328 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -85,6 +85,7 @@ pub enum Buffer {
 }
 
 /// The type of `Buffer` you want in each `Token`
+#[derive(Debug, PartialEq, Clone)]
 pub enum BufferType {
     /// Use a `Buffer::MultiByte` were appropriate. Initialize it with the
     /// given capcity (to obtain higher performance when pushing charcters)
@@ -257,8 +258,9 @@ impl Iterator for Lexer
                         state = Mode::String(false);
                         if let Some(ref mut v) = buf {
                             v.push(c);
+                        } else {
+                            set_cursor(self.cursor);
                         }
-                        set_cursor(self.cursor);
                     },
                     b'n' => {
                         state = Mode::Null([c, b'x', b'x', b'x'], 1);
@@ -267,11 +269,12 @@ impl Iterator for Lexer
                     b'0' ... b'9'
                     |b'-'
                     |b'.'=> {
+                        state = Mode::Number;
                         if let Some(ref mut v) = buf {
                             v.push(c);
+                        } else {
+                            set_cursor(self.cursor);
                         }
-                        state = Mode::Number;
-                        set_cursor(self.cursor);
                     },
                     b't' => {
                         state = Mode::True([c, b'x', b'x', b'x'], 1);
diff --git a/src/lib.rs b/src/lib.rs
index fa41a92..39d8ee7 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,5 +1,7 @@
 mod lexer;
 mod key_value_filter;
+mod reader;
 
 pub use lexer::{Lexer, Token, TokenType, Span, BufferType, Buffer};
 pub use key_value_filter::{FilterTypedKeyValuePairs};
+pub use reader::{TokenReader};
diff --git a/src/reader.rs b/src/reader.rs
new file mode 100644
index 0000000..e1df060
--- /dev/null
+++ b/src/reader.rs
@@ -0,0 +1,23 @@
+use std::io::{Read, Result, self};
+
+use super::{Token, TokenType};
+
+pub struct TokenReader<'a, I: Iterator<Item=Token>> {
+    iter: I,
+    src: Option<&'a str>,
+}
+
+impl<'a, I: Iterator<Item=Token>> TokenReader<'a, I> {
+    pub fn new(iter: I, source: Option<&'a str>) -> TokenReader<'a, I> {
+        TokenReader {
+            iter: iter,
+            src: source,
+        }
+    }
+}
+
+impl<'a, I: Iterator<Item=Token>> Read for TokenReader<'a, I> {
+    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
+        Err(io::Error::last_os_error())
+    }
+}
\ No newline at end of file
diff --git a/tests/filters.rs b/tests/filters.rs
index f1b618f..eae3ef3 100644
--- a/tests/filters.rs
+++ b/tests/filters.rs
@@ -1,19 +1,46 @@
 extern crate json_tools;
 
-use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType};
+use std::io::Read;
+
+use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType, TokenReader};
 
 #[test]
 fn filter_null_values() {
-    for &(src, count, fcount) in &[(r#"{"s":null, "s":true, "s":null }"#, 13, 5),
-                                   (r#"{"s":null, "s":null, "s":null }"#, 13, 2),
-                                   (r#"{"s":true, "s":null, "s":null }"#, 13, 5),
-                                   (r#"{"s":true, "s":null "s":null }"#, 12, 5), // invalid is fine
-                                   (r#"{"s":true,,,, "s":null, "s":null }"#, 16, 8),
-                                   (r#"{"s":null, "s":null, "s":true }"#, 13, 5),
-                                   (r#"{"s":true, "s":null, "s":true }"#, 13, 9),
-                                   (r#"{"s":true, "s":null "s":true }"#, 12, 8),] {
+    for &(src, want, count, fcount) in
+        &[(r#"{ "s":null, "s":true, "s":null }"#,
+           r#"{"s":true}"#, 13, 5),
+          (r#"{"s " : null, "s":null, "s":null }"#,
+           r#"{}"#, 13, 2),
+          (r#"{"s":true, "s":null, "s":null }"#,
+           r#"{"s":true}"#, 13, 5),
+          (r#"{"s":true, "s":null "s":null }"#,
+           r#"{"s":true}"#, 12, 5), // invalid is fine
+          (r#"{"s":true,,,, "s":null, "s":null }"#,
+           r#"{"s":true,,,}"#, 16, 8),
+          (r#"{"s":null, "s":null, "s":true }"#,
+           r#"{"s":true}"#, 13, 5),
+          (r#"{"s":true, "s":null, "s":true }"#,
+           r#"{"s":true,"s":true}"#, 13, 9),
+          (r#"{"s":true, "s":null "s":true }"#,
+           r#"{"s":true"s":true}"#, 12, 8),] {
         assert_eq!(Lexer::new(src.bytes(), BufferType::Span).count(), count);
-        assert_eq!(FilterTypedKeyValuePairs::new(Lexer::new(src.bytes(), BufferType::Span),
-                                                 TokenType::Null).count(), fcount);
+        let new_filter = |bt| {
+            FilterTypedKeyValuePairs::new(Lexer::new(src.bytes(), bt), TokenType::Null)
+        };
+        assert_eq!(new_filter(BufferType::Span).count(), fcount);
+
+        for bt in &[BufferType::Bytes(128), BufferType::Span] {
+            let mut buf: Vec<u8> = Vec::new();
+            let mut byte = [0u8];
+            let mut r = TokenReader::new(new_filter(bt.clone()), Some(src));
+
+            while let Ok(l) = r.read(&mut byte) {
+                if l == 0 {
+                    break
+                }
+                buf.push(byte[0]);
+            }
+            assert_eq!(&String::from_utf8(buf).unwrap(), want);
+        }
     }
 }
\ No newline at end of file
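
Reviewer note, not part of the patch: a minimal sketch of how the new
`TokenReader` is meant to be driven once `read()` is implemented. The stub
above still returns an error unconditionally, so this shows intent rather
than currently passing code; only `TokenReader` itself is new, everything
else is the crate's existing public API, and the input/output pair is
taken straight from the test table.

    extern crate json_tools;

    use std::io::Read;
    use json_tools::{Lexer, FilterTypedKeyValuePairs, BufferType, TokenType, TokenReader};

    fn main() {
        let src = r#"{"s":true, "s":null, "s":null }"#;
        // Drop every `"key": null` pair from the token stream ...
        let filtered = FilterTypedKeyValuePairs::new(
            Lexer::new(src.bytes(), BufferType::Span),
            TokenType::Null,
        );
        // ... then serialize the surviving tokens back to JSON text.
        // `Span` tokens carry only offsets, so the reader needs the
        // original source to resolve them; hence `Some(src)`.
        let mut out = String::new();
        TokenReader::new(filtered, Some(src))
            .read_to_string(&mut out)
            .expect("token stream re-serializes");
        assert_eq!(out, r#"{"s":true}"#);
    }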