Skip to content

Commit

Permalink
fix(value): Improve value coercion practices
Browse files Browse the repository at this point in the history
Template impact: There is now a consistent approach to when a
`Value` can be used as a string, whole number, fractional number, bool,
etc.

Examples
- Every `Value` is now treated as a string.
- Whole numbers are now preserved.
- Whole numbers can convert to fractional numbers but not the other way
  around.
- You can convert a fractional number to a whole number using the
  filters `round`, `ceil`, or `floor`.
- Operations on only whole numbers preserve their whole number nature.
- `for` tag ranges, indexing, etc. now only accept whole numbers.

Fixes cobalt-org#99

Programmatic impact: This implements the `Value` refactor from cobalt-org#95.
`Value` is still an enum but now there is a single `Scalar` variant.
This scalar is not an enum but is convertible between each of the
common data types.  Optimizations are in place to natively store some of
these data types.

BREAKING CHANGE: `Value`'s API has changed. Conversion behavior within
templates has changed.
  • Loading branch information
epage committed Jan 14, 2018
1 parent 090fbf4 commit ebb4f40
Show file tree
Hide file tree
Showing 23 changed files with 1,218 additions and 828 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ let template = liquid::ParserBuilder::with_liquid()
.parse("Liquid! {{num | minus: 2}}").unwrap();

let mut globals = liquid::Object::new();
globals.insert("num".to_owned(), liquid::Value::Num(4f32));
globals.insert("num".to_owned(), liquid::Value::scalar(4f32));

let output = template.render(&globals).unwrap();
assert_eq!(output, "Liquid! 2".to_string());
Expand Down
102 changes: 55 additions & 47 deletions src/compiler/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,52 +115,56 @@ pub fn granularize(block: &str) -> Result<Vec<Token>> {
for el in split_atom(block) {
push_more = None;
result.push(match &*el.trim() {
"" => continue,
"" => continue,

"|" => Token::Pipe,
"." => Token::Dot,
":" => Token::Colon,
"," => Token::Comma,
"[" => Token::OpenSquare,
"]" => Token::CloseSquare,
"(" => Token::OpenRound,
")" => Token::CloseRound,
"?" => Token::Question,
"-" => Token::Dash,
"=" => Token::Assignment,
"or" => Token::Or,
"|" => Token::Pipe,
"." => Token::Dot,
":" => Token::Colon,
"," => Token::Comma,
"[" => Token::OpenSquare,
"]" => Token::CloseSquare,
"(" => Token::OpenRound,
")" => Token::CloseRound,
"?" => Token::Question,
"-" => Token::Dash,
"=" => Token::Assignment,
"or" => Token::Or,

"==" => Token::Comparison(ComparisonOperator::Equals),
"!=" => Token::Comparison(ComparisonOperator::NotEquals),
"<=" => Token::Comparison(ComparisonOperator::LessThanEquals),
">=" => Token::Comparison(ComparisonOperator::GreaterThanEquals),
"<" => Token::Comparison(ComparisonOperator::LessThan),
">" => Token::Comparison(ComparisonOperator::GreaterThan),
"contains" => Token::Comparison(ComparisonOperator::Contains),
".." => Token::DotDot,
"==" => Token::Comparison(ComparisonOperator::Equals),
"!=" => Token::Comparison(ComparisonOperator::NotEquals),
"<=" => Token::Comparison(ComparisonOperator::LessThanEquals),
">=" => Token::Comparison(ComparisonOperator::GreaterThanEquals),
"<" => Token::Comparison(ComparisonOperator::LessThan),
">" => Token::Comparison(ComparisonOperator::GreaterThan),
"contains" => Token::Comparison(ComparisonOperator::Contains),
".." => Token::DotDot,

x if SINGLE_STRING_LITERAL.is_match(x) ||
DOUBLE_STRING_LITERAL.is_match(x) => {
Token::StringLiteral(x[1..x.len() - 1].to_owned())
}
x if NUMBER_LITERAL.is_match(x) => {
Token::NumberLiteral(x.parse::<f32>()
.expect(&format!("Could not parse {:?} as float", x)))
}
x if BOOLEAN_LITERAL.is_match(x) => {
Token::BooleanLiteral(x.parse::<bool>()
.expect(&format!("Could not parse {:?} as bool", x)))
}
x if INDEX.is_match(x) => {
let mut parts = x.splitn(2, '.');
parts.next().unwrap();
push_more =
Some(vec![Token::Identifier(parts.next().unwrap().to_owned())]);
Token::Dot
}
x if IDENTIFIER.is_match(x) => Token::Identifier(x.to_owned()),
x => return Err(Error::Lexer(format!("{} is not a valid identifier", x))),
});
x if SINGLE_STRING_LITERAL.is_match(x) || DOUBLE_STRING_LITERAL.is_match(x) => {
Token::StringLiteral(x[1..x.len() - 1].to_owned())
}
x if NUMBER_LITERAL.is_match(x) => {
x.parse::<i32>().map(Token::IntegerLiteral).unwrap_or_else(
|_e| {
Token::FloatLiteral(x.parse::<f32>()
.expect(&format!("Could not parse {:?} as float",
x)))
},
)
}
x if BOOLEAN_LITERAL.is_match(x) => {
Token::BooleanLiteral(x.parse::<bool>().expect(
&format!("Could not parse {:?} as bool", x),
))
}
x if INDEX.is_match(x) => {
let mut parts = x.splitn(2, '.');
parts.next().unwrap();
push_more = Some(vec![Token::Identifier(parts.next().unwrap().to_owned())]);
Token::Dot
}
x if IDENTIFIER.is_match(x) => Token::Identifier(x.to_owned()),
x => return Err(Error::Lexer(format!("{} is not a valid identifier", x))),
});
if let Some(v) = push_more {
result.extend(v);
}
Expand Down Expand Up @@ -341,16 +345,20 @@ mod test {
Token::Identifier("arg2".to_owned())]);
assert_eq!(granularize("multiply 5 3").unwrap(),
vec![Token::Identifier("multiply".to_owned()),
Token::NumberLiteral(5f32),
Token::NumberLiteral(3f32)]);
Token::IntegerLiteral(5i32),
Token::IntegerLiteral(3i32)]);
assert_eq!(granularize("multiply 5.5 3.2434").unwrap(),
vec![Token::Identifier("multiply".to_owned()),
Token::FloatLiteral(5.5f32),
Token::FloatLiteral(3.2434f32)]);
assert_eq!(granularize("for i in (1..5)").unwrap(),
vec![Token::Identifier("for".to_owned()),
Token::Identifier("i".to_owned()),
Token::Identifier("in".to_owned()),
Token::OpenRound,
Token::NumberLiteral(1f32),
Token::IntegerLiteral(1i32),
Token::DotDot,
Token::NumberLiteral(5f32),
Token::IntegerLiteral(5i32),
Token::CloseRound]);
assert_eq!(granularize("\"1, '2', 3, 4\"").unwrap(),
vec![Token::StringLiteral("1, '2', 3, 4".to_owned())]);
Expand Down
30 changes: 8 additions & 22 deletions src/compiler/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,17 +20,6 @@ use super::ParseTag;
use super::Token;
use value::Index;

fn coerce(index: f32) -> Option<isize> {
// at the first condition only is_normal is not enough
// because zero is not counted normal
if (index != 0f32 && !index.is_normal()) || index.round() > (::std::isize::MAX as f32) ||
index.round() < (::std::isize::MIN as f32) {
None
} else {
Some(index.round() as isize)
}
}

/// Parses the provided elements into a number of Renderable items
/// This is the internal version of parse that accepts Elements tokenized
/// by `lexer::tokenize` and does not register built-in blocks. The main use
Expand Down Expand Up @@ -94,13 +83,9 @@ pub fn parse_indexes(mut tokens: &[Token]) -> Result<Vec<Index>> {
Token::OpenSquare if tokens.len() > 2 => {
let index = match tokens[1] {
Token::StringLiteral(ref x) => Index::with_key(x.as_ref()),
Token::NumberLiteral(ref x) => {
let x = coerce(*x)
.ok_or_else(|| Error::Parser(format!("Invalid index {}", x)))?;
Index::with_index(x)
}
Token::IntegerLiteral(ref x) => Index::with_index(*x as isize),
_ => {
return Error::parser("number | string", Some(&tokens[0]));
return Error::parser("integer | string", Some(&tokens[0]));
}
};
indexes.push(index);
Expand Down Expand Up @@ -242,7 +227,7 @@ pub fn expect<'a, T>(tokens: &mut T, expected: &Token) -> Result<&'a Token>
pub fn consume_value_token(tokens: &mut Iter<Token>) -> Result<Token> {
match tokens.next() {
Some(t) => value_token(t.clone()),
None => Error::parser("string | number | identifier", None),
None => Error::parser("string | number | boolean | identifier", None),
}
}

Expand All @@ -251,7 +236,8 @@ pub fn consume_value_token(tokens: &mut Iter<Token>) -> Result<Token> {
pub fn value_token(t: Token) -> Result<Token> {
match t {
v @ Token::StringLiteral(_) |
v @ Token::NumberLiteral(_) |
v @ Token::IntegerLiteral(_) |
v @ Token::FloatLiteral(_) |
v @ Token::BooleanLiteral(_) |
v @ Token::Identifier(_) => Ok(v),
x => Error::parser("string | number | boolean | identifier", Some(&x)),
Expand Down Expand Up @@ -325,9 +311,9 @@ mod test_parse_output {
assert_eq!(result.unwrap(),
Output::new(Argument::Var(Variable::new("abc")),
vec![FilterPrototype::new("def",
vec![Argument::Val(Value::str("1")),
Argument::Val(Value::Num(2.0)),
Argument::Val(Value::str("3"))]),
vec![Argument::Val(Value::scalar("1")),
Argument::Val(Value::scalar(2.0)),
Argument::Val(Value::scalar("3"))]),
FilterPrototype::new("blabla", vec![])]));
}

Expand Down
42 changes: 27 additions & 15 deletions src/compiler/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,8 @@ pub enum Token {
Assignment,
Identifier(String),
StringLiteral(String),
NumberLiteral(f32),
IntegerLiteral(i32),
FloatLiteral(f32),
BooleanLiteral(bool),
DotDot,
Comparison(ComparisonOperator),
Expand All @@ -44,9 +45,10 @@ impl Token {
/// be interpreted as a Value.
pub fn to_value(&self) -> Result<Value> {
match self {
&Token::StringLiteral(ref x) => Ok(Value::str(x)),
&Token::NumberLiteral(x) => Ok(Value::Num(x)),
&Token::BooleanLiteral(x) => Ok(Value::Bool(x)),
&Token::StringLiteral(ref x) => Ok(Value::scalar(x.as_str())),
&Token::IntegerLiteral(x) => Ok(Value::scalar(x)),
&Token::FloatLiteral(x) => Ok(Value::scalar(x)),
&Token::BooleanLiteral(x) => Ok(Value::scalar(x)),
x => Error::parser("Value", Some(x)),
}
}
Expand All @@ -55,9 +57,10 @@ impl Token {
/// necessary
pub fn to_arg(&self) -> Result<Argument> {
match *self {
Token::NumberLiteral(f) => Ok(Argument::Val(Value::Num(f))),
Token::StringLiteral(ref s) => Ok(Argument::Val(Value::Str(s.clone()))),
Token::BooleanLiteral(b) => Ok(Argument::Val(Value::Bool(b))),
Token::IntegerLiteral(f) => Ok(Argument::Val(Value::scalar(f))),
Token::FloatLiteral(f) => Ok(Argument::Val(Value::scalar(f))),
Token::StringLiteral(ref s) => Ok(Argument::Val(Value::scalar(s.as_str()))),
Token::BooleanLiteral(b) => Ok(Argument::Val(Value::scalar(b))),
Token::Identifier(ref id) => {
let mut var = Variable::default();
var.extend(id.split('.').map(Index::with_key));
Expand Down Expand Up @@ -94,7 +97,8 @@ impl fmt::Display for Token {
Token::Comparison(ComparisonOperator::Contains) => "contains".to_owned(),
Token::Identifier(ref x) |
Token::StringLiteral(ref x) => x.clone(),
Token::NumberLiteral(ref x) => x.to_string(),
Token::IntegerLiteral(ref x) => x.to_string(),
Token::FloatLiteral(ref x) => x.to_string(),
Token::BooleanLiteral(ref x) => x.to_string(),
};
write!(f, "{}", out)
Expand All @@ -111,18 +115,26 @@ mod test {
let ctx = Context::new();
let t = Token::StringLiteral("hello".to_owned());
assert_eq!(t.to_arg().unwrap().evaluate(&ctx).unwrap(),
Value::str("hello"));
Value::scalar("hello"));
}

#[test]
fn evaluate_handles_number_literals() {
let ctx = Context::new();
assert_eq!(Token::NumberLiteral(42f32)
assert_eq!(Token::FloatLiteral(42f32)
.to_arg()
.unwrap()
.evaluate(&ctx)
.unwrap(),
Value::Num(42f32));
Value::scalar(42f32));

let ctx = Context::new();
assert_eq!(Token::IntegerLiteral(42i32)
.to_arg()
.unwrap()
.evaluate(&ctx)
.unwrap(),
Value::scalar(42i32));
}

#[test]
Expand All @@ -133,26 +145,26 @@ mod test {
.unwrap()
.evaluate(&ctx)
.unwrap(),
Value::Bool(true));
Value::scalar(true));

assert_eq!(Token::BooleanLiteral(false)
.to_arg()
.unwrap()
.evaluate(&ctx)
.unwrap(),
Value::Bool(false));
Value::scalar(false));
}

#[test]
fn evaluate_handles_identifiers() {
let mut ctx = Context::new();
ctx.set_global_val("var0", Value::Num(42f32));
ctx.set_global_val("var0", Value::scalar(42f32));
assert_eq!(Token::Identifier("var0".to_owned())
.to_arg()
.unwrap()
.evaluate(&ctx)
.unwrap(),
Value::Num(42f32));
Value::scalar(42f32));
assert!(Token::Identifier("nope".to_owned())
.to_arg()
.unwrap()
Expand Down
Loading

0 comments on commit ebb4f40

Please sign in to comment.