diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index 42ac385..4205ab6 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -263,6 +263,13 @@ mod parser_tests {
             ParseToken::ArrayEof
         ]));
 
+        assert_eq!(run("$[:]"), Ok(vec![
+            ParseToken::Absolute,
+            ParseToken::Array,
+            ParseToken::Range(None, None),
+            ParseToken::ArrayEof
+        ]));
+
         match run("$[") {
             Ok(_) => panic!(),
             _ => {}
@@ -520,5 +527,12 @@ mod tokenizer_tests {
             ]
             , Some(TokenError::Eof)
         ));
+
+        run("$[:]", (vec![
+            Token::Absolute(0),
+            Token::OpenArray(1),
+            Token::Split(2),
+            Token::CloseArray(3)
+        ], Some(TokenError::Eof)));
     }
 }
\ No newline at end of file
diff --git a/src/parser/parser.rs b/src/parser/parser.rs
index 4c76103..bdd2184 100644
--- a/src/parser/parser.rs
+++ b/src/parser/parser.rs
@@ -375,6 +375,13 @@ impl Parser {
     fn range_to(tokenizer: &mut TokenReader) -> ParseResult {
         debug!("#range_to");
 
+        match tokenizer.peek_token() {
+            Ok(Token::CloseArray(_)) => {
+                return Ok(Self::node(ParseToken::Range(None, None)))
+            }
+            _ => {}
+        }
+
         match tokenizer.next_token() {
             Ok(Token::Key(pos, ref val)) => {
                 let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
diff --git a/tests/filter.rs b/tests/filter.rs
index 101ec8e..1b0fb18 100644
--- a/tests/filter.rs
+++ b/tests/filter.rs
@@ -349,4 +349,11 @@ fn filer_same_obj() {
         {"a": 1},
         {"a": 1}
     ]));
-}
\ No newline at end of file
+}
+
+#[test]
+fn empty_range() {
+    setup();
+
+    select_and_then_compare("$[:]", json!(["first", "second"]), json!(["first", "second"]));
+}
\ No newline at end of file