diff --git a/coverage.sh b/coverage.sh
new file mode 100755
index 0000000..7fbc9e1
--- /dev/null
+++ b/coverage.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+
+#
+# cargo install cargo-tarpaulin
+#
+
+set -e
+
+cargo tarpaulin --exclude-files nodejs wasm parser/mod.rs -v --all
\ No newline at end of file
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index bda8b4f..3ee69a8 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -41,6 +41,38 @@ mod parser_tests {
         interpreter.start()
     }
 
+    #[test]
+    fn parse_error() {
+        setup();
+
+        fn invalid(path: &str) {
+            if let Err(_) = run(path) {
+                assert!(true);
+            } else {
+                assert!(false);
+            }
+        }
+
+        invalid("$[]");
+        invalid("$[a]");
+        invalid("$[?($.a)]");
+        invalid("$[?(@.a > @.b]");
+        invalid("$[?(@.a < @.b&&(@.c < @.d)]");
+        invalid("@.");
+        invalid("$..[?(a <= @.a)]"); // invalid term value
+        invalid("$['a', b]");
+        invalid("$[0, >=]");
+        invalid("$[a:]");
+        invalid("$[:a]");
+        invalid("$[::a]");
+        invalid("$[:>]");
+        invalid("$[1:>]");
+        invalid("$[1,,]");
+        invalid("$[?]");
+        invalid("$[?(1 = 1)]");
+        invalid("$[?(1 = >)]");
+    }
+
     #[test]
     fn parse_path() {
         setup();
@@ -434,6 +466,18 @@ mod parser_tests {
             ])
         );
 
+        assert_eq!(
+            run(r#"$[?(@ > 1)]"#),
+            Ok(vec![
+                ParseToken::Absolute,
+                ParseToken::Array,
+                ParseToken::Relative,
+                ParseToken::Number(1_f64),
+                ParseToken::Filter(FilterToken::Greater),
+                ParseToken::ArrayEof
+            ])
+        );
+
         assert_eq!(
             run("$[:]"),
             Ok(vec![
@@ -463,36 +507,6 @@ mod parser_tests {
                 ParseToken::ArrayEof
             ])
         );
-
-        match run("$[") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[a]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[?($.a)]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[?(@.a > @.b]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[?(@.a < @.b&&(@.c < @.d)]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
     }
 
     #[test]
@@ -776,6 +790,21 @@ mod tokenizer_tests {
             ),
         );
 
+        run(
+            r#"$['single\'1','single\'2']"#,
+            (
+                vec![
+                    Token::Absolute(0),
+                    Token::OpenArray(1),
+                    Token::SingleQuoted(2, "single\'1".to_string()),
+                    Token::Comma(13),
+                    Token::SingleQuoted(14, "single\'2".to_string()),
+                    Token::CloseArray(25),
+                ],
+                Some(TokenError::Eof),
+            ),
+        );
+
         run(
             r#"$["double\"quote"]"#,
             (
diff --git a/src/parser/parser.rs b/src/parser/parser.rs
index 65eaa56..3ef1a0c 100644
--- a/src/parser/parser.rs
+++ b/src/parser/parser.rs
@@ -111,17 +111,7 @@ impl Parser {
     fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult {
         debug!("#paths_dot");
         let node = Self::path(prev, tokenizer)?;
-        match tokenizer.peek_token() {
-            Ok(Token::Equal(_))
-            | Ok(Token::NotEqual(_))
-            | Ok(Token::Little(_))
-            | Ok(Token::LittleOrEqual(_))
-            | Ok(Token::Greater(_))
-            | Ok(Token::GreaterOrEqual(_))
-            | Ok(Token::And(_))
-            | Ok(Token::Or(_)) => Ok(node),
-            _ => Self::paths(node, tokenizer),
-        }
+        Self::paths(node, tokenizer)
     }
 
     fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult {
@@ -201,7 +191,12 @@ impl Parser {
     fn boolean(tokenizer: &mut TokenReader) -> ParseResult {
        debug!("#boolean");
         match tokenizer.next_token() {
-            Ok(Token::Key(_, v)) => {
+            Ok(Token::Key(_, ref v))
+                if {
+                    let b = v.as_bytes();
+                    b.len() > 0 && (b[0] == b't' || b[0] == b'T' || b[0] == b'f' || b[0] == b'F')
+                } =>
+            {
                 Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
             }
             _ => Err(tokenizer.err_msg()),
@@ -214,15 +209,11 @@ impl Parser {
             Self::eat_token(tokenizer);
             Self::eat_whitespace(tokenizer);
 
-            if !(tokenizer.peek_is(SINGLE_QUOTE) || tokenizer.peek_is(DOUBLE_QUOTE)) {
-                return Err(tokenizer.err_msg());
-            }
-
             match tokenizer.next_token() {
                 Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
                     keys.push(val);
                 }
-                _ => {}
+                _ => return Err(tokenizer.err_msg()),
             }
 
             Self::eat_whitespace(tokenizer);
@@ -241,7 +232,6 @@ impl Parser {
                     Self::array_keys(tokenizer, val)
                 }
             }
-            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
             _ => Err(tokenizer.err_msg()),
         }
     }
@@ -341,25 +331,33 @@ impl Parser {
     fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
         Self::eat_whitespace(tokenizer);
 
-        if tokenizer.peek_is(SPLIT) {
-            Self::eat_token(tokenizer);
-            Self::eat_whitespace(tokenizer);
-
-            if tokenizer.peek_is(KEY) {
-                match tokenizer.next_token() {
-                    Ok(Token::Key(pos, str_step)) => {
-                        match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
-                            Ok(step) => Ok(Some(step)),
-                            Err(e) => Err(e),
-                        }
-                    }
-                    _ => Ok(None),
-                }
-            } else {
-                Ok(None)
+        match tokenizer.peek_token() {
+            Ok(Token::Split(_)) => {
+                Self::eat_token(tokenizer);
+                Self::eat_whitespace(tokenizer);
+            }
+            _ => {
+                return Ok(None);
+            }
+        }
+
+        match tokenizer.peek_token() {
+            Ok(Token::Key(_, _)) => {}
+            _ => {
+                return Ok(None);
+            }
+        }
+
+        match tokenizer.next_token() {
+            Ok(Token::Key(pos, str_step)) => {
+                match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
+                    Ok(step) => Ok(Some(step)),
+                    Err(e) => Err(e),
+                }
+            }
+            _ => {
+                unreachable!();
             }
-        } else {
-            Ok(None)
         }
     }
 
@@ -423,7 +421,6 @@ impl Parser {
                 Self::eat_whitespace(tokenizer);
                 Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
             }
-            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
             _ => Err(tokenizer.err_msg()),
         }
     }
@@ -509,7 +506,6 @@ impl Parser {
                     Ok(Self::node(ParseToken::Number(number)))
                 }
             },
-            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
             _ => Err(tokenizer.err_msg()),
         }
     }
@@ -530,14 +526,6 @@ impl Parser {
         }
     }
 
-    fn peek_key(tokenizer: &mut TokenReader) -> Option<String> {
-        if let Ok(Token::Key(_, k)) = tokenizer.peek_token() {
-            Some(k.clone())
-        } else {
-            None
-        }
-    }
-
     fn term(tokenizer: &mut TokenReader) -> ParseResult {
         debug!("#term");
 
@@ -563,15 +551,15 @@ impl Parser {
         }
 
         if tokenizer.peek_is(KEY) {
-            return match Self::peek_key(tokenizer) {
-                Some(key) => match key.chars().next() {
-                    Some(ch) => match ch {
-                        '-' | '0'...'9' => Self::term_num(tokenizer),
-                        _ => Self::boolean(tokenizer),
-                    },
-                    _ => Err(tokenizer.err_msg()),
-                },
-                _ => Err(tokenizer.err_msg()),
+            let key = if let Ok(Token::Key(_, k)) = tokenizer.peek_token() {
+                k.clone()
+            } else {
+                unreachable!()
+            };
+
+            return match key.as_bytes()[0] {
+                b'-' | b'0'...b'9' => Self::term_num(tokenizer),
+                _ => Self::boolean(tokenizer),
             };
         }
 
@@ -587,7 +575,6 @@ impl Parser {
             Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
             Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
             Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
-            Err(TokenError::Eof) => ParseToken::Eof,
             _ => {
                 return Err(tokenizer.err_msg());
             }
diff --git a/tests/readme.rs b/tests/readme.rs
index 3f15f65..03f590d 100644
--- a/tests/readme.rs
+++ b/tests/readme.rs
@@ -486,8 +486,6 @@ fn readme_delete2() {
 
     let ret = jsonpath::delete(json_obj, "$.store.book").unwrap();
 
-    println!("{:?}", ret);
-
     assert_eq!(
         ret,
         json!({