parser code coverage

freestrings 2019-06-20 17:22:46 +09:00
parent ad47444b7a
commit c19c75dac5
4 changed files with 111 additions and 88 deletions

coverage.sh Executable file

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
#
# cargo install cargo-tarpaulin
#
set -e
cargo tarpaulin --exclude-files nodejs wasm parser/mod.rs -v --all
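
A minimal usage sketch, assuming cargo-tarpaulin is already installed and the script is run from the repository root (the flags are just the ones the script passes):

    cargo install cargo-tarpaulin   # one-time install of the coverage tool
    ./coverage.sh                   # verbose (-v) coverage over the whole workspace (--all),
                                    # excluding the nodejs, wasm and parser/mod.rs files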


@@ -41,6 +41,38 @@ mod parser_tests {
        interpreter.start()
    }

+    #[test]
+    fn parse_error() {
+        setup();
+
+        fn invalid(path: &str) {
+            if let Err(_) = run(path) {
+                assert!(true);
+            } else {
+                assert!(false);
+            }
+        }
+
+        invalid("$[]");
+        invalid("$[a]");
+        invalid("$[?($.a)]");
+        invalid("$[?(@.a > @.b]");
+        invalid("$[?(@.a < @.b&&(@.c < @.d)]");
+        invalid("@.");
+        invalid("$..[?(a <= @.a)]"); // invalid term value
+        invalid("$['a', b]");
+        invalid("$[0, >=]");
+        invalid("$[a:]");
+        invalid("$[:a]");
+        invalid("$[::a]");
+        invalid("$[:>]");
+        invalid("$[1:>]");
+        invalid("$[1,,]");
+        invalid("$[?]");
+        invalid("$[?(1 = 1)]");
+        invalid("$[?(1 = >)]");
+    }
+
    #[test]
    fn parse_path() {
        setup();
@@ -434,6 +466,18 @@ mod parser_tests {
            ])
        );

+        assert_eq!(
+            run(r#"$[?(@ > 1)]"#),
+            Ok(vec![
+                ParseToken::Absolute,
+                ParseToken::Array,
+                ParseToken::Relative,
+                ParseToken::Number(1_f64),
+                ParseToken::Filter(FilterToken::Greater),
+                ParseToken::ArrayEof
+            ])
+        );
+
        assert_eq!(
            run("$[:]"),
            Ok(vec![
@@ -463,36 +507,6 @@ mod parser_tests {
                ParseToken::ArrayEof
            ])
        );
-
-        match run("$[") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[a]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[?($.a)]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[?(@.a > @.b]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
-
-        match run("$[?(@.a < @.b&&(@.c < @.d)]") {
-            Ok(_) => panic!(),
-            _ => {}
-        }
    }

    #[test]
@@ -776,6 +790,21 @@ mod tokenizer_tests {
            ),
        );

+        run(
+            r#"$['single\'1','single\'2']"#,
+            (
+                vec![
+                    Token::Absolute(0),
+                    Token::OpenArray(1),
+                    Token::SingleQuoted(2, "single\'1".to_string()),
+                    Token::Comma(13),
+                    Token::SingleQuoted(14, "single\'2".to_string()),
+                    Token::CloseArray(25),
+                ],
+                Some(TokenError::Eof),
+            ),
+        );
+
        run(
            r#"$["double\"quote"]"#,
            (


@@ -111,17 +111,7 @@ impl Parser {
    fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#paths_dot");
        let node = Self::path(prev, tokenizer)?;
-        match tokenizer.peek_token() {
-            Ok(Token::Equal(_))
-            | Ok(Token::NotEqual(_))
-            | Ok(Token::Little(_))
-            | Ok(Token::LittleOrEqual(_))
-            | Ok(Token::Greater(_))
-            | Ok(Token::GreaterOrEqual(_))
-            | Ok(Token::And(_))
-            | Ok(Token::Or(_)) => Ok(node),
-            _ => Self::paths(node, tokenizer),
-        }
+        Self::paths(node, tokenizer)
    }

    fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
@@ -201,7 +191,12 @@ impl Parser {
    fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#boolean");
        match tokenizer.next_token() {
-            Ok(Token::Key(_, v)) => {
+            Ok(Token::Key(_, ref v))
+                if {
+                    let b = v.as_bytes();
+                    b.len() > 0 && (b[0] == b't' || b[0] == b'T' || b[0] == b'f' || b[0] == b'F')
+                } =>
+            {
                Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
            }
            _ => Err(tokenizer.err_msg()),
@@ -214,15 +209,11 @@ impl Parser {
            Self::eat_token(tokenizer);
            Self::eat_whitespace(tokenizer);

-            if !(tokenizer.peek_is(SINGLE_QUOTE) || tokenizer.peek_is(DOUBLE_QUOTE)) {
-                return Err(tokenizer.err_msg());
-            }
-
            match tokenizer.next_token() {
                Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
                    keys.push(val);
                }
-                _ => {}
+                _ => return Err(tokenizer.err_msg()),
            }

            Self::eat_whitespace(tokenizer);
@@ -241,7 +232,6 @@ impl Parser {
                    Self::array_keys(tokenizer, val)
                }
            }
-            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
            _ => Err(tokenizer.err_msg()),
        }
    }
@@ -341,25 +331,33 @@ impl Parser {
    fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
        Self::eat_whitespace(tokenizer);

-        if tokenizer.peek_is(SPLIT) {
-            Self::eat_token(tokenizer);
-            Self::eat_whitespace(tokenizer);
-
-            if tokenizer.peek_is(KEY) {
-                match tokenizer.next_token() {
-                    Ok(Token::Key(pos, str_step)) => {
-                        match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
-                            Ok(step) => Ok(Some(step)),
-                            Err(e) => Err(e),
-                        }
-                    }
-                    _ => Ok(None),
-                }
-            } else {
-                Ok(None)
-            }
-        } else {
-            Ok(None)
-        }
+        match tokenizer.peek_token() {
+            Ok(Token::Split(_)) => {
+                Self::eat_token(tokenizer);
+                Self::eat_whitespace(tokenizer);
+            }
+            _ => {
+                return Ok(None);
+            }
+        }
+
+        match tokenizer.peek_token() {
+            Ok(Token::Key(_, _)) => {}
+            _ => {
+                return Ok(None);
+            }
+        }
+
+        match tokenizer.next_token() {
+            Ok(Token::Key(pos, str_step)) => {
+                match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
+                    Ok(step) => Ok(Some(step)),
+                    Err(e) => Err(e),
+                }
+            }
+            _ => {
+                unreachable!();
+            }
+        }
    }
@@ -423,7 +421,6 @@ impl Parser {
                Self::eat_whitespace(tokenizer);
                Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
            }
-            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
            _ => Err(tokenizer.err_msg()),
        }
    }
@@ -509,7 +506,6 @@ impl Parser {
                    Ok(Self::node(ParseToken::Number(number)))
                }
            },
-            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
            _ => Err(tokenizer.err_msg()),
        }
    }
@@ -530,14 +526,6 @@ impl Parser {
        }
    }

-    fn peek_key(tokenizer: &mut TokenReader) -> Option<String> {
-        if let Ok(Token::Key(_, k)) = tokenizer.peek_token() {
-            Some(k.clone())
-        } else {
-            None
-        }
-    }
-
    fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#term");
@@ -563,15 +551,15 @@ impl Parser {
        }

        if tokenizer.peek_is(KEY) {
-            return match Self::peek_key(tokenizer) {
-                Some(key) => match key.chars().next() {
-                    Some(ch) => match ch {
-                        '-' | '0'...'9' => Self::term_num(tokenizer),
-                        _ => Self::boolean(tokenizer),
-                    },
-                    _ => Err(tokenizer.err_msg()),
-                },
-                _ => Err(tokenizer.err_msg()),
-            };
+            let key = if let Ok(Token::Key(_, k)) = tokenizer.peek_token() {
+                k.clone()
+            } else {
+                unreachable!()
+            };
+
+            return match key.as_bytes()[0] {
+                b'-' | b'0'...b'9' => Self::term_num(tokenizer),
+                _ => Self::boolean(tokenizer),
+            };
        }
@@ -587,7 +575,6 @@ impl Parser {
            Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
            Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
            Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
-            Err(TokenError::Eof) => ParseToken::Eof,
            _ => {
                return Err(tokenizer.err_msg());
            }


@@ -486,8 +486,6 @@ fn readme_delete2() {
    let ret = jsonpath::delete(json_obj, "$.store.book").unwrap();

-    println!("{:?}", ret);
-
    assert_eq!(
        ret,
        json!({