Apply code coverage

freestrings 2019-06-20 22:33:21 +09:00
commit 5f832e8fe7
9 changed files with 273 additions and 175 deletions

View File

@@ -1,5 +1,9 @@
language: rust
sudo: false
sudo: required
addons:
apt:
packages:
- libssl-dev
cache: cargo
@@ -13,27 +17,22 @@ matrix:
- rust: stable
os: linux
env: RUST_BACKTRACE=1
addons:
chrome: stable
before_script:
- (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
- (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
- cargo install-update -a
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
before_cache: |
if [[ "$TRAVIS_RUST_VERSION" == stable ]]; then
cargo install cargo-tarpaulin -f
fi
script:
- cargo clean
- cargo build --verbose --all
- cargo test --verbose --all
after_success: |
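# tarpaulin writes an XML coverage report which is then uploaded to Codecov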
cargo tarpaulin --exclude-files nodejs wasm parser/mod.rs --out Xml
bash <(curl -s https://codecov.io/bash)
- rust: stable
os: osx
env: RUST_BACKTRACE=1
addons:
chrome: stable
before_script:
- (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
- (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
- cargo install-update -a
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
script:
- cargo clean
- cargo build --verbose --all
- cargo test --verbose --all
- language: node_js

View File

@@ -4,6 +4,7 @@
![crates.io](https://img.shields.io/crates/v/jsonpath_lib.svg)
![npm](https://img.shields.io/npm/v/jsonpath-rs.svg?label=npm%20%60jsonpath-rs%60)
![npm](https://img.shields.io/npm/v/jsonpath-wasm.svg?label=npm%20%60jsonpath-wasm%60)
![Codecov](https://img.shields.io/codecov/c/github/freestrings/jsonpath.svg?token=92c41b4e7cf04a9cbebc08f68c5da615)
A `Rust` implementation of [JsonPath](https://goessner.net/articles/JsonPath/). It provides a similar API interface for `Webassembly` and `Javascript` as well.

coverage.sh Executable file (9 additions)
View File

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
#
# cargo install cargo-tarpaulin
#
set -e
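# run tarpaulin over every crate in the workspace, excluding the nodejs/wasm bindings and parser/mod.rs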
cargo tarpaulin --exclude-files nodejs wasm parser/mod.rs -v --all

View File

@@ -41,6 +41,38 @@ mod parser_tests {
interpreter.start()
}
#[test]
fn parse_error() {
setup();
fn invalid(path: &str) {
if let Err(_) = run(path) {
assert!(true);
} else {
assert!(false);
}
}
invalid("$[]");
invalid("$[a]");
invalid("$[?($.a)]");
invalid("$[?(@.a > @.b]");
invalid("$[?(@.a < @.b&&(@.c < @.d)]");
invalid("@.");
invalid("$..[?(a <= @.a)]"); // invalid term value
invalid("$['a', b]");
invalid("$[0, >=]");
invalid("$[a:]");
invalid("$[:a]");
invalid("$[::a]");
invalid("$[:>]");
invalid("$[1:>]");
invalid("$[1,,]");
invalid("$[?]");
invalid("$[?(1 = 1)]");
invalid("$[?(1 = >)]");
}
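The same invalid paths are also observable through the crate's public API, where the error surfaces when the compiled template is applied (see the `compile_error` helper in the integration tests below). A minimal sketch, assuming the crate is imported as `jsonpath_lib as jsonpath` the way the tests do; the chosen path is just one of the invalid examples above:

```rust
use jsonpath_lib as jsonpath;
use serde_json::Value;

fn main() {
    // The invalid path is reported when the template runs, as an Err variant.
    let mut template = jsonpath::compile("$[?(1 = 1)]");
    assert!(template(&Value::Null).is_err());
}
```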
#[test]
fn parse_path() {
setup();
@@ -434,6 +466,18 @@ mod parser_tests {
])
);
assert_eq!(
run(r#"$[?(@ > 1)]"#),
Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::Relative,
ParseToken::Number(1_f64),
ParseToken::Filter(FilterToken::Greater),
ParseToken::ArrayEof
])
);
assert_eq!(
run("$[:]"),
Ok(vec![
@@ -463,36 +507,6 @@ mod parser_tests {
ParseToken::ArrayEof
])
);
match run("$[") {
Ok(_) => panic!(),
_ => {}
}
match run("$[]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[a]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[?($.a)]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[?(@.a > @.b]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[?(@.a < @.b&&(@.c < @.d)]") {
Ok(_) => panic!(),
_ => {}
}
}
#[test]
@@ -776,6 +790,21 @@ mod tokenizer_tests {
),
);
run(
r#"$['single\'1','single\'2']"#,
(
vec![
Token::Absolute(0),
Token::OpenArray(1),
Token::SingleQuoted(2, "single\'1".to_string()),
Token::Comma(13),
Token::SingleQuoted(14, "single\'2".to_string()),
Token::CloseArray(25),
],
Some(TokenError::Eof),
),
);
run(
r#"$["double\"quote"]"#,
(

View File

@@ -111,17 +111,7 @@ impl Parser {
fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#paths_dot");
let node = Self::path(prev, tokenizer)?;
match tokenizer.peek_token() {
Ok(Token::Equal(_))
| Ok(Token::NotEqual(_))
| Ok(Token::Little(_))
| Ok(Token::LittleOrEqual(_))
| Ok(Token::Greater(_))
| Ok(Token::GreaterOrEqual(_))
| Ok(Token::And(_))
| Ok(Token::Or(_)) => Ok(node),
_ => Self::paths(node, tokenizer),
}
Self::paths(node, tokenizer)
}
fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
@@ -201,7 +191,12 @@ impl Parser {
fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#boolean");
match tokenizer.next_token() {
Ok(Token::Key(_, v)) => {
Ok(Token::Key(_, ref v))
if {
let b = v.as_bytes();
b.len() > 0 && (b[0] == b't' || b[0] == b'T' || b[0] == b'f' || b[0] == b'F')
} =>
{
Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
}
_ => Err(tokenizer.err_msg()),
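With this change `boolean` only accepts a key whose first byte looks like `t`/`T`/`f`/`F`; any other key now falls through to the error arm instead of being parsed as `Bool(false)`. A standalone sketch of the guard (helper names are illustrative, not part of the parser):

```rust
// Illustrative helpers mirroring the guard above; not the crate's API.
fn looks_like_bool(key: &str) -> bool {
    let b = key.as_bytes();
    !b.is_empty() && (b[0] == b't' || b[0] == b'T' || b[0] == b'f' || b[0] == b'F')
}

fn parse_bool(key: &str) -> Option<bool> {
    if looks_like_bool(key) {
        // mirrors ParseToken::Bool(v.eq_ignore_ascii_case("true"))
        Some(key.eq_ignore_ascii_case("true"))
    } else {
        None // the parser returns tokenizer.err_msg() in this case
    }
}

fn main() {
    assert_eq!(parse_bool("true"), Some(true));
    assert_eq!(parse_bool("False"), Some(false));
    assert_eq!(parse_bool("1"), None);
}
```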
@@ -214,15 +209,11 @@ impl Parser {
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
if !(tokenizer.peek_is(SINGLE_QUOTE) || tokenizer.peek_is(DOUBLE_QUOTE)) {
return Err(tokenizer.err_msg());
}
match tokenizer.next_token() {
Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
keys.push(val);
}
_ => {}
_ => return Err(tokenizer.err_msg()),
}
Self::eat_whitespace(tokenizer);
@@ -241,7 +232,6 @@ impl Parser {
Self::array_keys(tokenizer, val)
}
}
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
_ => Err(tokenizer.err_msg()),
}
}
@@ -341,25 +331,33 @@ impl Parser {
fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
Self::eat_whitespace(tokenizer);
if tokenizer.peek_is(SPLIT) {
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
if tokenizer.peek_is(KEY) {
match tokenizer.next_token() {
Ok(Token::Key(pos, str_step)) => {
match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
Ok(step) => Ok(Some(step)),
Err(e) => Err(e),
}
}
_ => Ok(None),
}
} else {
Ok(None)
match tokenizer.peek_token() {
Ok(Token::Split(_)) => {
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
}
_ => {
return Ok(None);
}
}
match tokenizer.peek_token() {
Ok(Token::Key(_, _)) => {}
_ => {
return Ok(None);
}
}
match tokenizer.next_token() {
Ok(Token::Key(pos, str_step)) => {
match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
Ok(step) => Ok(Some(step)),
Err(e) => Err(e),
}
}
_ => {
unreachable!();
}
} else {
Ok(None)
}
}
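`range_value` now peeks for an explicit `Split`/`Key` token pair before reading the optional `:<number>` part of a slice, which is what lets paths such as `$[:]`, `$[1:3]` or `$[::2]` parse cleanly. A hedged usage sketch through the public `selector` API shown in the integration tests; the sample data and the expected slice (end-exclusive) are assumptions:

```rust
use jsonpath_lib as jsonpath;
use serde_json::json;

fn main() {
    let data = json!(["a", "b", "c", "d"]);
    let mut selector = jsonpath::selector(&data);

    // Assumed result under conventional JSONPath slice semantics: indices 1 and 2.
    let sliced = selector("$[1:3]").unwrap();
    assert_eq!(sliced, vec![&json!("b"), &json!("c")]);
}
```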
@@ -423,7 +421,6 @@ impl Parser {
Self::eat_whitespace(tokenizer);
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
}
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
_ => Err(tokenizer.err_msg()),
}
}
@@ -509,7 +506,6 @@ impl Parser {
Ok(Self::node(ParseToken::Number(number)))
}
},
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
_ => Err(tokenizer.err_msg()),
}
}
@@ -530,14 +526,6 @@ impl Parser {
}
}
fn peek_key(tokenizer: &mut TokenReader) -> Option<String> {
if let Ok(Token::Key(_, k)) = tokenizer.peek_token() {
Some(k.clone())
} else {
None
}
}
fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#term");
@@ -563,15 +551,15 @@ impl Parser {
}
if tokenizer.peek_is(KEY) {
return match Self::peek_key(tokenizer) {
Some(key) => match key.chars().next() {
Some(ch) => match ch {
'-' | '0'...'9' => Self::term_num(tokenizer),
_ => Self::boolean(tokenizer),
},
_ => Err(tokenizer.err_msg()),
},
_ => Err(tokenizer.err_msg()),
let key = if let Ok(Token::Key(_, k)) = tokenizer.peek_token() {
k.clone()
} else {
unreachable!()
};
return match key.as_bytes()[0] {
b'-' | b'0'...b'9' => Self::term_num(tokenizer),
_ => Self::boolean(tokenizer),
};
}
@@ -587,7 +575,6 @@ impl Parser {
Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
Err(TokenError::Eof) => ParseToken::Eof,
_ => {
return Err(tokenizer.err_msg());
}

View File

@@ -995,28 +995,44 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
.push(Some(ExprTerm::Number(Number::from_f64(*v).unwrap())));
}
ParseToken::Filter(ref ft) => {
if let Some(Some(ref right)) = self.terms.pop() {
if let Some(Some(left)) = self.terms.pop() {
let mut ret = None;
match ft {
FilterToken::Equal => left.eq(right, &mut ret),
FilterToken::NotEqual => left.ne(right, &mut ret),
FilterToken::Greater => left.gt(right, &mut ret),
FilterToken::GreaterOrEqual => left.ge(right, &mut ret),
FilterToken::Little => left.lt(right, &mut ret),
FilterToken::LittleOrEqual => left.le(right, &mut ret),
FilterToken::And => left.and(right, &mut ret),
FilterToken::Or => left.or(right, &mut ret),
};
let ref right = match self.terms.pop() {
Some(Some(right)) => right,
Some(None) => ExprTerm::Json(
None,
match &self.current {
Some(current) => current.to_vec(),
_ => unreachable!(),
},
),
_ => panic!("empty term right"),
};
if let Some(e) = ret {
self.terms.push(Some(e));
}
} else {
unreachable!()
}
} else {
unreachable!()
let left = match self.terms.pop() {
Some(Some(left)) => left,
Some(None) => ExprTerm::Json(
None,
match &self.current {
Some(current) => current.to_vec(),
_ => unreachable!(),
},
),
_ => panic!("empty term left"),
};
let mut ret = None;
match ft {
FilterToken::Equal => left.eq(right, &mut ret),
FilterToken::NotEqual => left.ne(right, &mut ret),
FilterToken::Greater => left.gt(right, &mut ret),
FilterToken::GreaterOrEqual => left.ge(right, &mut ret),
FilterToken::Little => left.lt(right, &mut ret),
FilterToken::LittleOrEqual => left.le(right, &mut ret),
FilterToken::And => left.and(right, &mut ret),
FilterToken::Or => left.or(right, &mut ret),
};
if let Some(e) = ret {
self.terms.push(Some(e));
}
}
ParseToken::Range(from, to, step) => {

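The `Filter` handling above now materializes a missing operand as an `ExprTerm::Json` built from the current node, so a bare `@` (or a bare path) can sit on either side of a comparison; the new `op_compare` test in the next file exercises the type-mismatch cases. A hedged sketch of the happy path through the public `selector` API; the sample data and the expected result are assumptions:

```rust
use jsonpath_lib as jsonpath;
use serde_json::json;

fn main() {
    let data = json!([1, 2, 3]);
    let mut selector = jsonpath::selector(&data);

    // `@` alone now stands for the current node; assumed to keep the elements greater than 1.
    let matched = selector("$[?(@ > 1)]").unwrap();
    assert_eq!(matched, vec![&json!(2), &json!(3)]);
}
```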
View File

@@ -343,6 +343,22 @@ fn op_complex() {
);
}
#[test]
fn op_compare() {
setup();
for path in [
r#"$[?("1" == 1)]"#,
r#"$[?(1 == "1")]"#,
r#"$[?(true == 1)]"#,
r#"$[?(@ == 1)]"#,
]
.iter()
{
select_and_then_compare(path, json!({}), json!([Value::Null]));
}
}
#[test]
fn example() {
setup();

View File

@@ -7,50 +7,81 @@ use serde::Deserialize;
use serde_json::Value;
use common::{compare_result, read_contents, read_json, setup};
use jsonpath::JsonPathError;
mod common;
#[test]
fn compile() {
let compile_object = |path| {
let mut template = jsonpath::compile(path);
let json_obj = read_json("./benches/data_obj.json");
let json = template(&json_obj).unwrap();
let ret = json!([
{"id": 2,"name": "Gray Berry"},
{"id": 2,"name": "Gray Berry"}
]);
compare_result(json, ret);
};
let compile_array = |path| {
let mut template = jsonpath::compile(path);
let json_obj = read_json("./benches/data_array.json");
let json = template(&json_obj).unwrap();
let ret = json!([
{"id": 2,"name": "Gray Berry"},
{"id": 2,"name": "Rosetta Erickson"}
]);
compare_result(json, ret);
};
fn compile_error() {
let mut template = jsonpath::compile("$[");
if let Err(JsonPathError::Path(_)) = template(&Value::Null) {
assert!(true);
} else {
assert!(false);
}
}
setup();
let mut template = jsonpath::compile("$..friends[2]");
let json_obj = read_json("./benches/data_obj.json");
let json = template(&json_obj).unwrap();
let ret = json!([
{"id": 2,"name": "Gray Berry"},
{"id": 2,"name": "Gray Berry"}
]);
compare_result(json, ret);
let json_obj = read_json("./benches/data_array.json");
let json = template(&json_obj).unwrap();
let ret = json!([
{"id": 2,"name": "Gray Berry"},
{"id": 2,"name": "Rosetta Erickson"}
]);
compare_result(json, ret);
compile_object("$..friends[2]");
compile_array("$..friends[2]");
compile_error();
}
#[test]
fn selector() {
setup();
let json_obj = read_json("./benches/data_obj.json");
let mut reader = jsonpath::selector(&json_obj);
let json = reader("$..friends[2]").unwrap();
let ret = json!([
{"id": 2,"name": "Gray Berry"},
{"id": 2,"name": "Gray Berry"}
]);
compare_result(json, ret);
fn select<'a, F>(selector: &mut F, path: &'a str, target: Value)
where
F: FnMut(&'a str) -> Result<Vec<&Value>, JsonPathError>,
{
let json = selector(path).unwrap();
compare_result(json, target);
};
let json = reader("$..friends[0]").unwrap();
let ret = json!([
{"id": 0},
{"id": 0,"name": "Millicent Norman"}
]);
compare_result(json, ret);
let json_obj = read_json("./benches/data_obj.json");
let mut selector = jsonpath::selector(&json_obj);
select(
&mut selector,
"$..friends[2]",
json!([
{"id": 2,"name": "Gray Berry"},
{"id": 2,"name": "Gray Berry"}
]),
);
select(
&mut selector,
"$..friends[0]",
json!([
{"id": 0},
{"id": 0,"name": "Millicent Norman"}
]),
);
}
#[test]
@@ -61,31 +92,43 @@ fn selector_as() {
name: Option<String>,
}
fn select<'a, F>(selector: &mut F, path: &'a str, target: Vec<Friend>)
where
F: FnMut(&'a str) -> Result<Vec<Friend>, JsonPathError>,
{
let json = selector(path).unwrap();
assert_eq!(json, target);
};
let json_obj = read_json("./benches/data_obj.json");
let mut selector = jsonpath::selector_as::<Friend>(&json_obj);
let json = selector("$..friends[2]").unwrap();
let ret = vec![
Friend {
id: 2,
name: Some("Gray Berry".to_string()),
},
Friend {
id: 2,
name: Some("Gray Berry".to_string()),
},
];
assert_eq!(json, ret);
select(
&mut selector,
"$..friends[2]",
vec![
Friend {
id: 2,
name: Some("Gray Berry".to_string()),
},
Friend {
id: 2,
name: Some("Gray Berry".to_string()),
},
],
);
let json = selector("$..friends[0]").unwrap();
let ret = vec![
Friend { id: 0, name: None },
Friend {
id: 0,
name: Some("Millicent Norman".to_string()),
},
];
assert_eq!(json, ret);
select(
&mut selector,
"$..friends[0]",
vec![
Friend { id: 0, name: None },
Friend {
id: 0,
name: Some("Millicent Norman".to_string()),
},
],
);
}
#[test]

View File

@@ -486,8 +486,6 @@ fn readme_delete2() {
let ret = jsonpath::delete(json_obj, "$.store.book").unwrap();
println!("{:?}", ret);
assert_eq!(
ret,
json!({