Mirror of https://github.com/fluencelabs/jsonpath (synced 2025-04-25 09:22:19 +00:00)

Commit 319186b1d9: apply clippy lints - tests
Parent: 2f0d29d644
@@ -14,16 +14,20 @@ branches:
 matrix:
   include:
-    - rust: stable
+    - rust: nightly
       os: linux
       env: RUST_BACKTRACE=1
 before_cache: |
   if [[ "$TRAVIS_RUST_VERSION" == stable ]]; then
     cargo install cargo-tarpaulin -f
   fi
+before_script:
+  - rustup component add clippy
 script:
   - cargo clean
+  - cargo clippy -- -D warnings
   - cargo build --verbose --all
+  - cargo clippy --all-targets --all-features -- -D warnings -A clippy::cognitive_complexity
   - cargo test --verbose --all
 after_success: |
   cargo tarpaulin --exclude-files nodejs wasm parser/mod.rs --out Xml
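Note on the CI flags: `-D warnings` promotes every rustc and clippy warning to an error, so any lint fails the build, while `-A clippy::cognitive_complexity` re-allows that single lint for the whole run. As a sketch of an alternative (not something this commit does), the same lint can be silenced per item instead of globally:

    // Hypothetical illustration: allow the lint on one function only,
    // instead of passing -A clippy::cognitive_complexity to every invocation.
    #[allow(clippy::cognitive_complexity)]
    fn deeply_branching_parser_step() {
        // ...
    }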
build.sh (+3)

@@ -2,6 +2,9 @@
 set -e
 
+cargo clippy -- -D warnings && \
+cargo clippy --all-targets --all-features -- -D warnings -A clippy::cognitive_complexity
+
 # project_root
 DIR="$(pwd)"
 WASM="${DIR}"/wasm
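Because build.sh runs under `set -e` and the two clippy invocations are chained with `&&`, the script aborts on the first lint failure, before the wasm build steps that follow.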
@@ -13,8 +13,8 @@ mod utils {
     use std::str::FromStr;
 
     pub fn string_to_num<F, S: FromStr>(string: &str, msg_handler: F) -> Result<S, String>
     where
         F: Fn() -> String,
     {
         match string.parse() {
             Ok(n) => Ok(n),
@@ -195,13 +195,13 @@ impl Parser {
         debug!("#boolean");
         match tokenizer.next_token() {
             Ok(Token::Key(_, ref v))
                 if {
                     let b = v.as_bytes();
                     !b.is_empty() && (b[0] == b't' || b[0] == b'T' || b[0] == b'f' || b[0] == b'F')
                 } =>
             {
                 Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
             }
             _ => Err(tokenizer.err_msg()),
         }
     }
@@ -557,7 +557,7 @@ impl Parser {
         };
 
         return match key.as_bytes()[0] {
-            b'-' | b'0'...b'9' => Self::term_num(tokenizer),
+            b'-' | b'0'..=b'9' => Self::term_num(tokenizer),
             _ => Self::boolean(tokenizer),
         };
     }
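The `...` inclusive range pattern is deprecated in favor of `..=` (rustc's `ellipsis_inclusive_range_patterns` lint), and with `-D warnings` the old spelling fails the build. A minimal, self-contained sketch of the surviving form (not this crate's code):

    // `..=` is the inclusive range pattern; the old `...` spelling warns.
    fn classify(b: u8) -> &'static str {
        match b {
            b'-' | b'0'..=b'9' => "numeric",
            _ => "other",
        }
    }

    fn main() {
        assert_eq!(classify(b'7'), "numeric");
        assert_eq!(classify(b'x'), "other");
    }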
@@ -687,7 +687,6 @@ pub trait NodeVisitor {
     fn end_term(&mut self) {}
 }
 
-
 #[cfg(test)]
 mod parser_tests {
     use parser::{FilterToken, NodeVisitor, ParseToken, Parser};
@@ -732,11 +731,7 @@ mod parser_tests {
         setup();
 
         fn invalid(path: &str) {
-            if let Err(_) = run(path) {
-                assert!(true);
-            } else {
-                assert!(false);
-            }
+            assert!(run(path).is_err());
         }
 
         invalid("$[]");
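The replaced block trips two clippy lints: `redundant_pattern_matching` (matching `Err(_)` just to branch, instead of calling `is_err()`) and `assertions_on_constants` (`assert!(true)` / `assert!(false)` assert nothing useful). A standalone sketch of the rewrite, with `run` as a hypothetical stub:

    // Hypothetical stub standing in for the parser entry point.
    fn run(path: &str) -> Result<(), String> {
        Err(format!("invalid path: {}", path))
    }

    fn main() {
        // Before: if let Err(_) = run("$[]") { assert!(true); } else { assert!(false); }
        // After: one direct assertion, no constant asserts.
        assert!(run("$[]").is_err());
    }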
@@ -821,24 +816,21 @@ mod parser_tests {
             ])
         );
 
-        match run("$.") {
-            Ok(_) => panic!(),
-            _ => {}
+        if run("$.").is_ok() {
+            panic!();
         }
 
-        match run("$..") {
-            Ok(_) => panic!(),
-            _ => {}
+        if run("$..").is_ok() {
+            panic!();
         }
 
-        match run("$. a") {
-            Ok(_) => panic!(),
-            _ => {}
+        if run("$. a").is_ok() {
+            panic!();
         }
     }
 
     #[test]
-    fn parse_array_sytax() {
+    fn parse_array_syntax() {
         setup();
 
         assert_eq!(
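A `match` with one meaningful arm plus a `_ => {}` catch-all triggers `clippy::single_match`, which suggests `if let` or, as here, a plain boolean test. A sketch with a hypothetical `run`:

    fn run(path: &str) -> Result<(), String> {
        Err(format!("parse error near {}", path)) // stand-in stub
    }

    fn main() {
        // Before: match run("$.") { Ok(_) => panic!(), _ => {} }
        if run("$.").is_ok() {
            panic!("expected \"$.\" to be rejected");
        }
    }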
@@ -1211,24 +1203,20 @@ mod parser_tests {
             ])
         );
 
-        match run("$[1.1]") {
-            Ok(_) => panic!(),
-            _ => {}
+        if run("$[1.1]").is_ok() {
+            panic!();
         }
 
-        match run("$[?(1.1<.2)]") {
-            Ok(_) => panic!(),
-            _ => {}
+        if run("$[?(1.1<.2)]").is_ok() {
+            panic!();
         }
 
-        match run("$[?(1.1<2.)]") {
-            Ok(_) => panic!(),
-            _ => {}
+        if run("$[?(1.1<2.)]").is_ok() {
+            panic!();
         }
 
-        match run("$[?(1.1<2.a)]") {
-            Ok(_) => panic!(),
-            _ => {}
+        if run("$[?(1.1<2.a)]").is_ok() {
+            panic!();
         }
     }
 }
@@ -1253,7 +1241,7 @@ mod tokenizer_tests {
     }
 
     fn run(input: &str, expected: (Vec<Token>, Option<TokenError>)) {
-        let (vec, err) = collect_token(input.clone());
+        let (vec, err) = collect_token(input);
         assert_eq!((vec, err), expected, "\"{}\"", input);
     }
 
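`input` is a `&str`, and calling `.clone()` on a reference only copies the pointer; clippy flags this (the `clone_on_copy` lint, since `&str` is `Copy`). Passing the reference directly is equivalent, as in this minimal sketch:

    fn takes_str(s: &str) -> usize {
        s.len()
    }

    fn main() {
        let input: &str = "abc";
        // let n = takes_str(input.clone()); // clippy: using `clone` on a `Copy` type
        let n = takes_str(input); // the reference is copied implicitly anyway
        assert_eq!(n, 3);
    }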
@@ -161,7 +161,7 @@ impl<'a> Tokenizer<'a> {
         if let Some('\\') = val.chars().last() {
             self.input.next_char().map_err(to_token_error)?;
             let _ = val.pop();
-            let (_, mut val_remain) = self
+            let (_, val_remain) = self
                 .input
                 .take_while(|c| *c != ch)
                 .map_err(to_token_error)?;
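`val_remain` is never mutated after the destructuring, so the `mut` binding raises rustc's `unused_mut` warning, which the CI's `-D warnings` turns into a hard error.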
@@ -30,7 +30,7 @@ pub fn read_contents(path: &str) -> String {
 }
 
 #[allow(dead_code)]
-pub fn select_and_then_compare<'a>(path: &str, json: Value, target: Value) {
+pub fn select_and_then_compare(path: &str, json: Value, target: Value) {
     let mut selector = Selector::default();
     let result = selector
         .str_path(path)
@@ -50,7 +50,7 @@ pub fn select_and_then_compare(path: &str, json: Value, target: Value) {
 }
 
 #[allow(dead_code)]
-pub fn compare_result<'a>(result: Vec<&Value>, target: Value) {
+pub fn compare_result(result: Vec<&Value>, target: Value) {
     let result = serde_json::to_value(result).unwrap();
     assert_eq!(result, target);
 }
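Both helpers declared a lifetime parameter that never appears in their signatures, which clippy reports as `extra_unused_lifetimes`; lifetime elision on the `&Value` references is sufficient. A compressed sketch (assuming `serde_json`, which these tests already use):

    use serde_json::Value;

    // Before: pub fn compare_result<'a>(result: Vec<&Value>, target: Value) —
    // the declared 'a is never used, so clippy flags it. Elision suffices:
    pub fn compare_result(result: Vec<&Value>, target: Value) {
        assert_eq!(serde_json::to_value(result).unwrap(), target);
    }

    fn main() {
        let v = Value::from(1);
        compare_result(vec![&v], serde_json::json!([1]));
    }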
@@ -624,7 +624,7 @@ fn quote() {
 fn all_filter() {
     setup();
 
-    for path in vec![r#"$.*"#, r#"$[*]"#] {
+    for path in &[r#"$.*"#, r#"$[*]"#] {
         select_and_then_compare(
             path,
             json!(["string", 42, { "key": "value" }, [0, 1]]),
@@ -632,7 +632,7 @@ fn all_filter() {
         );
     }
 
-    for path in vec![r#"$..*"#, r#"$..[*]"#] {
+    for path in &[r#"$..*"#, r#"$..[*]"#] {
         select_and_then_compare(
             path,
             json!(["string", 42, { "key": "value" }, [0, 1]]),
@@ -640,7 +640,7 @@ fn all_filter() {
         );
     }
 
-    for path in vec![r#"$.*.*"#, r#"$[*].*"#, r#"$.*[*]"#, r#"$[*][*]"#] {
+    for path in &[r#"$.*.*"#, r#"$[*].*"#, r#"$.*[*]"#, r#"$[*][*]"#] {
         select_and_then_compare(
             path,
             json!(["string", 42, { "key": "value" }, [0, 1]]),
@@ -648,7 +648,7 @@ fn all_filter() {
         );
     }
 
-    for path in vec![r#"$..friends.*"#, r#"$[*].friends.*"#] {
+    for path in &[r#"$..friends.*"#, r#"$[*].friends.*"#] {
         select_and_then_compare(
             path,
             read_json("./benches/data_array.json"),
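`for path in vec![…]` heap-allocates a `Vec` just to iterate it once; `clippy::useless_vec` suggests borrowing a fixed-size array instead. Note the loop variable becomes a `&&str`, which still works at call sites via deref coercion. Minimal sketch:

    fn check(path: &str) {
        assert!(path.starts_with('$'));
    }

    fn main() {
        // Before: for path in vec![r#"$.*"#, r#"$[*]"#] { ... } — clippy::useless_vec
        for path in &[r#"$.*"#, r#"$[*]"#] {
            // `path` is a `&&str`; deref coercion turns it into `&str` below.
            check(path);
        }
    }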
@@ -37,11 +37,7 @@ fn compile() {
 
     fn compile_error() {
         let mut template = jsonpath::compile("$[");
-        if let Err(JsonPathError::Path(_)) = template(&Value::Null) {
-            assert!(true);
-        } else {
-            assert!(false);
-        }
+        assert!(template(&Value::Null).is_err());
     }
 
     setup();
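One behavioral nuance worth noting: the original arm matched `JsonPathError::Path(_)` specifically, while `is_err()` accepts any error variant, so the assertion is marginally weaker in exchange for satisfying `redundant_pattern_matching` and `assertions_on_constants`.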
|
@ -20,11 +20,8 @@ fn selector_mut() {
|
|||||||
.unwrap()
|
.unwrap()
|
||||||
.value(read_json("./benches/example.json"))
|
.value(read_json("./benches/example.json"))
|
||||||
.replace_with(&mut |v| {
|
.replace_with(&mut |v| {
|
||||||
match v {
|
if let Value::Number(n) = v {
|
||||||
Value::Number(n) => {
|
nums.push(n.as_f64().unwrap());
|
||||||
nums.push(n.as_f64().unwrap());
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
}
|
||||||
Value::String("a".to_string())
|
Value::String("a".to_string())
|
||||||
})
|
})
|
||||||
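Same `clippy::single_match` rewrite as in the parser tests above: the lone `Value::Number` arm becomes an `if let` that binds the number directly, and the empty `_ => {}` arm disappears.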