the clippy lints

This commit is contained in:
freestrings 2019-06-25 23:56:13 +09:00
commit cab5177811
16 changed files with 1160 additions and 1246 deletions

View File

@ -14,17 +14,23 @@ branches:
matrix: matrix:
include: include:
- rust: stable - rust: nightly
os: linux os: linux
env: RUST_BACKTRACE=1 env: RUST_BACKTRACE=1
before_cache: | before_cache: |
if [[ "$TRAVIS_RUST_VERSION" == stable ]]; then if [[ "$TRAVIS_RUST_VERSION" == stable ]]; then
cargo install cargo-tarpaulin -f cargo install cargo-tarpaulin -f
fi fi
before_script:
- rustup component add clippy
script: script:
- cargo clean - cargo clean
- cargo clippy -- -D warnings
- cargo build --verbose --all - cargo build --verbose --all
- cargo clippy --all-targets --all-features -- -D warnings -A clippy::cognitive_complexity
- cargo test --verbose --all - cargo test --verbose --all
- cd wasm && cargo clippy -- -D warnings -A clippy::suspicious_else_formatting
- cd ../nodejs/native && cargo clippy -- -D warnings
after_success: | after_success: |
cargo tarpaulin --exclude-files nodejs wasm parser/mod.rs --out Xml cargo tarpaulin --exclude-files nodejs wasm parser/mod.rs --out Xml
bash <(curl -s https://codecov.io/bash) bash <(curl -s https://codecov.io/bash)

View File

@ -109,7 +109,7 @@ fn bench_select_as(b: &mut Bencher) {
#[bench] #[bench]
fn bench_delete(b: &mut Bencher) { fn bench_delete(b: &mut Bencher) {
let json = get_json(); let json = get_json();
let mut selector = SelectorMut::new(); let mut selector = SelectorMut::default();
let _ = selector.str_path(get_path()); let _ = selector.str_path(get_path());
b.iter(move || { b.iter(move || {
@ -123,13 +123,13 @@ fn bench_delete(b: &mut Bencher) {
fn bench_select_to_compare_with_delete(b: &mut Bencher) { fn bench_select_to_compare_with_delete(b: &mut Bencher) {
let json = &get_json(); let json = &get_json();
let mut selector = Selector::new(); let mut selector = Selector::default();
let _ = selector.str_path(get_path()); let _ = selector.str_path(get_path());
b.iter(move || { b.iter(move || {
for _ in 1..100 { for _ in 1..100 {
let json = json.clone(); let json = json.clone();
let mut s = Selector::new(); let mut s = Selector::default();
let _ = s.compiled_path(selector.node_ref().unwrap()).value(&json); let _ = s.compiled_path(selector.node_ref().unwrap()).value(&json);
let _ = s.select(); let _ = s.select();
} }

View File

@ -53,7 +53,7 @@ fn _selector(b: &mut Bencher, index: usize) {
let json = get_json(); let json = get_json();
b.iter(move || { b.iter(move || {
for _ in 1..100 { for _ in 1..100 {
let mut selector = jsonpath::Selector::new(); let mut selector = jsonpath::Selector::default();
let _ = selector.str_path(get_path(index)); let _ = selector.str_path(get_path(index));
selector.value(&json); selector.value(&json);
let r = selector.select(); let r = selector.select();

View File

@ -28,6 +28,16 @@ __cargo_clean () {
cd "${DIR}" && cargo clean cd "${DIR}" && cargo clean
} }
if [ "$1" = "clippy" ]
then
echo
__msg "clippy"
cargo clippy -- -D warnings && \
cargo clippy --all-targets --all-features -- -D warnings -A clippy::cognitive_complexity && \
cd "${WASM}" && cargo clippy -- -A clippy::suspicious_else_formatting && \
cd "${NODEJS}" && cargo clippy
fi
echo echo
__msg "clean" __msg "clean"
rm -rf \ rm -rf \

View File

@ -206,7 +206,7 @@ declare_types! {
{ {
let guard = ctx.lock(); let guard = ctx.lock();
let mut this = this.borrow_mut(&guard); let mut this = this.borrow_mut(&guard);
let _ = this.path(&path); this.path(&path);
} }
Ok(JsUndefined::new().upcast()) Ok(JsUndefined::new().upcast())
@ -219,7 +219,7 @@ declare_types! {
{ {
let guard = ctx.lock(); let guard = ctx.lock();
let mut this = this.borrow_mut(&guard); let mut this = this.borrow_mut(&guard);
let _ = this.value(&json_str); this.value(&json_str);
} }
Ok(JsUndefined::new().upcast()) Ok(JsUndefined::new().upcast())

View File

@ -132,7 +132,7 @@ extern crate serde_json;
use serde_json::Value; use serde_json::Value;
pub use parser::parser::{Node, Parser}; pub use parser::Parser;
pub use select::JsonPathError; pub use select::JsonPathError;
pub use select::{Selector, SelectorMut}; pub use select::{Selector, SelectorMut};
@ -169,10 +169,10 @@ mod select;
/// ]); /// ]);
/// ``` /// ```
pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPathError> { pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPathError> {
let node = Parser::compile(path); let node = parser::Parser::compile(path);
move |json| match &node { move |json| match &node {
Ok(node) => { Ok(node) => {
let mut selector = Selector::new(); let mut selector = Selector::default();
selector.compiled_path(node).value(json).select() selector.compiled_path(node).value(json).select()
} }
Err(e) => Err(JsonPathError::Path(e.to_string())), Err(e) => Err(JsonPathError::Path(e.to_string())),
@ -213,8 +213,9 @@ pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPath
/// &json!({"name": "친구2", "age": 20}) /// &json!({"name": "친구2", "age": 20})
/// ]); /// ]);
/// ``` /// ```
#[allow(clippy::needless_lifetimes)]
pub fn selector<'a>(json: &'a Value) -> impl FnMut(&'a str) -> Result<Vec<&Value>, JsonPathError> { pub fn selector<'a>(json: &'a Value) -> impl FnMut(&'a str) -> Result<Vec<&Value>, JsonPathError> {
let mut selector = Selector::new(); let mut selector = Selector::default();
let _ = selector.value(json); let _ = selector.value(json);
move |path: &str| selector.str_path(path)?.reset_value().select() move |path: &str| selector.str_path(path)?.reset_value().select()
} }
@ -268,7 +269,7 @@ pub fn selector<'a>(json: &'a Value) -> impl FnMut(&'a str) -> Result<Vec<&Value
pub fn selector_as<T: serde::de::DeserializeOwned>( pub fn selector_as<T: serde::de::DeserializeOwned>(
json: &Value, json: &Value,
) -> impl FnMut(&str) -> Result<Vec<T>, JsonPathError> + '_ { ) -> impl FnMut(&str) -> Result<Vec<T>, JsonPathError> + '_ {
let mut selector = Selector::new(); let mut selector = Selector::default();
let _ = selector.value(json); let _ = selector.value(json);
move |path: &str| selector.str_path(path)?.reset_value().select_as() move |path: &str| selector.str_path(path)?.reset_value().select_as()
} }
@ -299,7 +300,7 @@ pub fn selector_as<T: serde::de::DeserializeOwned>(
/// ]); /// ]);
/// ``` /// ```
pub fn select<'a>(json: &'a Value, path: &'a str) -> Result<Vec<&'a Value>, JsonPathError> { pub fn select<'a>(json: &'a Value, path: &'a str) -> Result<Vec<&'a Value>, JsonPathError> {
Selector::new().str_path(path)?.value(json).select() Selector::default().str_path(path)?.value(json).select()
} }
/// It is the same to `select` function but it return the result as string. /// It is the same to `select` function but it return the result as string.
@ -327,7 +328,7 @@ pub fn select<'a>(json: &'a Value, path: &'a str) -> Result<Vec<&'a Value>, Json
/// ``` /// ```
pub fn select_as_str(json_str: &str, path: &str) -> Result<String, JsonPathError> { pub fn select_as_str(json_str: &str, path: &str) -> Result<String, JsonPathError> {
let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?; let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
let ret = Selector::new().str_path(path)?.value(&json).select()?; let ret = Selector::default().str_path(path)?.value(&json).select()?;
serde_json::to_string(&ret).map_err(|e| JsonPathError::Serde(e.to_string())) serde_json::to_string(&ret).map_err(|e| JsonPathError::Serde(e.to_string()))
} }
@ -374,7 +375,7 @@ pub fn select_as<T: serde::de::DeserializeOwned>(
path: &str, path: &str,
) -> Result<Vec<T>, JsonPathError> { ) -> Result<Vec<T>, JsonPathError> {
let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?; let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
Selector::new().str_path(path)?.value(&json).select_as() Selector::default().str_path(path)?.value(&json).select_as()
} }
/// Delete(= replace with null) the JSON property using the jsonpath. /// Delete(= replace with null) the JSON property using the jsonpath.
@ -410,7 +411,7 @@ pub fn select_as<T: serde::de::DeserializeOwned>(
/// ]})); /// ]}));
/// ``` /// ```
pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> { pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
let mut selector = SelectorMut::new(); let mut selector = SelectorMut::default();
let ret = selector let ret = selector
.str_path(path)? .str_path(path)?
.value(value) .value(value)
@ -466,7 +467,7 @@ pub fn replace_with<F>(value: Value, path: &str, fun: &mut F) -> Result<Value, J
where where
F: FnMut(&Value) -> Value, F: FnMut(&Value) -> Value,
{ {
let mut selector = SelectorMut::new(); let mut selector = SelectorMut::default();
let ret = selector let ret = selector
.str_path(path)? .str_path(path)?
.value(value) .value(value)

View File

@ -1,10 +1,695 @@
pub mod parser;
mod path_reader; mod path_reader;
pub(crate) mod tokenizer; mod tokenizer;
use std::str::FromStr;
use self::tokenizer::*;
// Placeholder position for tokens constructed only for variant comparison
// (see `close_token`, which matches via `partial_eq`, ignoring position).
const DUMMY: usize = 0;

// Parser result type; the error is a human-readable message from the tokenizer.
type ParseResult<T> = Result<T, String>;
mod utils {
    use std::str::FromStr;

    /// Parses `string` into any `FromStr` type, converting a parse failure
    /// into the error message produced by `msg_handler`.
    pub fn string_to_num<F, S: FromStr>(string: &str, msg_handler: F) -> Result<S, String>
    where
        F: Fn() -> String,
    {
        string.parse().map_err(|_| msg_handler())
    }
}
/// Tokens emitted while visiting the parsed JSONPath AST.
#[derive(Debug, PartialEq, Clone)]
pub enum ParseToken {
    // '$'
    Absolute,
    // '@'
    Relative,
    // '.'
    In,
    // '..'
    Leaves,
    // '*'
    All,
    Key(String),
    Keys(Vec<String>),
    // []
    Array,
    // meta token: emitted by `NodeVisitor::visit` after an `Array` subtree
    // to mark the end of the bracket scope
    ArrayEof,
    // ?( filter )
    Filter(FilterToken),
    // 1 : 2
    Range(Option<isize>, Option<isize>, Option<usize>),
    // 1, 2, 3
    Union(Vec<isize>),
    Number(f64),
    Bool(bool),
    Eof,
}
/// Comparison and logical operators allowed inside a `?( ... )` filter.
#[derive(Debug, PartialEq, Clone)]
pub enum FilterToken {
    // equality comparison
    Equal,
    // negated equality comparison
    NotEqual,
    // less-than
    Little,
    // less-than-or-equal
    LittleOrEqual,
    // greater-than
    Greater,
    // greater-than-or-equal
    GreaterOrEqual,
    // logical AND of two filter expressions
    And,
    // logical OR of two filter expressions
    Or,
}
/// A binary AST node: `token` is the operation, with optional `left`/`right`
/// subtrees (e.g. `In` joins a preceding path on the left to a key on the right).
#[derive(Debug, Clone)]
pub struct Node {
    left: Option<Box<Node>>,
    right: Option<Box<Node>>,
    token: ParseToken,
}
/// Recursive-descent parser turning a JSONPath string into a `Node` AST.
pub struct Parser;

impl Parser {
    /// Compiles `input` into an AST, or returns an error message pointing
    /// at the offending position.
    pub fn compile(input: &str) -> ParseResult<Node> {
        let mut tokenizer = TokenReader::new(input);
        // `json_path` already returns `ParseResult<Node>`; re-wrapping it in
        // `Ok(..?)` was redundant (clippy: needless_question_mark).
        Self::json_path(&mut tokenizer)
    }

    /// Entry rule: every path must start with `$` (absolute root).
    fn json_path(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#json_path");
        match tokenizer.next_token() {
            Ok(Token::Absolute(_)) => {
                let node = Self::node(ParseToken::Absolute);
                Self::paths(node, tokenizer)
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// Zero or more path segments (`.x`, `..x`, `[...]`) appended to `prev`.
    fn paths(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#paths");
        match tokenizer.peek_token() {
            Ok(Token::Dot(_)) => {
                Self::eat_token(tokenizer);
                Self::paths_dot(prev, tokenizer)
            }
            Ok(Token::OpenArray(_)) => {
                Self::eat_token(tokenizer);
                Self::eat_whitespace(tokenizer);
                let node = Self::array(prev, tokenizer)?;
                Self::paths(node, tokenizer)
            }
            // Anything else ends the segment list; the caller validates what follows.
            _ => Ok(prev),
        }
    }

    /// One `.`-prefixed segment, then any further segments.
    fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#paths_dot");
        let node = Self::path(prev, tokenizer)?;
        Self::paths(node, tokenizer)
    }

    /// What may follow a consumed `.`: another `.` (descendants), `*`, a key, or `[`.
    fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#path");
        match tokenizer.peek_token() {
            Ok(Token::Dot(_)) => Self::path_leaves(prev, tokenizer),
            Ok(Token::Asterisk(_)) => Self::path_in_all(prev, tokenizer),
            Ok(Token::Key(_, _)) => Self::path_in_key(prev, tokenizer),
            Ok(Token::OpenArray(_)) => {
                Self::eat_token(tokenizer);
                Self::array(prev, tokenizer)
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// `..` descendant selector: `..*`, `..[...]`, or `..key`.
    fn path_leaves(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#path_leaves");
        Self::eat_token(tokenizer);
        match tokenizer.peek_token() {
            Ok(Token::Asterisk(_)) => Self::path_leaves_all(prev, tokenizer),
            Ok(Token::OpenArray(_)) => {
                let mut leaves_node = Self::node(ParseToken::Leaves);
                leaves_node.left = Some(Box::new(prev));
                // `paths` already returns `ParseResult<Node>` (clippy:
                // needless_question_mark).
                Self::paths(leaves_node, tokenizer)
            }
            _ => Self::path_leaves_key(prev, tokenizer),
        }
    }

    /// `..key`
    fn path_leaves_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#path_leaves_key");
        Ok(Node {
            token: ParseToken::Leaves,
            left: Some(Box::new(prev)),
            right: Some(Box::new(Self::key(tokenizer)?)),
        })
    }

    /// `..*`
    fn path_leaves_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#path_leaves_all");
        Self::eat_token(tokenizer);
        Ok(Node {
            token: ParseToken::Leaves,
            left: Some(Box::new(prev)),
            right: Some(Box::new(Self::node(ParseToken::All))),
        })
    }

    /// `.*`
    fn path_in_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#path_in_all");
        Self::eat_token(tokenizer);
        Ok(Node {
            token: ParseToken::In,
            left: Some(Box::new(prev)),
            right: Some(Box::new(Self::node(ParseToken::All))),
        })
    }

    /// `.key`
    fn path_in_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#path_in_key");
        Ok(Node {
            token: ParseToken::In,
            left: Some(Box::new(prev)),
            right: Some(Box::new(Self::key(tokenizer)?)),
        })
    }

    /// Consumes a bare `Key` token as a `ParseToken::Key` node.
    fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#key");
        match tokenizer.next_token() {
            Ok(Token::Key(_, v)) => Ok(Self::node(ParseToken::Key(v))),
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// Parses a boolean literal inside a filter. Note: only the first byte is
    /// checked (`t`/`T`/`f`/`F`), and the value is `true` iff the whole word is
    /// "true" (case-insensitive) — any other t/f-prefixed word becomes `false`.
    fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#boolean");
        match tokenizer.next_token() {
            Ok(Token::Key(_, ref v))
                if {
                    let b = v.as_bytes();
                    !b.is_empty() && (b[0] == b't' || b[0] == b'T' || b[0] == b'f' || b[0] == b'F')
                } =>
            {
                Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// Continues a quoted-key union: `['a', 'b', ...]`, starting from `first_key`.
    fn array_keys(tokenizer: &mut TokenReader, first_key: String) -> ParseResult<Node> {
        let mut keys = vec![first_key];

        while tokenizer.peek_is(COMMA) {
            Self::eat_token(tokenizer);
            Self::eat_whitespace(tokenizer);

            match tokenizer.next_token() {
                Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
                    keys.push(val);
                }
                _ => return Err(tokenizer.err_msg()),
            }

            Self::eat_whitespace(tokenizer);
        }

        Ok(Self::node(ParseToken::Keys(keys)))
    }

    /// A quoted value in brackets: a single key (`['a']`) or a key union.
    fn array_quote_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#array_quote_value");
        match tokenizer.next_token() {
            Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
                if !tokenizer.peek_is(COMMA) {
                    Ok(Self::node(ParseToken::Key(val)))
                } else {
                    Self::array_keys(tokenizer, val)
                }
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// The interior of `[...]`: `?(filter)`, `*`, or an index/range/quoted value.
    fn array_start(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#array_start");
        match tokenizer.peek_token() {
            Ok(Token::Question(_)) => {
                Self::eat_token(tokenizer);
                Ok(Node {
                    token: ParseToken::Array,
                    left: Some(Box::new(prev)),
                    right: Some(Box::new(Self::filter(tokenizer)?)),
                })
            }
            Ok(Token::Asterisk(_)) => {
                Self::eat_token(tokenizer);
                Ok(Node {
                    token: ParseToken::Array,
                    left: Some(Box::new(prev)),
                    right: Some(Box::new(Self::node(ParseToken::All))),
                })
            }
            _ => Ok(Node {
                token: ParseToken::Array,
                left: Some(Box::new(prev)),
                right: Some(Box::new(Self::array_value(tokenizer)?)),
            }),
        }
    }

    /// A full bracket expression including the closing `]`.
    fn array(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#array");
        let ret = Self::array_start(prev, tokenizer)?;
        Self::eat_whitespace(tokenizer);
        Self::close_token(ret, Token::CloseArray(DUMMY), tokenizer)
    }

    /// A numeric bracket value, possibly the start of a union (`,`) or range (`:`).
    fn array_value_key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#array_value_key");
        match tokenizer.next_token() {
            Ok(Token::Key(pos, ref val)) => {
                let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
                Self::eat_whitespace(tokenizer);

                match tokenizer.peek_token() {
                    Ok(Token::Comma(_)) => Self::union(digit, tokenizer),
                    Ok(Token::Split(_)) => Self::range_from(digit, tokenizer),
                    _ => Ok(Self::node(ParseToken::Number(digit as f64))),
                }
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// Dispatches on the first token inside `[...]`.
    fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#array_value");
        match tokenizer.peek_token() {
            Ok(Token::Key(_, _)) => Self::array_value_key(tokenizer),
            Ok(Token::Split(_)) => {
                Self::eat_token(tokenizer);
                Self::range_to(tokenizer)
            }
            Ok(Token::DoubleQuoted(_, _)) | Ok(Token::SingleQuoted(_, _)) => {
                Self::array_quote_value(tokenizer)
            }
            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
            _ => {
                Self::eat_token(tokenizer);
                Err(tokenizer.err_msg())
            }
        }
    }

    /// Comma-separated index list: `[1, 2, 3]`, starting from `num`.
    fn union(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#union");
        let mut values = vec![num];
        // Same comma test as `array_keys`; replaces the verbose
        // `match { Ok(Token::Comma(_)) => true, _ => false }` loop condition.
        while tokenizer.peek_is(COMMA) {
            Self::eat_token(tokenizer);
            Self::eat_whitespace(tokenizer);
            match tokenizer.next_token() {
                Ok(Token::Key(pos, ref val)) => {
                    let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
                    values.push(digit);
                }
                _ => {
                    return Err(tokenizer.err_msg());
                }
            }
        }
        Ok(Self::node(ParseToken::Union(values)))
    }

    /// Optional `:<number>` step suffix within a range; `Ok(None)` when absent.
    fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
        Self::eat_whitespace(tokenizer);
        match tokenizer.peek_token() {
            Ok(Token::Split(_)) => {
                Self::eat_token(tokenizer);
                Self::eat_whitespace(tokenizer);
            }
            _ => {
                return Ok(None);
            }
        }

        match tokenizer.peek_token() {
            Ok(Token::Key(_, _)) => {}
            _ => {
                return Ok(None);
            }
        }

        match tokenizer.next_token() {
            Ok(Token::Key(pos, str_step)) => {
                // Success is the step value; failure becomes a positioned error.
                utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)).map(Some)
            }
            _ => {
                // The peek above guaranteed a `Key` token is next.
                unreachable!();
            }
        }
    }

    /// Range with a known start: `[from:]`, `[from:to]`, `[from::step]`, ...
    fn range_from(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#range_from");
        Self::eat_token(tokenizer);
        Self::eat_whitespace(tokenizer);

        match tokenizer.peek_token() {
            Ok(Token::Key(_, _)) => Self::range(from, tokenizer),
            Ok(Token::Split(_)) => match Self::range_value(tokenizer)? {
                Some(step) => Ok(Self::node(ParseToken::Range(Some(from), None, Some(step)))),
                _ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
            },
            _ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
        }
    }

    /// Range with no start: `[:]`, `[:to]`, `[::step]`, `[:to:step]`.
    fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#range_to");

        if let Some(step) = Self::range_value(tokenizer)? {
            return Ok(Self::node(ParseToken::Range(None, None, Some(step))));
        }

        if let Ok(Token::CloseArray(_)) = tokenizer.peek_token() {
            return Ok(Self::node(ParseToken::Range(None, None, None)));
        }

        match tokenizer.next_token() {
            Ok(Token::Key(pos, ref to_str)) => {
                let to = utils::string_to_num(to_str, || tokenizer.err_msg_with_pos(pos))?;
                let step = Self::range_value(tokenizer)?;
                Ok(Self::node(ParseToken::Range(None, Some(to), step)))
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// Range with both endpoints: `[from:to]`, optionally `[from:to:step]`.
    fn range(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#range");
        match tokenizer.next_token() {
            Ok(Token::Key(pos, ref str_to)) => {
                let to = utils::string_to_num(str_to, || tokenizer.err_msg_with_pos(pos))?;
                let step = Self::range_value(tokenizer)?;
                Ok(Self::node(ParseToken::Range(Some(from), Some(to), step)))
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// `?( exprs )` — a parenthesized filter expression.
    fn filter(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#filter");
        match tokenizer.next_token() {
            Ok(Token::OpenParenthesis(_)) => {
                let ret = Self::exprs(tokenizer)?;
                Self::eat_whitespace(tokenizer);
                Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// One expression (possibly parenthesized), then any `&&`/`||` chaining.
    fn exprs(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        Self::eat_whitespace(tokenizer);
        debug!("#exprs");
        let node = match tokenizer.peek_token() {
            Ok(Token::OpenParenthesis(_)) => {
                Self::eat_token(tokenizer);
                trace!("\t-exprs - open_parenthesis");
                let ret = Self::exprs(tokenizer)?;
                Self::eat_whitespace(tokenizer);
                Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)?
            }
            _ => {
                trace!("\t-exprs - else");
                Self::expr(tokenizer)?
            }
        };
        Self::eat_whitespace(tokenizer);
        Self::condition_expr(node, tokenizer)
    }

    /// Optional `&&`/`||` continuation joining `prev` with further expressions.
    fn condition_expr(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#condition_expr");
        match tokenizer.peek_token() {
            Ok(Token::And(_)) => {
                Self::eat_token(tokenizer);
                Ok(Node {
                    token: ParseToken::Filter(FilterToken::And),
                    left: Some(Box::new(prev)),
                    right: Some(Box::new(Self::exprs(tokenizer)?)),
                })
            }
            Ok(Token::Or(_)) => {
                Self::eat_token(tokenizer);
                Ok(Node {
                    token: ParseToken::Filter(FilterToken::Or),
                    left: Some(Box::new(prev)),
                    right: Some(Box::new(Self::exprs(tokenizer)?)),
                })
            }
            _ => Ok(prev),
        }
    }

    /// A term, optionally followed by a comparison operator and another term.
    /// A bare term is only legal when it is an `@`-relative property test.
    fn expr(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#expr");

        let has_prop_candidate = match tokenizer.peek_token() {
            Ok(Token::At(_)) => true,
            _ => false,
        };

        let node = Self::term(tokenizer)?;
        Self::eat_whitespace(tokenizer);

        if match tokenizer.peek_token() {
            Ok(Token::Equal(_))
            | Ok(Token::NotEqual(_))
            | Ok(Token::Little(_))
            | Ok(Token::LittleOrEqual(_))
            | Ok(Token::Greater(_))
            | Ok(Token::GreaterOrEqual(_)) => true,
            _ => false,
        } {
            Self::op(node, tokenizer)
        } else if has_prop_candidate {
            Ok(node)
        } else {
            Err(tokenizer.err_msg())
        }
    }

    /// A numeric term: integer, or float when followed by `.`.
    fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#term_num");
        match tokenizer.next_token() {
            Ok(Token::Key(pos, val)) => match tokenizer.peek_token() {
                Ok(Token::Dot(_)) => Self::term_num_float(val.as_str(), tokenizer),
                _ => {
                    let number = utils::string_to_num(&val, || tokenizer.err_msg_with_pos(pos))?;
                    Ok(Self::node(ParseToken::Number(number)))
                }
            },
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// The fractional half of a float literal; `num` is the integral part.
    fn term_num_float(num: &str, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#term_num_float");
        Self::eat_token(tokenizer);
        match tokenizer.next_token() {
            Ok(Token::Key(pos, frac)) => {
                // Rejoin integral and fractional parts into one literal; the
                // previous manual push_str chain also took a needless `&num` borrow.
                let f = format!("{}.{}", num, frac);
                let number = utils::string_to_num(&f, || tokenizer.err_msg_with_pos(pos))?;
                Ok(Self::node(ParseToken::Number(number)))
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    /// A filter operand: `@`-relative path, `$`-absolute path, quoted string,
    /// number, or boolean.
    fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#term");

        if tokenizer.peek_is(AT) {
            Self::eat_token(tokenizer);
            let node = Self::node(ParseToken::Relative);

            return match tokenizer.peek_token() {
                Ok(Token::Whitespace(_, _)) => {
                    Self::eat_whitespace(tokenizer);
                    Ok(node)
                }
                _ => Self::paths(node, tokenizer),
            };
        }

        if tokenizer.peek_is(ABSOLUTE) {
            return Self::json_path(tokenizer);
        }

        if tokenizer.peek_is(DOUBLE_QUOTE) || tokenizer.peek_is(SINGLE_QUOTE) {
            return Self::array_quote_value(tokenizer);
        }

        if tokenizer.peek_is(KEY) {
            let key = if let Ok(Token::Key(_, k)) = tokenizer.peek_token() {
                k.clone()
            } else {
                unreachable!()
            };
            // NOTE(review): assumes a Key token is never empty — verify in tokenizer.
            return match key.as_bytes()[0] {
                b'-' | b'0'..=b'9' => Self::term_num(tokenizer),
                _ => Self::boolean(tokenizer),
            };
        }

        Err(tokenizer.err_msg())
    }

    /// A comparison operator and its right-hand term, with `prev` on the left.
    fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#op");
        let token = match tokenizer.next_token() {
            Ok(Token::Equal(_)) => ParseToken::Filter(FilterToken::Equal),
            Ok(Token::NotEqual(_)) => ParseToken::Filter(FilterToken::NotEqual),
            Ok(Token::Little(_)) => ParseToken::Filter(FilterToken::Little),
            Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
            Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
            Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
            _ => {
                return Err(tokenizer.err_msg());
            }
        };

        Self::eat_whitespace(tokenizer);

        Ok(Node {
            token,
            left: Some(Box::new(prev)),
            right: Some(Box::new(Self::term(tokenizer)?)),
        })
    }

    /// Discards consecutive whitespace tokens.
    fn eat_whitespace(tokenizer: &mut TokenReader) {
        while let Ok(Token::Whitespace(_, _)) = tokenizer.peek_token() {
            let _ = tokenizer.next_token();
        }
    }

    /// Discards exactly one token.
    fn eat_token(tokenizer: &mut TokenReader) {
        let _ = tokenizer.next_token();
    }

    /// Builds a childless node carrying `token`.
    fn node(token: ParseToken) -> Node {
        Node {
            left: None,
            right: None,
            token,
        }
    }

    /// Consumes the next token and returns `ret` iff it matches `token`'s
    /// variant (position ignored via `partial_eq`).
    fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#close_token");
        match tokenizer.next_token() {
            Ok(ref t) if t.partial_eq(token) => Ok(ret),
            _ => Err(tokenizer.err_msg()),
        }
    }
}
/// Visitor over the parsed AST: `visit` walks the tree and delivers
/// `ParseToken`s to `visit_token` in evaluation order.
pub trait NodeVisitor {
    /// Recursively walks `node`, emitting its tokens. `&*n` re-borrows were
    /// needless — `&Box<Node>` deref-coerces to `&Node` directly.
    fn visit(&mut self, node: &Node) {
        match &node.token {
            // Leaf tokens: emit directly, nothing to recurse into.
            ParseToken::Absolute
            | ParseToken::Relative
            | ParseToken::All
            | ParseToken::Key(_)
            | ParseToken::Keys(_)
            | ParseToken::Range(_, _, _)
            | ParseToken::Union(_)
            | ParseToken::Number(_)
            | ParseToken::Bool(_) => {
                self.visit_token(&node.token);
            }
            // In-order: left subtree, operator, right subtree.
            ParseToken::In | ParseToken::Leaves => {
                if let Some(n) = &node.left {
                    self.visit(n);
                }

                self.visit_token(&node.token);

                if let Some(n) = &node.right {
                    self.visit(n);
                }
            }
            // Arrays also emit `ArrayEof` to close the bracket scope.
            ParseToken::Array => {
                if let Some(n) = &node.left {
                    self.visit(n);
                }

                self.visit_token(&node.token);

                if let Some(n) = &node.right {
                    self.visit(n);
                }

                self.visit_token(&ParseToken::ArrayEof);
            }
            // Logical operators: post-order (both operands, then the operator).
            ParseToken::Filter(FilterToken::And) | ParseToken::Filter(FilterToken::Or) => {
                if let Some(n) = &node.left {
                    self.visit(n);
                }

                if let Some(n) = &node.right {
                    self.visit(n);
                }

                self.visit_token(&node.token);
            }
            // Comparison filters: each operand is terminated via `end_term`
            // before the comparison token itself is emitted.
            ParseToken::Filter(_) => {
                if let Some(n) = &node.left {
                    self.visit(n);
                }

                self.end_term();

                if let Some(n) = &node.right {
                    self.visit(n);
                }

                self.end_term();

                self.visit_token(&node.token);
            }
            _ => {}
        }
    }

    /// Receives every token produced by `visit`.
    fn visit_token(&mut self, token: &ParseToken);

    /// Called at operand boundaries of a comparison filter; default no-op.
    fn end_term(&mut self) {}
}
#[cfg(test)] #[cfg(test)]
mod parser_tests { mod parser_tests {
use parser::parser::{FilterToken, NodeVisitor, ParseToken, Parser}; use parser::{FilterToken, NodeVisitor, ParseToken, Parser};
struct NodeVisitorTestImpl<'a> { struct NodeVisitorTestImpl<'a> {
input: &'a str, input: &'a str,
@ -46,11 +731,7 @@ mod parser_tests {
setup(); setup();
fn invalid(path: &str) { fn invalid(path: &str) {
if let Err(_) = run(path) { assert!(run(path).is_err());
assert!(true);
} else {
assert!(false);
}
} }
invalid("$[]"); invalid("$[]");
@ -135,24 +816,21 @@ mod parser_tests {
]) ])
); );
match run("$.") { if run("$.").is_ok() {
Ok(_) => panic!(), panic!();
_ => {}
} }
match run("$..") { if run("$..").is_ok() {
Ok(_) => panic!(), panic!();
_ => {}
} }
match run("$. a") { if run("$. a").is_ok() {
Ok(_) => panic!(), panic!();
_ => {}
} }
} }
#[test] #[test]
fn parse_array_sytax() { fn parse_array_syntax() {
setup(); setup();
assert_eq!( assert_eq!(
@ -525,24 +1203,20 @@ mod parser_tests {
]) ])
); );
match run("$[1.1]") { if run("$[1.1]").is_ok() {
Ok(_) => panic!(), panic!();
_ => {}
} }
match run("$[?(1.1<.2)]") { if run("$[?(1.1<.2)]").is_ok() {
Ok(_) => panic!(), panic!();
_ => {}
} }
match run("$[?(1.1<2.)]") { if run("$[?(1.1<2.)]").is_ok() {
Ok(_) => panic!(), panic!();
_ => {}
} }
match run("$[?(1.1<2.a)]") { if run("$[?(1.1<2.a)]").is_ok() {
Ok(_) => panic!(), panic!();
_ => {}
} }
} }
} }
@ -567,7 +1241,7 @@ mod tokenizer_tests {
} }
fn run(input: &str, expected: (Vec<Token>, Option<TokenError>)) { fn run(input: &str, expected: (Vec<Token>, Option<TokenError>)) {
let (vec, err) = collect_token(input.clone()); let (vec, err) = collect_token(input);
assert_eq!((vec, err), expected, "\"{}\"", input); assert_eq!((vec, err), expected, "\"{}\"", input);
} }

View File

@ -1,696 +0,0 @@
use std::str::FromStr;
use super::tokenizer::*;
const DUMMY: usize = 0;
type ParseResult<T> = Result<T, String>;
mod utils {
use std::str::FromStr;
pub fn string_to_num<F, S: FromStr>(string: &String, msg_handler: F) -> Result<S, String>
where
F: Fn() -> String,
{
match string.as_str().parse() {
Ok(n) => Ok(n),
_ => Err(msg_handler()),
}
}
}
#[derive(Debug, PartialEq, Clone)]
pub enum ParseToken {
// '$'
Absolute,
// '@'
Relative,
// '.'
In,
// '..'
Leaves,
// '*'
All,
Key(String),
Keys(Vec<String>),
// []
Array,
// 메타토큰
ArrayEof,
// ?( filter )
Filter(FilterToken),
// 1 : 2
Range(Option<isize>, Option<isize>, Option<usize>),
// 1, 2, 3
Union(Vec<isize>),
Number(f64),
Bool(bool),
Eof,
}
#[derive(Debug, PartialEq, Clone)]
pub enum FilterToken {
Equal,
NotEqual,
Little,
LittleOrEqual,
Greater,
GreaterOrEqual,
And,
Or,
}
#[derive(Debug, Clone)]
pub struct Node {
left: Option<Box<Node>>,
right: Option<Box<Node>>,
token: ParseToken,
}
pub struct Parser;
impl Parser {
pub fn compile(input: &str) -> ParseResult<Node> {
let mut tokenizer = TokenReader::new(input);
Ok(Self::json_path(&mut tokenizer)?)
}
fn json_path(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#json_path");
match tokenizer.next_token() {
Ok(Token::Absolute(_)) => {
let node = Self::node(ParseToken::Absolute);
Self::paths(node, tokenizer)
}
_ => Err(tokenizer.err_msg()),
}
}
fn paths(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#paths");
match tokenizer.peek_token() {
Ok(Token::Dot(_)) => {
Self::eat_token(tokenizer);
Self::paths_dot(prev, tokenizer)
}
Ok(Token::OpenArray(_)) => {
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
let node = Self::array(prev, tokenizer)?;
Self::paths(node, tokenizer)
}
_ => Ok(prev),
}
}
fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#paths_dot");
let node = Self::path(prev, tokenizer)?;
Self::paths(node, tokenizer)
}
fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#path");
match tokenizer.peek_token() {
Ok(Token::Dot(_)) => Self::path_leaves(prev, tokenizer),
Ok(Token::Asterisk(_)) => Self::path_in_all(prev, tokenizer),
Ok(Token::Key(_, _)) => Self::path_in_key(prev, tokenizer),
Ok(Token::OpenArray(_)) => {
Self::eat_token(tokenizer);
Self::array(prev, tokenizer)
}
_ => Err(tokenizer.err_msg()),
}
}
fn path_leaves(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#path_leaves");
Self::eat_token(tokenizer);
match tokenizer.peek_token() {
Ok(Token::Asterisk(_)) => Self::path_leaves_all(prev, tokenizer),
Ok(Token::OpenArray(_)) => {
let mut leaves_node = Self::node(ParseToken::Leaves);
leaves_node.left = Some(Box::new(prev));
Ok(Self::paths(leaves_node, tokenizer)?)
}
_ => Self::path_leaves_key(prev, tokenizer),
}
}
fn path_leaves_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#path_leaves_key");
Ok(Node {
token: ParseToken::Leaves,
left: Some(Box::new(prev)),
right: Some(Box::new(Self::key(tokenizer)?)),
})
}
fn path_leaves_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#path_leaves_all");
Self::eat_token(tokenizer);
Ok(Node {
token: ParseToken::Leaves,
left: Some(Box::new(prev)),
right: Some(Box::new(Self::node(ParseToken::All))),
})
}
fn path_in_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#path_in_all");
Self::eat_token(tokenizer);
Ok(Node {
token: ParseToken::In,
left: Some(Box::new(prev)),
right: Some(Box::new(Self::node(ParseToken::All))),
})
}
fn path_in_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#path_in_key");
Ok(Node {
token: ParseToken::In,
left: Some(Box::new(prev)),
right: Some(Box::new(Self::key(tokenizer)?)),
})
}
fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#key");
match tokenizer.next_token() {
Ok(Token::Key(_, v)) => Ok(Self::node(ParseToken::Key(v))),
_ => Err(tokenizer.err_msg()),
}
}
fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#boolean");
match tokenizer.next_token() {
Ok(Token::Key(_, ref v))
if {
let b = v.as_bytes();
b.len() > 0 && (b[0] == b't' || b[0] == b'T' || b[0] == b'f' || b[0] == b'F')
} =>
{
Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
}
_ => Err(tokenizer.err_msg()),
}
}
fn array_keys(tokenizer: &mut TokenReader, first_key: String) -> ParseResult<Node> {
let mut keys = vec![first_key];
while tokenizer.peek_is(COMMA) {
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
match tokenizer.next_token() {
Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
keys.push(val);
}
_ => return Err(tokenizer.err_msg()),
}
Self::eat_whitespace(tokenizer);
}
Ok(Self::node(ParseToken::Keys(keys)))
}
fn array_quote_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#array_quote_value");
match tokenizer.next_token() {
Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
if !tokenizer.peek_is(COMMA) {
Ok(Self::node(ParseToken::Key(val)))
} else {
Self::array_keys(tokenizer, val)
}
}
_ => Err(tokenizer.err_msg()),
}
}
fn array_start(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#array_start");
match tokenizer.peek_token() {
Ok(Token::Question(_)) => {
Self::eat_token(tokenizer);
Ok(Node {
token: ParseToken::Array,
left: Some(Box::new(prev)),
right: Some(Box::new(Self::filter(tokenizer)?)),
})
}
Ok(Token::Asterisk(_)) => {
Self::eat_token(tokenizer);
Ok(Node {
token: ParseToken::Array,
left: Some(Box::new(prev)),
right: Some(Box::new(Self::node(ParseToken::All))),
})
}
_ => Ok(Node {
token: ParseToken::Array,
left: Some(Box::new(prev)),
right: Some(Box::new(Self::array_value(tokenizer)?)),
}),
}
}
fn array(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#array");
let ret = Self::array_start(prev, tokenizer)?;
Self::eat_whitespace(tokenizer);
Self::close_token(ret, Token::CloseArray(DUMMY), tokenizer)
}
fn array_value_key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#array_value_key");
match tokenizer.next_token() {
Ok(Token::Key(pos, ref val)) => {
let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
Self::eat_whitespace(tokenizer);
match tokenizer.peek_token() {
Ok(Token::Comma(_)) => Self::union(digit, tokenizer),
Ok(Token::Split(_)) => Self::range_from(digit, tokenizer),
_ => Ok(Self::node(ParseToken::Number(digit as f64))),
}
}
_ => Err(tokenizer.err_msg()),
}
}
fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#array_value");
match tokenizer.peek_token() {
Ok(Token::Key(_, _)) => Self::array_value_key(tokenizer),
Ok(Token::Split(_)) => {
Self::eat_token(tokenizer);
Self::range_to(tokenizer)
}
Ok(Token::DoubleQuoted(_, _)) | Ok(Token::SingleQuoted(_, _)) => {
Self::array_quote_value(tokenizer)
}
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
_ => {
Self::eat_token(tokenizer);
Err(tokenizer.err_msg())
}
}
}
fn union(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#union");
let mut values = vec![num];
while match tokenizer.peek_token() {
Ok(Token::Comma(_)) => true,
_ => false,
} {
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
match tokenizer.next_token() {
Ok(Token::Key(pos, ref val)) => {
let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
values.push(digit);
}
_ => {
return Err(tokenizer.err_msg());
}
}
}
Ok(Self::node(ParseToken::Union(values)))
}
fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
Self::eat_whitespace(tokenizer);
match tokenizer.peek_token() {
Ok(Token::Split(_)) => {
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
}
_ => {
return Ok(None);
}
}
match tokenizer.peek_token() {
Ok(Token::Key(_, _)) => {}
_ => {
return Ok(None);
}
}
match tokenizer.next_token() {
Ok(Token::Key(pos, str_step)) => {
match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
Ok(step) => Ok(Some(step)),
Err(e) => Err(e),
}
}
_ => {
unreachable!();
}
}
}
fn range_from(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#range_from");
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
match tokenizer.peek_token() {
Ok(Token::Key(_, _)) => Self::range(from, tokenizer),
Ok(Token::Split(_)) => match Self::range_value(tokenizer)? {
Some(step) => Ok(Self::node(ParseToken::Range(Some(from), None, Some(step)))),
_ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
},
_ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
}
}
fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#range_to");
match Self::range_value(tokenizer)? {
Some(step) => return Ok(Self::node(ParseToken::Range(None, None, Some(step)))),
_ => {}
}
match tokenizer.peek_token() {
Ok(Token::CloseArray(_)) => {
return Ok(Self::node(ParseToken::Range(None, None, None)));
}
_ => {}
}
match tokenizer.next_token() {
Ok(Token::Key(pos, ref to_str)) => {
let to = utils::string_to_num(to_str, || tokenizer.err_msg_with_pos(pos))?;
let step = Self::range_value(tokenizer)?;
Ok(Self::node(ParseToken::Range(None, Some(to), step)))
}
_ => Err(tokenizer.err_msg()),
}
}
fn range(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#range");
match tokenizer.next_token() {
Ok(Token::Key(pos, ref str_to)) => {
let to = utils::string_to_num(str_to, || tokenizer.err_msg_with_pos(pos))?;
let step = Self::range_value(tokenizer)?;
Ok(Self::node(ParseToken::Range(Some(from), Some(to), step)))
}
_ => Err(tokenizer.err_msg()),
}
}
fn filter(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#filter");
match tokenizer.next_token() {
Ok(Token::OpenParenthesis(_)) => {
let ret = Self::exprs(tokenizer)?;
Self::eat_whitespace(tokenizer);
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
}
_ => Err(tokenizer.err_msg()),
}
}
fn exprs(tokenizer: &mut TokenReader) -> ParseResult<Node> {
Self::eat_whitespace(tokenizer);
debug!("#exprs");
let node = match tokenizer.peek_token() {
Ok(Token::OpenParenthesis(_)) => {
Self::eat_token(tokenizer);
trace!("\t-exprs - open_parenthesis");
let ret = Self::exprs(tokenizer)?;
Self::eat_whitespace(tokenizer);
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)?
}
_ => {
trace!("\t-exprs - else");
Self::expr(tokenizer)?
}
};
Self::eat_whitespace(tokenizer);
Self::condition_expr(node, tokenizer)
}
fn condition_expr(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#condition_expr");
match tokenizer.peek_token() {
Ok(Token::And(_)) => {
Self::eat_token(tokenizer);
Ok(Node {
token: ParseToken::Filter(FilterToken::And),
left: Some(Box::new(prev)),
right: Some(Box::new(Self::exprs(tokenizer)?)),
})
}
Ok(Token::Or(_)) => {
Self::eat_token(tokenizer);
Ok(Node {
token: ParseToken::Filter(FilterToken::Or),
left: Some(Box::new(prev)),
right: Some(Box::new(Self::exprs(tokenizer)?)),
})
}
_ => Ok(prev),
}
}
fn expr(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#expr");
let has_prop_candidate = match tokenizer.peek_token() {
Ok(Token::At(_)) => true,
_ => false,
};
let node = Self::term(tokenizer)?;
Self::eat_whitespace(tokenizer);
if match tokenizer.peek_token() {
Ok(Token::Equal(_))
| Ok(Token::NotEqual(_))
| Ok(Token::Little(_))
| Ok(Token::LittleOrEqual(_))
| Ok(Token::Greater(_))
| Ok(Token::GreaterOrEqual(_)) => true,
_ => false,
} {
Self::op(node, tokenizer)
} else if has_prop_candidate {
Ok(node)
} else {
return Err(tokenizer.err_msg());
}
}
fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#term_num");
match tokenizer.next_token() {
Ok(Token::Key(pos, val)) => match tokenizer.peek_token() {
Ok(Token::Dot(_)) => Self::term_num_float(val.as_str(), tokenizer),
_ => {
let number = utils::string_to_num(&val, || tokenizer.err_msg_with_pos(pos))?;
Ok(Self::node(ParseToken::Number(number)))
}
},
_ => Err(tokenizer.err_msg()),
}
}
fn term_num_float(mut num: &str, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#term_num_float");
Self::eat_token(tokenizer);
match tokenizer.next_token() {
Ok(Token::Key(pos, frac)) => {
let mut f = String::new();
f.push_str(&mut num);
f.push('.');
f.push_str(frac.as_str());
let number = utils::string_to_num(&f, || tokenizer.err_msg_with_pos(pos))?;
Ok(Self::node(ParseToken::Number(number)))
}
_ => Err(tokenizer.err_msg()),
}
}
fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#term");
if tokenizer.peek_is(AT) {
Self::eat_token(tokenizer);
let node = Self::node(ParseToken::Relative);
return match tokenizer.peek_token() {
Ok(Token::Whitespace(_, _)) => {
Self::eat_whitespace(tokenizer);
Ok(node)
}
_ => Self::paths(node, tokenizer),
};
}
if tokenizer.peek_is(ABSOLUTE) {
return Self::json_path(tokenizer);
}
if tokenizer.peek_is(DOUBLE_QUOTE) || tokenizer.peek_is(SINGLE_QUOTE) {
return Self::array_quote_value(tokenizer);
}
if tokenizer.peek_is(KEY) {
let key = if let Ok(Token::Key(_, k)) = tokenizer.peek_token() {
k.clone()
} else {
unreachable!()
};
return match key.as_bytes()[0] {
b'-' | b'0'...b'9' => Self::term_num(tokenizer),
_ => Self::boolean(tokenizer),
};
}
return Err(tokenizer.err_msg());
}
fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#op");
let token = match tokenizer.next_token() {
Ok(Token::Equal(_)) => ParseToken::Filter(FilterToken::Equal),
Ok(Token::NotEqual(_)) => ParseToken::Filter(FilterToken::NotEqual),
Ok(Token::Little(_)) => ParseToken::Filter(FilterToken::Little),
Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
_ => {
return Err(tokenizer.err_msg());
}
};
Self::eat_whitespace(tokenizer);
Ok(Node {
token,
left: Some(Box::new(prev)),
right: Some(Box::new(Self::term(tokenizer)?)),
})
}
    /// Consumes consecutive whitespace tokens so the next peek/next lands on
    /// a meaningful token.
    fn eat_whitespace(tokenizer: &mut TokenReader) {
        while let Ok(Token::Whitespace(_, _)) = tokenizer.peek_token() {
            let _ = tokenizer.next_token();
        }
    }
    /// Unconditionally consumes one token, discarding the result
    /// (including any tokenizer error).
    fn eat_token(tokenizer: &mut TokenReader) {
        let _ = tokenizer.next_token();
    }
fn node(token: ParseToken) -> Node {
Node {
left: None,
right: None,
token,
}
}
fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
debug!("#close_token");
match tokenizer.next_token() {
Ok(ref t) if t.partial_eq(token) => Ok(ret),
_ => Err(tokenizer.err_msg()),
}
}
}
pub trait NodeVisitor {
    /// Walks `node` in the traversal order appropriate for its token kind,
    /// calling `visit_token` at each step.
    ///
    /// `if let Some(..)` replaces the `match { Some(..) => .., _ => {} }`
    /// blocks (clippy::single_match); `&*n` reborrows were no-ops and are
    /// dropped in favor of deref coercion.
    fn visit(&mut self, node: &Node) {
        match &node.token {
            ParseToken::Absolute
            | ParseToken::Relative
            | ParseToken::All
            | ParseToken::Key(_)
            | ParseToken::Keys(_)
            | ParseToken::Range(_, _, _)
            | ParseToken::Union(_)
            | ParseToken::Number(_)
            | ParseToken::Bool(_) => {
                // Leaf-like tokens: no recursion.
                self.visit_token(&node.token);
            }
            ParseToken::In | ParseToken::Leaves => {
                // In-order traversal: left, token, right.
                if let Some(n) = &node.left {
                    self.visit(n);
                }
                self.visit_token(&node.token);
                if let Some(n) = &node.right {
                    self.visit(n);
                }
            }
            ParseToken::Array => {
                // In-order like `In`, terminated with an ArrayEof marker.
                if let Some(n) = &node.left {
                    self.visit(n);
                }
                self.visit_token(&node.token);
                if let Some(n) = &node.right {
                    self.visit(n);
                }
                self.visit_token(&ParseToken::ArrayEof);
            }
            ParseToken::Filter(FilterToken::And) | ParseToken::Filter(FilterToken::Or) => {
                // Post-order: both operands first, then the logical operator.
                if let Some(n) = &node.left {
                    self.visit(n);
                }
                if let Some(n) = &node.right {
                    self.visit(n);
                }
                self.visit_token(&node.token);
            }
            ParseToken::Filter(_) => {
                // Comparison filter: each operand is followed by `end_term`.
                if let Some(n) = &node.left {
                    self.visit(n);
                }
                self.end_term();
                if let Some(n) = &node.right {
                    self.visit(n);
                }
                self.end_term();
                self.visit_token(&node.token);
            }
            _ => {}
        }
    }

    fn visit_token(&mut self, token: &ParseToken);

    /// Hook invoked after each operand of a comparison filter; default no-op.
    fn end_term(&mut self) {}
}

View File

@ -2,29 +2,29 @@ use std::result::Result;
use super::path_reader::{PathReader, ReaderError}; use super::path_reader::{PathReader, ReaderError};
pub const ABSOLUTE: &'static str = "$"; pub const ABSOLUTE: &str = "$";
pub const DOT: &'static str = "."; pub const DOT: &str = ".";
pub const AT: &'static str = "@"; pub const AT: &str = "@";
pub const OPEN_ARRAY: &'static str = "["; pub const OPEN_ARRAY: &str = "[";
pub const CLOSE_ARRAY: &'static str = "]"; pub const CLOSE_ARRAY: &str = "]";
pub const ASTERISK: &'static str = "*"; pub const ASTERISK: &str = "*";
pub const QUESTION: &'static str = "?"; pub const QUESTION: &str = "?";
pub const COMMA: &'static str = ","; pub const COMMA: &str = ",";
pub const SPLIT: &'static str = ":"; pub const SPLIT: &str = ":";
pub const OPEN_PARENTHESIS: &'static str = "("; pub const OPEN_PARENTHESIS: &str = "(";
pub const CLOSE_PARENTHESIS: &'static str = ")"; pub const CLOSE_PARENTHESIS: &str = ")";
pub const KEY: &'static str = "Key"; pub const KEY: &str = "Key";
pub const DOUBLE_QUOTE: &'static str = "\""; pub const DOUBLE_QUOTE: &str = "\"";
pub const SINGLE_QUOTE: &'static str = "'"; pub const SINGLE_QUOTE: &str = "'";
pub const EQUAL: &'static str = "=="; pub const EQUAL: &str = "==";
pub const GREATER_OR_EQUAL: &'static str = ">="; pub const GREATER_OR_EQUAL: &str = ">=";
pub const GREATER: &'static str = ">"; pub const GREATER: &str = ">";
pub const LITTLE: &'static str = "<"; pub const LITTLE: &str = "<";
pub const LITTLE_OR_EQUAL: &'static str = "<="; pub const LITTLE_OR_EQUAL: &str = "<=";
pub const NOT_EQUAL: &'static str = "!="; pub const NOT_EQUAL: &str = "!=";
pub const AND: &'static str = "&&"; pub const AND: &str = "&&";
pub const OR: &'static str = "||"; pub const OR: &str = "||";
pub const WHITESPACE: &'static str = " "; pub const WHITESPACE: &str = " ";
const CH_DOLLA: char = '$'; const CH_DOLLA: char = '$';
const CH_DOT: char = '.'; const CH_DOT: char = '.';
@ -161,7 +161,7 @@ impl<'a> Tokenizer<'a> {
if let Some('\\') = val.chars().last() { if let Some('\\') = val.chars().last() {
self.input.next_char().map_err(to_token_error)?; self.input.next_char().map_err(to_token_error)?;
let _ = val.pop(); let _ = val.pop();
let (_, mut val_remain) = self let (_, val_remain) = self
.input .input
.take_while(|c| *c != ch) .take_while(|c| *c != ch)
.map_err(to_token_error)?; .map_err(to_token_error)?;
@ -321,7 +321,7 @@ impl<'a> TokenReader<'a> {
} }
Err(e) => { Err(e) => {
return TokenReader { return TokenReader {
origin_input: input.clone(), origin_input: input,
err: e, err: e,
err_pos: tokenizer.current_pos(), err_pos: tokenizer.current_pos(),
tokens, tokens,

View File

@ -4,7 +4,7 @@ use std::fmt;
use array_tool::vec::{Intersect, Union}; use array_tool::vec::{Intersect, Union};
use serde_json::{Number, Value}; use serde_json::{Number, Value};
use parser::parser::*; use parser::*;
fn to_f64(n: &Number) -> f64 { fn to_f64(n: &Number) -> f64 {
if n.is_i64() { if n.is_i64() {
@ -17,13 +17,13 @@ fn to_f64(n: &Number) -> f64 {
} }
trait Cmp { trait Cmp {
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool; fn cmp_bool(&self, v1: bool, v2: bool) -> bool;
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool; fn cmp_f64(&self, v1: f64, v2: f64) -> bool;
fn cmp_string(&self, v1: &String, v2: &String) -> bool; fn cmp_string(&self, v1: &str, v2: &str) -> bool;
fn cmp_json<'a>(&self, v1: &Vec<&'a Value>, v2: &Vec<&'a Value>) -> Vec<&'a Value>; fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value>;
fn default(&self) -> bool { fn default(&self) -> bool {
false false
@ -33,59 +33,59 @@ trait Cmp {
struct CmpEq; struct CmpEq;
impl Cmp for CmpEq { impl Cmp for CmpEq {
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool { fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 == v2 v1 == v2
} }
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool { fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
(v1 - v2).abs() == 0_f64
}
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 == v2 v1 == v2
} }
fn cmp_string(&self, v1: &String, v2: &String) -> bool { fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
v1 == v2 v1.to_vec().intersect(v2.to_vec())
}
fn cmp_json<'a>(&self, v1: &Vec<&'a Value>, v2: &Vec<&'a Value>) -> Vec<&'a Value> {
v1.intersect(v2.to_vec())
} }
} }
struct CmpNe; struct CmpNe;
impl Cmp for CmpNe { impl Cmp for CmpNe {
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool { fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 != v2 v1 != v2
} }
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool { fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
(v1 - v2).abs() != 0_f64
}
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 != v2 v1 != v2
} }
fn cmp_string(&self, v1: &String, v2: &String) -> bool { fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
v1 != v2 v1.to_vec().intersect_if(v2.to_vec(), |a, b| a != b)
}
fn cmp_json<'a>(&self, v1: &Vec<&'a Value>, v2: &Vec<&'a Value>) -> Vec<&'a Value> {
v1.intersect_if(v2.to_vec(), |a, b| a != b)
} }
} }
struct CmpGt; struct CmpGt;
impl Cmp for CmpGt { impl Cmp for CmpGt {
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool { fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 & !v2
}
fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 > v2 v1 > v2
} }
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool { fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 > v2 v1 > v2
} }
fn cmp_string(&self, v1: &String, v2: &String) -> bool { fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
v1 > v2
}
fn cmp_json<'a>(&self, _: &Vec<&'a Value>, _: &Vec<&'a Value>) -> Vec<&'a Value> {
Vec::new() Vec::new()
} }
} }
@ -93,19 +93,19 @@ impl Cmp for CmpGt {
struct CmpGe; struct CmpGe;
impl Cmp for CmpGe { impl Cmp for CmpGe {
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool { fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 >= v2 v1 >= v2
} }
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool { fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 >= v2 v1 >= v2
} }
fn cmp_string(&self, v1: &String, v2: &String) -> bool { fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 >= v2 v1 >= v2
} }
fn cmp_json<'a>(&self, _: &Vec<&'a Value>, _: &Vec<&'a Value>) -> Vec<&'a Value> { fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
Vec::new() Vec::new()
} }
} }
@ -113,19 +113,19 @@ impl Cmp for CmpGe {
struct CmpLt; struct CmpLt;
impl Cmp for CmpLt { impl Cmp for CmpLt {
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool { fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
!v1 & v2
}
fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 < v2 v1 < v2
} }
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool { fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 < v2 v1 < v2
} }
fn cmp_string(&self, v1: &String, v2: &String) -> bool { fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
v1 < v2
}
fn cmp_json<'a>(&self, _: &Vec<&'a Value>, _: &Vec<&'a Value>) -> Vec<&'a Value> {
Vec::new() Vec::new()
} }
} }
@ -133,19 +133,19 @@ impl Cmp for CmpLt {
struct CmpLe; struct CmpLe;
impl Cmp for CmpLe { impl Cmp for CmpLe {
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool { fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 <= v2 v1 <= v2
} }
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool { fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 <= v2 v1 <= v2
} }
fn cmp_string(&self, v1: &String, v2: &String) -> bool { fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 <= v2 v1 <= v2
} }
fn cmp_json<'a>(&self, _: &Vec<&'a Value>, _: &Vec<&'a Value>) -> Vec<&'a Value> { fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
Vec::new() Vec::new()
} }
} }
@ -153,40 +153,40 @@ impl Cmp for CmpLe {
struct CmpAnd; struct CmpAnd;
impl Cmp for CmpAnd { impl Cmp for CmpAnd {
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool { fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
*v1 && *v2 v1 && v2
} }
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool { fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 > &0_f64 && v2 > &0_f64 v1 > 0_f64 && v2 > 0_f64
} }
fn cmp_string(&self, v1: &String, v2: &String) -> bool { fn cmp_string(&self, v1: &str, v2: &str) -> bool {
!v1.is_empty() && !v2.is_empty() !v1.is_empty() && !v2.is_empty()
} }
fn cmp_json<'a>(&self, v1: &Vec<&'a Value>, v2: &Vec<&'a Value>) -> Vec<&'a Value> { fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
v1.intersect(v2.to_vec()) v1.to_vec().intersect(v2.to_vec())
} }
} }
struct CmpOr; struct CmpOr;
impl Cmp for CmpOr { impl Cmp for CmpOr {
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool { fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
*v1 || *v2 v1 || v2
} }
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool { fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 > &0_f64 || v2 > &0_f64 v1 > 0_f64 || v2 > 0_f64
} }
fn cmp_string(&self, v1: &String, v2: &String) -> bool { fn cmp_string(&self, v1: &str, v2: &str) -> bool {
!v1.is_empty() || !v2.is_empty() !v1.is_empty() || !v2.is_empty()
} }
fn cmp_json<'a>(&self, v1: &Vec<&'a Value>, v2: &Vec<&'a Value>) -> Vec<&'a Value> { fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
v1.union(v2.to_vec()) v1.to_vec().union(v2.to_vec())
} }
} }
@ -199,34 +199,6 @@ enum ExprTerm<'a> {
} }
impl<'a> ExprTerm<'a> { impl<'a> ExprTerm<'a> {
fn is_string(&self) -> bool {
match &self {
ExprTerm::String(_) => true,
_ => false,
}
}
fn is_number(&self) -> bool {
match &self {
ExprTerm::Number(_) => true,
_ => false,
}
}
fn is_bool(&self) -> bool {
match &self {
ExprTerm::Bool(_) => true,
_ => false,
}
}
fn is_json(&self) -> bool {
match &self {
ExprTerm::Json(_, _) => true,
_ => false,
}
}
fn cmp<C1: Cmp, C2: Cmp>( fn cmp<C1: Cmp, C2: Cmp>(
&self, &self,
other: &Self, other: &Self,
@ -240,23 +212,18 @@ impl<'a> ExprTerm<'a> {
_ => ExprTerm::Bool(cmp_fn.default()), _ => ExprTerm::Bool(cmp_fn.default()),
}, },
ExprTerm::Number(n1) => match &other { ExprTerm::Number(n1) => match &other {
ExprTerm::Number(n2) => ExprTerm::Bool(cmp_fn.cmp_f64(&to_f64(n1), &to_f64(n2))), ExprTerm::Number(n2) => ExprTerm::Bool(cmp_fn.cmp_f64(to_f64(n1), to_f64(n2))),
ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn), ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
_ => ExprTerm::Bool(cmp_fn.default()), _ => ExprTerm::Bool(cmp_fn.default()),
}, },
ExprTerm::Bool(b1) => match &other { ExprTerm::Bool(b1) => match &other {
ExprTerm::Bool(b2) => ExprTerm::Bool(cmp_fn.cmp_bool(b1, b2)), ExprTerm::Bool(b2) => ExprTerm::Bool(cmp_fn.cmp_bool(*b1, *b2)),
ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn), ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
_ => ExprTerm::Bool(cmp_fn.default()), _ => ExprTerm::Bool(cmp_fn.default()),
}, },
ExprTerm::Json(fk1, vec1) if other.is_string() => { ExprTerm::Json(fk1, vec1) => {
let s2 = if let ExprTerm::String(s2) = &other { let ret: Vec<&Value> = match &other {
s2 ExprTerm::String(s2) => vec1
} else {
unreachable!()
};
let ret: Vec<&Value> = vec1
.iter() .iter()
.filter(|v1| match v1 { .filter(|v1| match v1 {
Value::String(s1) => cmp_fn.cmp_string(s1, s2), Value::String(s1) => cmp_fn.cmp_string(s1, s2),
@ -270,66 +237,42 @@ impl<'a> ExprTerm<'a> {
} }
_ => cmp_fn.default(), _ => cmp_fn.default(),
}) })
.map(|v| *v) .cloned()
.collect(); .collect(),
ExprTerm::Number(n2) => vec1
if ret.is_empty() {
ExprTerm::Bool(cmp_fn.default())
} else {
ExprTerm::Json(None, ret)
}
}
ExprTerm::Json(fk1, vec1) if other.is_number() => {
let n2 = if let ExprTerm::Number(n2) = &other {
n2
} else {
unreachable!()
};
let ret: Vec<&Value> = vec1
.iter() .iter()
.filter(|v1| match v1 { .filter(|v1| match v1 {
Value::Number(n1) => cmp_fn.cmp_f64(&to_f64(n1), &to_f64(n2)), Value::Number(n1) => cmp_fn.cmp_f64(to_f64(n1), to_f64(n2)),
Value::Object(map1) => { Value::Object(map1) => {
if let Some(FilterKey::String(k)) = fk1 { if let Some(FilterKey::String(k)) = fk1 {
if let Some(Value::Number(n1)) = map1.get(k) { if let Some(Value::Number(n1)) = map1.get(k) {
return cmp_fn.cmp_f64(&to_f64(n1), &to_f64(n2)); return cmp_fn.cmp_f64(to_f64(n1), to_f64(n2));
} }
} }
cmp_fn.default() cmp_fn.default()
} }
_ => cmp_fn.default(), _ => cmp_fn.default(),
}) })
.map(|v| *v) .cloned()
.collect(); .collect(),
ExprTerm::Bool(b2) => vec1
if ret.is_empty() {
ExprTerm::Bool(cmp_fn.default())
} else {
ExprTerm::Json(None, ret)
}
}
ExprTerm::Json(fk1, vec1) if other.is_bool() => {
let b2 = if let ExprTerm::Bool(b2) = &other {
b2
} else {
unreachable!()
};
let ret: Vec<&Value> = vec1
.iter() .iter()
.filter(|v1| match v1 { .filter(|v1| match v1 {
Value::Bool(b1) => cmp_fn.cmp_bool(b1, b2), Value::Bool(b1) => cmp_fn.cmp_bool(*b1, *b2),
Value::Object(map1) => { Value::Object(map1) => {
if let Some(FilterKey::String(k)) = fk1 { if let Some(FilterKey::String(k)) = fk1 {
if let Some(Value::Bool(b1)) = map1.get(k) { if let Some(Value::Bool(b1)) = map1.get(k) {
return cmp_fn.cmp_bool(b1, b2); return cmp_fn.cmp_bool(*b1, *b2);
} }
} }
cmp_fn.default() cmp_fn.default()
} }
_ => cmp_fn.default(), _ => cmp_fn.default(),
}) })
.map(|v| *v) .cloned()
.collect(); .collect(),
ExprTerm::Json(_, vec2) => cmp_fn.cmp_json(vec1, vec2),
};
if ret.is_empty() { if ret.is_empty() {
ExprTerm::Bool(cmp_fn.default()) ExprTerm::Bool(cmp_fn.default())
@ -337,18 +280,6 @@ impl<'a> ExprTerm<'a> {
ExprTerm::Json(None, ret) ExprTerm::Json(None, ret)
} }
} }
ExprTerm::Json(_, vec1) if other.is_json() => match &other {
ExprTerm::Json(_, vec2) => {
let vec = cmp_fn.cmp_json(vec1, vec2);
if vec.is_empty() {
ExprTerm::Bool(cmp_fn.default())
} else {
ExprTerm::Json(None, vec)
}
}
_ => unreachable!(),
},
_ => unreachable!(),
} }
} }
@ -432,12 +363,7 @@ impl<'a> Into<ExprTerm<'a>> for &Vec<&'a Value> {
} }
} }
fn walk_all_with_str<'a>( fn walk_all_with_str<'a>(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, key: &str, is_filter: bool) {
vec: &Vec<&'a Value>,
tmp: &mut Vec<&'a Value>,
key: &str,
is_filter: bool,
) {
if is_filter { if is_filter {
walk(vec, tmp, &|v| match v { walk(vec, tmp, &|v| match v {
Value::Object(map) if map.contains_key(key) => Some(vec![v]), Value::Object(map) if map.contains_key(key) => Some(vec![v]),
@ -454,7 +380,7 @@ fn walk_all_with_str<'a>(
} }
} }
fn walk_all<'a>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>) { fn walk_all<'a>(vec: &[&'a Value], tmp: &mut Vec<&'a Value>) {
walk(vec, tmp, &|v| match v { walk(vec, tmp, &|v| match v {
Value::Array(vec) => Some(vec.iter().collect()), Value::Array(vec) => Some(vec.iter().collect()),
Value::Object(map) => { Value::Object(map) => {
@ -468,7 +394,7 @@ fn walk_all<'a>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>) {
}); });
} }
fn walk<'a, F>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>, fun: &F) fn walk<'a, F>(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, fun: &F)
where where
F: Fn(&Value) -> Option<Vec<&Value>>, F: Fn(&Value) -> Option<Vec<&Value>>,
{ {
@ -500,11 +426,11 @@ where
} }
} }
fn abs_index(n: &isize, len: usize) -> usize { fn abs_index(n: isize, len: usize) -> usize {
if n < &0_isize { if n < 0_isize {
(n + len as isize) as usize (n + len as isize) as usize
} else { } else {
*n as usize n as usize
} }
} }
@ -538,7 +464,7 @@ impl fmt::Display for JsonPathError {
} }
} }
#[derive(Debug)] #[derive(Debug, Default)]
pub struct Selector<'a, 'b> { pub struct Selector<'a, 'b> {
node: Option<Node>, node: Option<Node>,
node_ref: Option<&'b Node>, node_ref: Option<&'b Node>,
@ -551,15 +477,7 @@ pub struct Selector<'a, 'b> {
impl<'a, 'b> Selector<'a, 'b> { impl<'a, 'b> Selector<'a, 'b> {
pub fn new() -> Self { pub fn new() -> Self {
Selector { Selector::default()
node: None,
node_ref: None,
value: None,
tokens: Vec::new(),
terms: Vec::new(),
current: None,
selectors: Vec::new(),
}
} }
pub fn str_path(&mut self, path: &str) -> Result<&mut Self, JsonPathError> { pub fn str_path(&mut self, path: &str) -> Result<&mut Self, JsonPathError> {
@ -569,7 +487,7 @@ impl<'a, 'b> Selector<'a, 'b> {
self.node_ref.take(); self.node_ref.take();
} }
self.node = Some(Parser::compile(path).map_err(|e| JsonPathError::Path(e))?); self.node = Some(Parser::compile(path).map_err(JsonPathError::Path)?);
Ok(self) Ok(self)
} }
@ -662,8 +580,8 @@ impl<'a, 'b> Selector<'a, 'b> {
} }
fn in_filter<F: Fn(&Vec<&'a Value>, &mut Vec<&'a Value>) -> FilterKey>(&mut self, fun: F) { fn in_filter<F: Fn(&Vec<&'a Value>, &mut Vec<&'a Value>) -> FilterKey>(&mut self, fun: F) {
match self.terms.pop() { if let Some(peek) = self.terms.pop() {
Some(peek) => match peek { match peek {
Some(v) => { Some(v) => {
debug!("in_filter 1.: {:?}", v); debug!("in_filter 1.: {:?}", v);
@ -685,8 +603,7 @@ impl<'a, 'b> Selector<'a, 'b> {
self.terms.push(Some(ExprTerm::Json(Some(filter_key), tmp))); self.terms.push(Some(ExprTerm::Json(Some(filter_key), tmp)));
} }
} }
}, }
_ => {}
} }
} }
@ -741,7 +658,7 @@ impl<'a, 'b> Selector<'a, 'b> {
let mut tmp = Vec::new(); let mut tmp = Vec::new();
for c in current { for c in current {
if let Value::Array(vec) = c { if let Value::Array(vec) = c {
let index = abs_index(&(index as isize), vec.len()); let index = abs_index(index as isize, vec.len());
if let Some(v) = c.get(index) { if let Some(v) = c.get(index) {
tmp.push(v); tmp.push(v);
} }
@ -756,20 +673,17 @@ impl<'a, 'b> Selector<'a, 'b> {
); );
} }
fn next_from_current_with_str(&mut self, keys: &Vec<String>) { fn next_from_current_with_str(&mut self, keys: &[String]) {
if let Some(current) = self.current.take() { if let Some(current) = self.current.take() {
let mut tmp = Vec::new(); let mut tmp = Vec::new();
for c in current { for c in current {
match c { if let Value::Object(map) = c {
Value::Object(map) => {
for key in keys { for key in keys {
if let Some(v) = map.get(key) { if let Some(v) = map.get(key) {
tmp.push(v) tmp.push(v)
} }
} }
} }
_ => {}
}
} }
self.current = Some(tmp); self.current = Some(tmp);
} }
@ -821,12 +735,8 @@ impl<'a, 'b> Selector<'a, 'b> {
} }
debug!("all_from_current_with_str: {}, {:?}", key, self.current); debug!("all_from_current_with_str: {}, {:?}", key, self.current);
} }
}
impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
fn visit_token(&mut self, token: &ParseToken) {
debug!("token: {:?}, stack: {:?}", token, self.tokens);
fn compute_absolute_path_filter(&mut self, token: &ParseToken) -> bool {
if !self.selectors.is_empty() { if !self.selectors.is_empty() {
match token { match token {
ParseToken::Absolute | ParseToken::Relative | ParseToken::Filter(_) => { ParseToken::Absolute | ParseToken::Relative | ParseToken::Filter(_) => {
@ -850,13 +760,17 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
if let Some(selector) = self.selectors.last_mut() { if let Some(selector) = self.selectors.last_mut() {
selector.visit_token(token); selector.visit_token(token);
return; true
} else {
false
} }
}
}
match token { impl<'a, 'b> Selector<'a, 'b> {
ParseToken::Absolute => { fn visit_absolute(&mut self) {
if self.current.is_some() { if self.current.is_some() {
let mut selector = Selector::new(); let mut selector = Selector::default();
if let Some(value) = self.value { if let Some(value) = self.value {
selector.value = Some(value); selector.value = Some(value);
@ -866,12 +780,11 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
return; return;
} }
match &self.value { if let Some(v) = &self.value {
Some(v) => self.current = Some(vec![v]), self.current = Some(vec![v]);
_ => {}
} }
} }
ParseToken::Relative => { fn visit_relative(&mut self) {
if let Some(ParseToken::Array) = self.tokens.last() { if let Some(ParseToken::Array) = self.tokens.last() {
let array_token = self.tokens.pop(); let array_token = self.tokens.pop();
if let Some(ParseToken::Leaves) = self.tokens.last() { if let Some(ParseToken::Leaves) = self.tokens.last() {
@ -882,17 +795,15 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
} }
self.new_filter_context(); self.new_filter_context();
} }
ParseToken::In | ParseToken::Leaves | ParseToken::Array => {
self.tokens.push(token.clone()); fn visit_array_eof(&mut self) {
}
ParseToken::ArrayEof => {
if let Some(Some(e)) = self.terms.pop() { if let Some(Some(e)) = self.terms.pop() {
match e { match e {
ExprTerm::Number(n) => { ExprTerm::Number(n) => {
self.next_from_current_with_num(to_f64(&n)); self.next_from_current_with_num(to_f64(&n));
} }
ExprTerm::String(key) => { ExprTerm::String(key) => {
self.next_from_current_with_str(&vec![key]); self.next_from_current_with_str(&[key]);
} }
ExprTerm::Json(_, v) => { ExprTerm::Json(_, v) => {
if v.is_empty() { if v.is_empty() {
@ -910,13 +821,11 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
self.tokens.pop(); self.tokens.pop();
} }
ParseToken::All => {
match self.tokens.last() { fn visit_all(&mut self) {
Some(ParseToken::Array) => { if let Some(ParseToken::Array) = self.tokens.last() {
self.tokens.pop(); self.tokens.pop();
} }
_ => {}
}
match self.tokens.last() { match self.tokens.last() {
Some(ParseToken::Leaves) => { Some(ParseToken::Leaves) => {
@ -932,41 +841,35 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
} }
} }
} }
ParseToken::Bool(b) => {
self.terms.push(Some(ExprTerm::Bool(*b))); fn visit_key(&mut self, key: &str) {
}
ParseToken::Key(key) => {
if let Some(ParseToken::Array) = self.tokens.last() { if let Some(ParseToken::Array) = self.tokens.last() {
self.terms.push(Some(ExprTerm::String(key.clone()))); self.terms.push(Some(ExprTerm::String(key.to_string())));
return; return;
} }
match self.tokens.pop() { if let Some(t) = self.tokens.pop() {
Some(t) => {
if self.terms.is_empty() { if self.terms.is_empty() {
match t { match t {
ParseToken::Leaves => self.all_from_current_with_str(key.as_str()), ParseToken::Leaves => self.all_from_current_with_str(key),
ParseToken::In => { ParseToken::In => self.next_from_current_with_str(&[key.to_string()]),
self.next_from_current_with_str(&vec![key.clone()])
}
_ => {} _ => {}
} }
} else { } else {
match t { match t {
ParseToken::Leaves => { ParseToken::Leaves => {
self.all_in_filter_with_str(key.as_str()); self.all_in_filter_with_str(key);
} }
ParseToken::In => { ParseToken::In => {
self.next_in_filter_with_str(key.as_str()); self.next_in_filter_with_str(key);
} }
_ => {} _ => {}
} }
} }
} }
_ => {}
} }
}
ParseToken::Keys(keys) => { fn visit_keys(&mut self, keys: &[String]) {
if !self.terms.is_empty() { if !self.terms.is_empty() {
unimplemented!("keys in filter"); unimplemented!("keys in filter");
} }
@ -977,12 +880,9 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
unreachable!(); unreachable!();
} }
} }
ParseToken::Number(v) => {
self.terms fn visit_filter(&mut self, ft: &FilterToken) {
.push(Some(ExprTerm::Number(Number::from_f64(*v).unwrap()))); let right = match self.terms.pop() {
}
ParseToken::Filter(ref ft) => {
let ref right = match self.terms.pop() {
Some(Some(right)) => right, Some(Some(right)) => right,
Some(None) => ExprTerm::Json( Some(None) => ExprTerm::Json(
None, None,
@ -1008,21 +908,22 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
let mut ret = None; let mut ret = None;
match ft { match ft {
FilterToken::Equal => left.eq(right, &mut ret), FilterToken::Equal => left.eq(&right, &mut ret),
FilterToken::NotEqual => left.ne(right, &mut ret), FilterToken::NotEqual => left.ne(&right, &mut ret),
FilterToken::Greater => left.gt(right, &mut ret), FilterToken::Greater => left.gt(&right, &mut ret),
FilterToken::GreaterOrEqual => left.ge(right, &mut ret), FilterToken::GreaterOrEqual => left.ge(&right, &mut ret),
FilterToken::Little => left.lt(right, &mut ret), FilterToken::Little => left.lt(&right, &mut ret),
FilterToken::LittleOrEqual => left.le(right, &mut ret), FilterToken::LittleOrEqual => left.le(&right, &mut ret),
FilterToken::And => left.and(right, &mut ret), FilterToken::And => left.and(&right, &mut ret),
FilterToken::Or => left.or(right, &mut ret), FilterToken::Or => left.or(&right, &mut ret),
}; };
if let Some(e) = ret { if let Some(e) = ret {
self.terms.push(Some(e)); self.terms.push(Some(e));
} }
} }
ParseToken::Range(from, to, step) => {
fn visit_range(&mut self, from: &Option<isize>, to: &Option<isize>, step: &Option<usize>) {
if !self.terms.is_empty() { if !self.terms.is_empty() {
unimplemented!("range syntax in filter"); unimplemented!("range syntax in filter");
} }
@ -1033,13 +934,13 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
for v in current { for v in current {
if let Value::Array(vec) = v { if let Value::Array(vec) = v {
let from = if let Some(from) = from { let from = if let Some(from) = from {
abs_index(from, vec.len()) abs_index(*from, vec.len())
} else { } else {
0 0
}; };
let to = if let Some(to) = to { let to = if let Some(to) = to {
abs_index(to, vec.len()) abs_index(*to, vec.len())
} else { } else {
vec.len() vec.len()
}; };
@ -1060,7 +961,8 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
unreachable!(); unreachable!();
} }
} }
ParseToken::Union(indices) => {
fn visit_union(&mut self, indices: &[isize]) {
if !self.terms.is_empty() { if !self.terms.is_empty() {
unimplemented!("union syntax in filter"); unimplemented!("union syntax in filter");
} }
@ -1071,7 +973,7 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
for v in current { for v in current {
if let Value::Array(vec) = v { if let Value::Array(vec) = v {
for i in indices { for i in indices {
if let Some(v) = vec.get(abs_index(i, vec.len())) { if let Some(v) = vec.get(abs_index(*i, vec.len())) {
tmp.push(v); tmp.push(v);
} }
} }
@ -1084,6 +986,36 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
unreachable!(); unreachable!();
} }
} }
}
impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
fn visit_token(&mut self, token: &ParseToken) {
debug!("token: {:?}, stack: {:?}", token, self.tokens);
if self.compute_absolute_path_filter(token) {
return;
}
match token {
ParseToken::Absolute => self.visit_absolute(),
ParseToken::Relative => self.visit_relative(),
ParseToken::In | ParseToken::Leaves | ParseToken::Array => {
self.tokens.push(token.clone());
}
ParseToken::ArrayEof => self.visit_array_eof(),
ParseToken::All => self.visit_all(),
ParseToken::Bool(b) => {
self.terms.push(Some(ExprTerm::Bool(*b)));
}
ParseToken::Key(key) => self.visit_key(key),
ParseToken::Keys(keys) => self.visit_keys(keys),
ParseToken::Number(v) => {
self.terms
.push(Some(ExprTerm::Number(Number::from_f64(*v).unwrap())));
}
ParseToken::Filter(ref ft) => self.visit_filter(ft),
ParseToken::Range(from, to, step) => self.visit_range(from, to, step),
ParseToken::Union(indices) => self.visit_union(indices),
ParseToken::Eof => { ParseToken::Eof => {
debug!("visit_token eof"); debug!("visit_token eof");
} }
@ -1091,6 +1023,7 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
} }
} }
#[derive(Default)]
pub struct SelectorMut { pub struct SelectorMut {
path: Option<Node>, path: Option<Node>,
value: Option<Value>, value: Option<Value>,
@ -1141,14 +1074,11 @@ fn replace_value<F: FnMut(&Value) -> Value>(tokens: Vec<String>, value: &mut Val
impl SelectorMut { impl SelectorMut {
pub fn new() -> Self { pub fn new() -> Self {
SelectorMut { Self::default()
path: None,
value: None,
}
} }
pub fn str_path(&mut self, path: &str) -> Result<&mut Self, JsonPathError> { pub fn str_path(&mut self, path: &str) -> Result<&mut Self, JsonPathError> {
self.path = Some(Parser::compile(path).map_err(|e| JsonPathError::Path(e))?); self.path = Some(Parser::compile(path).map_err(JsonPathError::Path)?);
Ok(self) Ok(self)
} }
@ -1208,7 +1138,7 @@ impl SelectorMut {
_ => {} _ => {}
} }
return false; false
} }
let mut visited = HashSet::new(); let mut visited = HashSet::new();
@ -1234,7 +1164,7 @@ impl SelectorMut {
fn select(&self) -> Result<Vec<&Value>, JsonPathError> { fn select(&self) -> Result<Vec<&Value>, JsonPathError> {
if let Some(node) = &self.path { if let Some(node) = &self.path {
let mut selector = Selector::new(); let mut selector = Selector::default();
selector.compiled_path(&node); selector.compiled_path(&node);
if let Some(value) = &self.value { if let Some(value) = &self.value {

View File

@ -30,8 +30,8 @@ pub fn read_contents(path: &str) -> String {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn select_and_then_compare<'a>(path: &str, json: Value, target: Value) { pub fn select_and_then_compare(path: &str, json: Value, target: Value) {
let mut selector = Selector::new(); let mut selector = Selector::default();
let result = selector let result = selector
.str_path(path) .str_path(path)
.unwrap() .unwrap()
@ -50,7 +50,7 @@ pub fn select_and_then_compare<'a>(path: &str, json: Value, target: Value) {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn compare_result<'a>(result: Vec<&Value>, target: Value) { pub fn compare_result(result: Vec<&Value>, target: Value) {
let result = serde_json::to_value(result).unwrap(); let result = serde_json::to_value(result).unwrap();
assert_eq!(result, target); assert_eq!(result, target);
} }

View File

@ -624,7 +624,7 @@ fn quote() {
fn all_filter() { fn all_filter() {
setup(); setup();
for path in vec![r#"$.*"#, r#"$[*]"#] { for path in &[r#"$.*"#, r#"$[*]"#] {
select_and_then_compare( select_and_then_compare(
path, path,
json!(["string", 42, { "key": "value" }, [0, 1]]), json!(["string", 42, { "key": "value" }, [0, 1]]),
@ -632,7 +632,7 @@ fn all_filter() {
); );
} }
for path in vec![r#"$..*"#, r#"$..[*]"#] { for path in &[r#"$..*"#, r#"$..[*]"#] {
select_and_then_compare( select_and_then_compare(
path, path,
json!(["string", 42, { "key": "value" }, [0, 1]]), json!(["string", 42, { "key": "value" }, [0, 1]]),
@ -640,7 +640,7 @@ fn all_filter() {
); );
} }
for path in vec![r#"$.*.*"#, r#"$[*].*"#, r#"$.*[*]"#, r#"$[*][*]"#] { for path in &[r#"$.*.*"#, r#"$[*].*"#, r#"$.*[*]"#, r#"$[*][*]"#] {
select_and_then_compare( select_and_then_compare(
path, path,
json!(["string", 42, { "key": "value" }, [0, 1]]), json!(["string", 42, { "key": "value" }, [0, 1]]),
@ -648,7 +648,7 @@ fn all_filter() {
); );
} }
for path in vec![r#"$..friends.*"#, r#"$[*].friends.*"#] { for path in &[r#"$..friends.*"#, r#"$[*].friends.*"#] {
select_and_then_compare( select_and_then_compare(
path, path,
read_json("./benches/data_array.json"), read_json("./benches/data_array.json"),

View File

@ -37,11 +37,7 @@ fn compile() {
fn compile_error() { fn compile_error() {
let mut template = jsonpath::compile("$["); let mut template = jsonpath::compile("$[");
if let Err(JsonPathError::Path(_)) = template(&Value::Null) { assert!(template(&Value::Null).is_err());
assert!(true);
} else {
assert!(false);
}
} }
setup(); setup();

View File

@ -12,7 +12,7 @@ mod common;
fn selector_mut() { fn selector_mut() {
setup(); setup();
let mut selector_mut = SelectorMut::new(); let mut selector_mut = SelectorMut::default();
let mut nums = Vec::new(); let mut nums = Vec::new();
let result = selector_mut let result = selector_mut
@ -20,12 +20,9 @@ fn selector_mut() {
.unwrap() .unwrap()
.value(read_json("./benches/example.json")) .value(read_json("./benches/example.json"))
.replace_with(&mut |v| { .replace_with(&mut |v| {
match v { if let Value::Number(n) = v {
Value::Number(n) => {
nums.push(n.as_f64().unwrap()); nums.push(n.as_f64().unwrap());
} }
_ => {}
}
Value::String("a".to_string()) Value::String("a".to_string())
}) })
.unwrap() .unwrap()
@ -37,7 +34,7 @@ fn selector_mut() {
vec![8.95_f64, 12.99_f64, 8.99_f64, 22.99_f64, 19.95_f64] vec![8.95_f64, 12.99_f64, 8.99_f64, 22.99_f64, 19.95_f64]
); );
let mut selector = Selector::new(); let mut selector = Selector::default();
let result = selector let result = selector
.str_path(r#"$.store..price"#) .str_path(r#"$.store..price"#)
.unwrap() .unwrap()

View File

@ -173,7 +173,7 @@ fn readme_selector() {
{"name": "친구4"} {"name": "친구4"}
]}); ]});
let mut selector = Selector::new(); let mut selector = Selector::default();
let result = selector let result = selector
.str_path("$..[?(@.age >= 30)]") .str_path("$..[?(@.age >= 30)]")
@ -211,7 +211,7 @@ fn readme_selector_mut() {
{"name": "친구4"} {"name": "친구4"}
]}); ]});
let mut selector_mut = SelectorMut::new(); let mut selector_mut = SelectorMut::default();
let result = selector_mut let result = selector_mut
.str_path("$..[?(@.age == 20)].age") .str_path("$..[?(@.age == 20)].age")

View File

@ -102,7 +102,7 @@ pub fn compile(path: &str) -> JsValue {
}, },
Err(e) => JsValue::from_str(&format!("{:?}", e)), Err(e) => JsValue::from_str(&format!("{:?}", e)),
} }
}) as Box<Fn(JsValue) -> JsValue>); }) as Box<dyn Fn(JsValue) -> JsValue>);
let ret = cb.as_ref().clone(); let ret = cb.as_ref().clone();
cb.forget(); cb.forget();
@ -131,8 +131,8 @@ pub fn selector(js_value: JsValue) -> JsValue {
Err(e) => JsValue::from_str(&format!("{:?}", e)), Err(e) => JsValue::from_str(&format!("{:?}", e)),
} }
} }
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Path(e))), Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Path(e))),
}) as Box<Fn(String) -> JsValue>, }) as Box<dyn Fn(String) -> JsValue>,
); );
let ret = cb.as_ref().clone(); let ret = cb.as_ref().clone();
@ -193,6 +193,7 @@ pub fn replace_with(js_value: JsValue, path: &str, fun: js_sys::Function) -> JsV
/// lifetime 제약으로 Selector를 사용 할 수 없다. /// lifetime 제약으로 Selector를 사용 할 수 없다.
/// ///
#[wasm_bindgen] #[wasm_bindgen]
#[derive(Default)]
pub struct Selector { pub struct Selector {
path: Option<String>, path: Option<String>,
value: Option<Value>, value: Option<Value>,
@ -202,10 +203,7 @@ pub struct Selector {
impl Selector { impl Selector {
#[wasm_bindgen(constructor)] #[wasm_bindgen(constructor)]
pub fn new() -> Self { pub fn new() -> Self {
Selector { Selector::default()
path: None,
value: None,
}
} }
#[wasm_bindgen(catch)] #[wasm_bindgen(catch)]
@ -263,6 +261,7 @@ impl Selector {
/// `wasm_bindgen` 제약으로 builder-pattern을 구사 할 수 없다. /// `wasm_bindgen` 제약으로 builder-pattern을 구사 할 수 없다.
/// ///
#[wasm_bindgen] #[wasm_bindgen]
#[derive(Default)]
pub struct SelectorMut { pub struct SelectorMut {
path: Option<String>, path: Option<String>,
value: Option<Value>, value: Option<Value>,
@ -272,10 +271,7 @@ pub struct SelectorMut {
impl SelectorMut { impl SelectorMut {
#[wasm_bindgen(constructor)] #[wasm_bindgen(constructor)]
pub fn new() -> Self { pub fn new() -> Self {
SelectorMut { SelectorMut::default()
path: None,
value: None,
}
} }
#[wasm_bindgen(catch)] #[wasm_bindgen(catch)]