jsonpath/src/parser/parser.rs

716 lines
21 KiB
Rust
Raw Normal View History

2019-03-14 22:30:42 +09:00
use super::tokenizer::*;
2018-12-26 14:45:31 +09:00
2018-12-27 23:46:15 +09:00
// Placeholder token position used when building tokens purely for
// comparison in `close_token` — presumably `partial_eq` ignores the
// position field; confirm against the tokenizer.
const DUMMY: usize = 0;

// Parse failures are reported as human-readable `String` messages.
type ParseResult<T> = Result<T, String>;
2018-12-26 14:45:31 +09:00
2019-02-22 00:22:30 +09:00
mod utils {
    /// Parses `string` as an `isize`; on failure, returns the error
    /// message produced by `msg_handler` (built lazily, only on error).
    pub fn string_to_isize<F>(string: &str, msg_handler: F) -> Result<isize, String>
        where F: Fn() -> String {
        string.parse::<isize>().map_err(|_| msg_handler())
    }

    /// Parses `string` as an `f64`; on failure, returns the error
    /// message produced by `msg_handler` (built lazily, only on error).
    pub fn string_to_f64<F>(string: &str, msg_handler: F) -> Result<f64, String>
        where F: Fn() -> String {
        string.parse::<f64>().map_err(|_| msg_handler())
    }
}
2019-02-26 23:04:04 +09:00
#[derive(Debug, PartialEq, Clone)]
pub enum ParseToken {
    // '$'
    Absolute,
    // '@'
    Relative,
    // '.'
    In,
    // '..'
    Leaves,
    // '*'
    All,

    // an object member name, or the raw text of a numeric/bool literal
    Key(String),
    // []
    Array,
    // meta token: emitted by visitors after an Array subtree is walked
    ArrayEof,
    // ?( filter )
    Filter(FilterToken),
    // 1 : 2
    Range(Option<isize>, Option<isize>),
    // 1, 2, 3
    Union(Vec<isize>),
    // numeric literal inside a filter expression
    Number(f64),
    // boolean literal inside a filter expression
    Bool(bool),
    // input ended mid-expression; handed to the caller instead of erroring
    Eof,
}
2019-02-26 23:04:04 +09:00
// Operators permitted inside a `?( ... )` filter expression.
#[derive(Debug, PartialEq, Clone)]
pub enum FilterToken {
    // equality comparison
    Equal,
    // inequality comparison
    NotEqual,
    // less-than
    Little,
    // less-than-or-equal
    LittleOrEqual,
    // greater-than
    Greater,
    // greater-than-or-equal
    GreaterOrEqual,
    // logical conjunction
    And,
    // logical disjunction
    Or,
}
2019-02-26 23:04:04 +09:00
// Binary AST node: `token` is the node's kind; `left`/`right` are
// optional children (leaf nodes such as `Key`/`Number` have none).
#[derive(Debug, Clone)]
pub struct Node {
    left: Option<Box<Node>>,
    right: Option<Box<Node>>,
    token: ParseToken,
}
2019-06-03 18:45:26 +09:00
/// Recursive-descent parser for JSONPath expressions; see [`Parser::compile`].
pub struct Parser;
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
impl Parser {
2019-02-19 08:20:59 +09:00
2019-06-03 18:45:26 +09:00
pub fn compile(input: &str) -> ParseResult<Node> {
let mut tokenizer = TokenReader::new(input);
Ok(Self::json_path(&mut tokenizer)?)
2019-02-26 23:04:04 +09:00
}
2019-06-03 18:45:26 +09:00
fn json_path(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#json_path");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Absolute(_)) => {
2019-06-03 18:45:26 +09:00
let node = Self::node(ParseToken::Absolute);
Self::paths(node, tokenizer)
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
2018-12-26 14:45:31 +09:00
}
}
2019-06-03 18:45:26 +09:00
fn paths(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#paths");
2019-06-03 18:45:26 +09:00
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Dot(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
Self::paths_dot(prev, tokenizer)
2018-12-26 14:45:31 +09:00
}
2018-12-27 23:46:15 +09:00
Ok(Token::OpenArray(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
let node = Self::array(prev, tokenizer)?;
Self::paths(node, tokenizer)
2018-12-26 14:45:31 +09:00
}
2018-12-27 23:46:15 +09:00
_ => {
Ok(prev)
2018-12-26 14:45:31 +09:00
}
}
}
2019-06-03 18:45:26 +09:00
fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#paths_dot");
2019-06-03 18:45:26 +09:00
let node = Self::path(prev, tokenizer)?;
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Equal(_))
| Ok(Token::NotEqual(_))
| Ok(Token::Little(_))
| Ok(Token::LittleOrEqual(_))
| Ok(Token::Greater(_))
| Ok(Token::GreaterOrEqual(_))
| Ok(Token::And(_))
| Ok(Token::Or(_)) => {
Ok(node)
}
_ => {
2019-06-03 18:45:26 +09:00
Self::paths(node, tokenizer)
2018-12-26 14:45:31 +09:00
}
}
}
2019-06-03 18:45:26 +09:00
fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#path");
2019-06-03 18:45:26 +09:00
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Dot(_)) => {
2019-06-03 18:45:26 +09:00
Self::path_leaves(prev, tokenizer)
2018-12-27 23:46:15 +09:00
}
Ok(Token::Asterisk(_)) => {
2019-06-03 18:45:26 +09:00
Self::path_in_all(prev, tokenizer)
2018-12-27 23:46:15 +09:00
}
Ok(Token::Key(_, _)) => {
2019-06-03 18:45:26 +09:00
Self::path_in_key(prev, tokenizer)
2018-12-27 23:46:15 +09:00
}
Ok(Token::OpenArray(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
Self::array(prev, tokenizer)
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-26 14:45:31 +09:00
}
}
}
2019-06-03 18:45:26 +09:00
fn path_leaves(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#path_leaves");
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Asterisk(_)) => {
2019-06-03 18:45:26 +09:00
Self::path_leaves_all(prev, tokenizer)
2018-12-26 14:45:31 +09:00
}
2019-02-27 12:12:14 +09:00
Ok(Token::OpenArray(_)) => {
2019-06-03 18:45:26 +09:00
let mut leaves_node = Self::node(ParseToken::Leaves);
2019-02-27 12:12:14 +09:00
leaves_node.left = Some(Box::new(prev));
2019-06-03 18:45:26 +09:00
Ok(Self::paths(leaves_node, tokenizer)?)
2019-02-27 12:12:14 +09:00
}
2018-12-27 23:46:15 +09:00
_ => {
2019-06-03 18:45:26 +09:00
Self::path_leaves_key(prev, tokenizer)
2018-12-26 14:45:31 +09:00
}
2018-12-27 23:46:15 +09:00
}
}
2019-06-03 18:45:26 +09:00
fn path_leaves_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#path_leaves_key");
Ok(Node {
token: ParseToken::Leaves,
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::key(tokenizer)?)),
2018-12-27 23:46:15 +09:00
})
}
2019-06-03 18:45:26 +09:00
fn path_leaves_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#path_leaves_all");
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
2018-12-27 23:46:15 +09:00
Ok(Node {
token: ParseToken::Leaves,
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::node(ParseToken::All))),
2018-12-27 23:46:15 +09:00
})
}
2019-06-03 18:45:26 +09:00
fn path_in_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#path_in_all");
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
2018-12-27 23:46:15 +09:00
Ok(Node {
token: ParseToken::In,
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::node(ParseToken::All))),
2018-12-27 23:46:15 +09:00
})
}
2019-06-03 18:45:26 +09:00
fn path_in_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#path_in_key");
Ok(Node {
token: ParseToken::In,
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::key(tokenizer)?)),
2018-12-27 23:46:15 +09:00
})
}
2019-06-03 18:45:26 +09:00
fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#key");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Key(_, v)) => {
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Key(v)))
2018-12-26 14:45:31 +09:00
}
2018-12-27 23:46:15 +09:00
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-26 14:45:31 +09:00
}
}
}
2019-06-03 18:45:26 +09:00
fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2019-06-02 22:03:35 +09:00
debug!("#boolean");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2019-06-02 22:03:35 +09:00
Ok(Token::Key(_, v)) => {
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
2019-06-02 22:03:35 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2019-06-02 22:03:35 +09:00
}
}
}
2019-06-03 18:45:26 +09:00
fn array_quota_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#array_quota_value");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2019-03-11 17:35:15 +09:00
Ok(Token::SingleQuoted(_, val))
| Ok(Token::DoubleQuoted(_, val)) => {
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Key(val)))
2018-12-27 23:46:15 +09:00
}
Err(TokenError::Eof) => {
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Eof))
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
2018-12-26 14:45:31 +09:00
}
}
2019-06-03 18:45:26 +09:00
fn array_start(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#array_start");
2019-06-03 18:45:26 +09:00
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Question(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
2018-12-27 23:46:15 +09:00
Ok(Node {
token: ParseToken::Array,
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::filter(tokenizer)?)),
2018-12-27 23:46:15 +09:00
})
}
Ok(Token::Asterisk(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
2018-12-27 23:46:15 +09:00
Ok(Node {
token: ParseToken::Array,
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::node(ParseToken::All))),
2018-12-27 23:46:15 +09:00
})
}
_ => {
Ok(Node {
token: ParseToken::Array,
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::array_value(tokenizer)?)),
2018-12-27 23:46:15 +09:00
})
}
2018-12-26 14:45:31 +09:00
}
}
2019-06-03 18:45:26 +09:00
fn array(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#array");
2019-06-03 18:45:26 +09:00
let ret = Self::array_start(prev, tokenizer)?;
Self::eat_whitespace(tokenizer);
Self::close_token(ret, Token::CloseArray(DUMMY), tokenizer)
2018-12-27 23:46:15 +09:00
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn array_value_key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#array_value_key");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2019-03-11 17:35:15 +09:00
Ok(Token::Key(pos, ref val)) => {
2019-06-03 18:45:26 +09:00
let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
Self::eat_whitespace(tokenizer);
2018-12-27 23:46:15 +09:00
2019-06-03 18:45:26 +09:00
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Comma(_)) => {
2019-06-03 18:45:26 +09:00
Self::union(digit, tokenizer)
2018-12-26 22:45:54 +09:00
}
2018-12-27 23:46:15 +09:00
Ok(Token::Split(_)) => {
2019-06-03 18:45:26 +09:00
Self::range_from(digit, tokenizer)
2018-12-26 22:45:54 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Number(digit as f64)))
2018-12-26 22:45:54 +09:00
}
2018-12-27 23:46:15 +09:00
}
}
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-26 22:45:54 +09:00
}
}
2018-12-27 23:46:15 +09:00
}
2018-12-26 22:45:54 +09:00
2019-06-03 18:45:26 +09:00
fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#array_value");
2019-06-03 18:45:26 +09:00
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Key(_, _)) => {
2019-06-03 18:45:26 +09:00
Self::array_value_key(tokenizer)
2018-12-27 23:46:15 +09:00
}
Ok(Token::Split(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
Self::range_to(tokenizer)
2018-12-27 23:46:15 +09:00
}
Ok(Token::DoubleQuoted(_, _))
| Ok(Token::SingleQuoted(_, _)) => {
2019-06-03 18:45:26 +09:00
Self::array_quota_value(tokenizer)
2018-12-27 23:46:15 +09:00
}
Err(TokenError::Eof) => {
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Eof))
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
2018-12-26 22:45:54 +09:00
}
2018-12-26 14:45:31 +09:00
}
2019-06-03 18:45:26 +09:00
fn union(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#union");
let mut values = vec![num];
2019-06-03 18:45:26 +09:00
while match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Comma(_)) => true,
_ => false
} {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
match tokenizer.next_token() {
2019-03-11 17:35:15 +09:00
Ok(Token::Key(pos, ref val)) => {
2019-06-03 18:45:26 +09:00
let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
2018-12-27 23:46:15 +09:00
values.push(digit);
}
_ => {
2019-06-03 18:45:26 +09:00
return Err(tokenizer.err_msg());
2018-12-27 23:46:15 +09:00
}
}
2018-12-26 14:45:31 +09:00
}
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Union(values)))
2018-12-27 23:46:15 +09:00
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn range_from(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#range_from");
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Key(_, _)) => {
2019-06-03 18:45:26 +09:00
Self::range(num, tokenizer)
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Range(Some(num), None)))
2018-12-27 23:46:15 +09:00
}
}
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#range_to");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2019-03-11 17:35:15 +09:00
Ok(Token::Key(pos, ref val)) => {
2019-06-03 18:45:26 +09:00
let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
Ok(Self::node(ParseToken::Range(None, Some(digit))))
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
}
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn range(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#range");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2019-03-11 17:35:15 +09:00
Ok(Token::Key(pos, ref val)) => {
2019-06-03 18:45:26 +09:00
let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
Ok(Self::node(ParseToken::Range(Some(num), Some(digit))))
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
}
2018-12-26 14:45:31 +09:00
}
2019-06-03 18:45:26 +09:00
fn filter(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#filter");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::OpenParenthesis(_)) => {
2019-06-03 18:45:26 +09:00
let ret = Self::exprs(tokenizer)?;
Self::eat_whitespace(tokenizer);
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
2018-12-27 23:46:15 +09:00
}
Err(TokenError::Eof) => {
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Eof))
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
}
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn exprs(tokenizer: &mut TokenReader) -> ParseResult<Node> {
Self::eat_whitespace(tokenizer);
2018-12-27 23:46:15 +09:00
debug!("#exprs");
2019-06-03 18:45:26 +09:00
let node = match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::OpenParenthesis(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
2018-12-27 23:46:15 +09:00
trace!("\t-exprs - open_parenthesis");
2019-06-03 18:45:26 +09:00
let ret = Self::exprs(tokenizer)?;
Self::eat_whitespace(tokenizer);
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)?
2018-12-27 23:46:15 +09:00
}
_ => {
trace!("\t-exprs - else");
2019-06-03 18:45:26 +09:00
Self::expr(tokenizer)?
2018-12-27 23:46:15 +09:00
}
};
2019-06-03 18:45:26 +09:00
Self::eat_whitespace(tokenizer);
Self::condition_expr(node, tokenizer)
2018-12-27 23:46:15 +09:00
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn condition_expr(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#condition_expr");
2019-06-03 18:45:26 +09:00
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::And(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
2018-12-27 23:46:15 +09:00
Ok(Node {
token: ParseToken::Filter(FilterToken::And),
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::exprs(tokenizer)?)),
2018-12-27 23:46:15 +09:00
})
}
Ok(Token::Or(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
2018-12-27 23:46:15 +09:00
Ok(Node {
token: ParseToken::Filter(FilterToken::Or),
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::exprs(tokenizer)?)),
2018-12-27 23:46:15 +09:00
})
2018-12-26 14:45:31 +09:00
}
2018-12-27 23:46:15 +09:00
_ => {
Ok(prev)
}
}
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn expr(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#expr");
2019-02-19 08:20:59 +09:00
2019-06-03 18:45:26 +09:00
let has_prop_candidate = match tokenizer.peek_token() {
2019-02-19 08:20:59 +09:00
Ok(Token::At(_)) => true,
_ => false
};
2019-06-03 18:45:26 +09:00
let node = Self::term(tokenizer)?;
Self::eat_whitespace(tokenizer);
2019-02-19 08:20:59 +09:00
2019-06-03 18:45:26 +09:00
if match tokenizer.peek_token() {
2019-02-19 08:20:59 +09:00
Ok(Token::Equal(_))
| Ok(Token::NotEqual(_))
| Ok(Token::Little(_))
| Ok(Token::LittleOrEqual(_))
| Ok(Token::Greater(_))
| Ok(Token::GreaterOrEqual(_)) => true,
_ => false
} {
2019-06-03 18:45:26 +09:00
Self::op(node, tokenizer)
2019-02-19 08:20:59 +09:00
} else if has_prop_candidate {
Ok(node)
} else {
2019-06-03 18:45:26 +09:00
return Err(tokenizer.err_msg());
2019-02-19 08:20:59 +09:00
}
2018-12-27 23:46:15 +09:00
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#term_num");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2019-03-11 17:35:15 +09:00
Ok(Token::Key(pos, val)) => {
2019-06-03 18:45:26 +09:00
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Dot(_)) => {
2019-06-03 18:45:26 +09:00
Self::term_num_float(val.as_str(), tokenizer)
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
let number = utils::string_to_f64(&val, || tokenizer.err_msg_with_pos(pos))?;
Ok(Self::node(ParseToken::Number(number)))
2018-12-27 23:46:15 +09:00
}
}
}
Err(TokenError::Eof) => {
2019-06-03 18:45:26 +09:00
Ok(Self::node(ParseToken::Eof))
2018-12-26 14:45:31 +09:00
}
2018-12-27 23:46:15 +09:00
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
}
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn term_num_float(mut num: &str, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#term_num_float");
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
match tokenizer.next_token() {
2019-03-11 17:35:15 +09:00
Ok(Token::Key(pos, frac)) => {
let mut f = String::new();
f.push_str(&mut num);
2018-12-27 23:46:15 +09:00
f.push('.');
2019-03-11 17:35:15 +09:00
f.push_str(frac.as_str());
2019-06-03 18:45:26 +09:00
let number = utils::string_to_f64(&f, || tokenizer.err_msg_with_pos(pos))?;
Ok(Self::node(ParseToken::Number(number)))
2018-12-26 14:45:31 +09:00
}
2018-12-27 23:46:15 +09:00
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
}
}
2018-12-26 22:45:54 +09:00
2019-06-03 18:45:26 +09:00
fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#term");
2019-06-03 18:45:26 +09:00
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::At(_)) => {
2019-06-03 18:45:26 +09:00
Self::eat_token(tokenizer);
let node = Self::node(ParseToken::Relative);
2018-12-27 23:46:15 +09:00
2019-06-03 18:45:26 +09:00
match tokenizer.peek_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Whitespace(_, _)) => {
2019-06-03 18:45:26 +09:00
Self::eat_whitespace(tokenizer);
2018-12-27 23:46:15 +09:00
Ok(node)
}
_ => {
2019-06-03 18:45:26 +09:00
Self::paths(node, tokenizer)
2018-12-27 23:46:15 +09:00
}
}
2018-12-26 14:45:31 +09:00
}
2018-12-27 23:46:15 +09:00
Ok(Token::Absolute(_)) => {
2019-06-03 18:45:26 +09:00
Self::json_path(tokenizer)
2018-12-27 23:46:15 +09:00
}
Ok(Token::DoubleQuoted(_, _))
| Ok(Token::SingleQuoted(_, _)) => {
2019-06-03 18:45:26 +09:00
Self::array_quota_value(tokenizer)
2018-12-27 23:46:15 +09:00
}
2019-06-02 22:03:35 +09:00
Ok(Token::Key(_, k)) => {
match k.chars().next() {
Some(ch) => match ch {
2019-06-03 18:45:26 +09:00
'-' | '0'...'9' => Self::term_num(tokenizer),
_ => Self::boolean(tokenizer)
2019-06-02 22:03:35 +09:00
}
2019-06-03 18:45:26 +09:00
_ => Err(tokenizer.err_msg())
2019-06-02 22:03:35 +09:00
}
2018-12-27 23:46:15 +09:00
}
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
}
}
2018-12-26 22:45:54 +09:00
2019-06-03 18:45:26 +09:00
fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#op");
2019-06-03 18:45:26 +09:00
let token = match tokenizer.next_token() {
2018-12-27 23:46:15 +09:00
Ok(Token::Equal(_)) => {
ParseToken::Filter(FilterToken::Equal)
}
Ok(Token::NotEqual(_)) => {
ParseToken::Filter(FilterToken::NotEqual)
}
Ok(Token::Little(_)) => {
ParseToken::Filter(FilterToken::Little)
}
Ok(Token::LittleOrEqual(_)) => {
ParseToken::Filter(FilterToken::LittleOrEqual)
}
Ok(Token::Greater(_)) => {
ParseToken::Filter(FilterToken::Greater)
}
Ok(Token::GreaterOrEqual(_)) => {
ParseToken::Filter(FilterToken::GreaterOrEqual)
}
Err(TokenError::Eof) => {
ParseToken::Eof
}
_ => {
2019-06-03 18:45:26 +09:00
return Err(tokenizer.err_msg());
2018-12-27 23:46:15 +09:00
}
};
2018-12-26 22:45:54 +09:00
2019-06-03 18:45:26 +09:00
Self::eat_whitespace(tokenizer);
2018-12-26 22:45:54 +09:00
2018-12-27 23:46:15 +09:00
Ok(Node {
token,
left: Some(Box::new(prev)),
2019-06-03 18:45:26 +09:00
right: Some(Box::new(Self::term(tokenizer)?)),
2018-12-27 23:46:15 +09:00
})
}
2018-12-26 22:45:54 +09:00
2019-06-03 18:45:26 +09:00
fn eat_whitespace(tokenizer: &mut TokenReader) {
while let Ok(Token::Whitespace(_, _)) = tokenizer.peek_token() {
let _ = tokenizer.next_token();
2018-12-26 14:45:31 +09:00
}
}
2019-06-03 18:45:26 +09:00
fn eat_token(tokenizer: &mut TokenReader) {
let _ = tokenizer.next_token();
2018-12-27 23:46:15 +09:00
}
2018-12-26 14:45:31 +09:00
2019-06-03 18:45:26 +09:00
fn node(token: ParseToken) -> Node {
Node { left: None, right: None, token }
2018-12-27 23:46:15 +09:00
}
2019-06-03 18:45:26 +09:00
fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
2018-12-27 23:46:15 +09:00
debug!("#close_token");
2019-06-03 18:45:26 +09:00
match tokenizer.next_token() {
2018-12-27 23:46:15 +09:00
Ok(ref t) if t.partial_eq(token) => {
Ok(ret)
}
2019-02-19 08:20:59 +09:00
_ => {
2019-06-03 18:45:26 +09:00
Err(tokenizer.err_msg())
2018-12-27 23:46:15 +09:00
}
}
2018-12-26 14:45:31 +09:00
}
2018-12-27 23:46:15 +09:00
}
2018-12-26 14:45:31 +09:00
2019-02-19 08:20:59 +09:00
pub trait NodeVisitor {
2019-06-02 22:03:35 +09:00
fn visit(&mut self, node: &Node) {
match &node.token {
2018-12-27 23:46:15 +09:00
ParseToken::Absolute
| ParseToken::Relative
| ParseToken::All
2019-06-02 22:03:35 +09:00
| ParseToken::Key(_)
| ParseToken::Range(_, _)
| ParseToken::Union(_)
| ParseToken::Number(_)
| ParseToken::Bool(_) => {
self.visit_token(&node.token);
2018-12-27 23:46:15 +09:00
}
ParseToken::In
| ParseToken::Leaves => {
2019-06-02 22:03:35 +09:00
match &node.left {
Some(n) => self.visit(&*n),
_ => {}
}
self.visit_token(&node.token);
match &node.right {
Some(n) => self.visit(&*n),
_ => {}
}
2018-12-27 23:46:15 +09:00
}
2019-06-02 22:03:35 +09:00
ParseToken::Array => {
match &node.left {
Some(n) => self.visit(&*n),
_ => {}
}
self.visit_token(&node.token);
2018-12-26 22:45:54 +09:00
2019-06-02 22:03:35 +09:00
match &node.right {
Some(n) => self.visit(&*n),
_ => {}
}
self.visit_token(&ParseToken::ArrayEof);
2018-12-27 23:46:15 +09:00
}
ParseToken::Filter(FilterToken::And)
| ParseToken::Filter(FilterToken::Or) => {
2019-06-02 22:03:35 +09:00
match &node.left {
Some(n) => self.visit(&*n),
_ => {}
}
match &node.right {
Some(n) => self.visit(&*n),
_ => {}
}
self.visit_token(&node.token);
2018-12-27 23:46:15 +09:00
}
ParseToken::Filter(_) => {
2019-06-02 22:03:35 +09:00
match &node.left {
Some(n) => self.visit(&*n),
_ => {}
}
2019-02-22 00:22:30 +09:00
self.end_term();
2019-06-02 22:03:35 +09:00
match &node.right {
Some(n) => self.visit(&*n),
_ => {}
}
2019-02-22 00:22:30 +09:00
self.end_term();
2019-06-02 22:03:35 +09:00
self.visit_token(&node.token);
2018-12-27 23:46:15 +09:00
}
_ => {}
2018-12-26 14:45:31 +09:00
}
}
2019-02-19 08:20:59 +09:00
2019-06-02 22:03:35 +09:00
fn visit_token(&mut self, token: &ParseToken);
2019-02-22 00:22:30 +09:00
fn end_term(&mut self) {}
2018-12-26 14:45:31 +09:00
}