// jsonpath/src/jsonpath/parser.rs
use std::result;

use super::tokenizer::{
    Token,
    PreloadedTokenizer,
    TokenError,
};
use super::utils;

const DUMMY: usize = 0;

type Result<T> = result::Result<T, String>;
#[derive(Debug, PartialEq)]
pub enum ParseToken {
    // '$'
    Absolute,
    // '@'
    Relative,
    // '.'
    In,
    // '..'
    Leaves,
    // '*'
    All,

    Key(String),
    // []
    Array,
    // meta token
    ArrayEof,
    // ?( filter )
    Filter(FilterToken),
    // 1 : 2
    Range(Option<isize>, Option<isize>),
    // 1, 2, 3
    Union(Vec<isize>),
    Number(f64),
    Eof,
}
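
// Example: the path "$.a[0]" is flattened to the token stream
//   Absolute, In, Key("a"), Array, Number(0.0), ArrayEof
// (see the parse_array test at the bottom of this file).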
#[derive(Debug, PartialEq)]
pub enum FilterToken {
    // '=='
    Equal,
    // '!='
    NotEqual,
    // '<'
    Little,
    // '<='
    LittleOrEqual,
    // '>'
    Greater,
    // '>='
    GreaterOrEqual,
    // '&&'
    And,
    // '||'
    Or,
}
#[derive(Debug)]
pub struct Node {
    left: Option<Box<Node>>,
    right: Option<Box<Node>>,
    token: ParseToken,
}

pub struct Parser<'a> {
    tokenizer: PreloadedTokenizer<'a>
}
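
// Recursive-descent parser over PreloadedTokenizer. Informal grammar sketch,
// reconstructed from the methods below for orientation (not an authoritative spec):
//
//   json_path := '$' paths
//   paths     := ( '.' path | '[' array ']' )*
//   path      := '.' ( '*' | key )   (the '..'/Leaves form)
//              | '*' | key | '[' array ']'
//   array     := '?' filter | '*' | quoted key | number | union | range
//   filter    := '(' exprs ')'
//   exprs     := ( '(' exprs ')' | expr ) ( ('&&' | '||') exprs )?
//   expr      := term ( ('==' | '!=' | '<' | '<=' | '>' | '>=') term )?
//   term      := '@' paths | json_path | quoted key | number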
impl<'a> Parser<'a> {
pub fn new(input: &'a str) -> Self {
Parser { tokenizer: PreloadedTokenizer::new(input) }
}
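
    /// Parses the input and feeds the resulting flat token stream to `visitor`.
    ///
    /// A minimal usage sketch; `TokenCollector` here is a hypothetical visitor,
    /// not part of this module:
    ///
    /// ```ignore
    /// struct TokenCollector { tokens: Vec<ParseToken> }
    ///
    /// impl NodeVisitor for TokenCollector {
    ///     fn visit_token(&mut self, token: ParseToken) {
    ///         self.tokens.push(token);
    ///     }
    /// }
    ///
    /// let mut collector = TokenCollector { tokens: Vec::new() };
    /// Parser::new("$.store.book[0].title").parse(&mut collector)?;
    /// ```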
pub fn parse<V: NodeVisitor>(&mut self, visitor: &mut V) -> Result<()> {
let node = self.json_path()?;
visitor.visit(node);
Ok(())
}
fn json_path(&mut self) -> Result<Node> {
debug!("#json_path");
match self.tokenizer.next_token() {
Ok(Token::Absolute(_)) => {
let node = self.node(ParseToken::Absolute);
self.paths(node)
}
_ => {
Err(self.tokenizer.err_msg())
}
        }
    }
fn paths(&mut self, prev: Node) -> Result<Node> {
debug!("#paths");
match self.tokenizer.peek_token() {
Ok(Token::Dot(_)) => {
self.eat_token();
self.paths_dot(prev)
            }
Ok(Token::OpenArray(_)) => {
self.eat_token();
self.eat_whitespace();
let node = self.array(prev)?;
self.paths(node)
            }
_ => {
Ok(prev)
}
}
}
fn paths_dot(&mut self, prev: Node) -> Result<Node> {
debug!("#paths_dot");
let node = self.path(prev)?;
match self.tokenizer.peek_token() {
Ok(Token::Equal(_))
| Ok(Token::NotEqual(_))
| Ok(Token::Little(_))
| Ok(Token::LittleOrEqual(_))
| Ok(Token::Greater(_))
| Ok(Token::GreaterOrEqual(_))
| Ok(Token::And(_))
| Ok(Token::Or(_)) => {
Ok(node)
}
_ => {
self.paths(node)
}
}
}
fn path(&mut self, prev: Node) -> Result<Node> {
debug!("#path");
match self.tokenizer.peek_token() {
Ok(Token::Dot(_)) => {
self.path_leaves(prev)
}
Ok(Token::Asterisk(_)) => {
self.path_in_all(prev)
}
Ok(Token::Key(_, _)) => {
self.path_in_key(prev)
}
Ok(Token::OpenArray(_)) => {
self.eat_token();
self.array(prev)
}
_ => {
Err(self.tokenizer.err_msg())
}
}
}
fn path_leaves(&mut self, prev: Node) -> Result<Node> {
debug!("#path_leaves");
self.eat_token();
match self.tokenizer.peek_token() {
Ok(Token::Asterisk(_)) => {
self.path_leaves_all(prev)
            }
_ => {
self.path_leaves_key(prev)
            }
}
}
fn path_leaves_key(&mut self, prev: Node) -> Result<Node> {
debug!("#path_leaves_key");
Ok(Node {
token: ParseToken::Leaves,
left: Some(Box::new(prev)),
right: Some(Box::new(self.key()?)),
})
}
fn path_leaves_all(&mut self, prev: Node) -> Result<Node> {
debug!("#path_leaves_all");
self.eat_token();
Ok(Node {
token: ParseToken::Leaves,
left: Some(Box::new(prev)),
right: Some(Box::new(self.node(ParseToken::All))),
})
}
fn path_in_all(&mut self, prev: Node) -> Result<Node> {
debug!("#path_in_all");
self.eat_token();
Ok(Node {
token: ParseToken::In,
left: Some(Box::new(prev)),
right: Some(Box::new(self.node(ParseToken::All))),
})
}
fn path_in_key(&mut self, prev: Node) -> Result<Node> {
debug!("#path_in_key");
Ok(Node {
token: ParseToken::In,
left: Some(Box::new(prev)),
right: Some(Box::new(self.key()?)),
})
}
fn key(&mut self) -> Result<Node> {
debug!("#key");
match self.tokenizer.next_token() {
Ok(Token::Key(_, v)) => {
Ok(self.node(ParseToken::Key(utils::vec_to_string(&v))))
            }
_ => {
Err(self.tokenizer.err_msg())
}
}
}
fn array_quota_value(&mut self) -> Result<Node> {
debug!("#array_quota_value");
match self.tokenizer.next_token() {
Ok(Token::SingleQuoted(_, ref vec))
| Ok(Token::DoubleQuoted(_, ref vec)) => {
Ok(self.node(ParseToken::Key(utils::vec_to_string(vec))))
}
Err(TokenError::Eof) => {
Ok(self.node(ParseToken::Eof))
}
_ => {
Err(self.tokenizer.err_msg())
}
}
}
fn array_start(&mut self, prev: Node) -> Result<Node> {
debug!("#array_start");
match self.tokenizer.peek_token() {
Ok(Token::Question(_)) => {
self.eat_token();
Ok(Node {
token: ParseToken::Array,
left: Some(Box::new(prev)),
right: Some(Box::new(self.filter()?)),
})
}
Ok(Token::Asterisk(_)) => {
self.eat_token();
Ok(Node {
token: ParseToken::Array,
left: Some(Box::new(prev)),
right: Some(Box::new(self.node(ParseToken::All))),
})
}
_ => {
Ok(Node {
token: ParseToken::Array,
left: Some(Box::new(prev)),
right: Some(Box::new(self.array_value()?)),
})
}
}
}
fn array(&mut self, prev: Node) -> Result<Node> {
debug!("#array");
let ret = self.array_start(prev)?;
self.eat_whitespace();
self.close_token(ret, Token::CloseArray(DUMMY))
}
fn array_value_key(&mut self) -> Result<Node> {
debug!("#array_value_key");
match self.tokenizer.next_token() {
Ok(Token::Key(pos, ref vec)) => {
let digit = utils::vec_to_int(vec,
|| self.tokenizer.err_msg_with_pos(pos))?;
self.eat_whitespace();
match self.tokenizer.peek_token() {
Ok(Token::Comma(_)) => {
self.union(digit)
                    }
Ok(Token::Split(_)) => {
self.range_from(digit)
}
_ => {
                        Ok(self.node(ParseToken::Number(digit as f64)))
                    }
}
}
_ => {
Err(self.tokenizer.err_msg())
}
}
}
fn array_value(&mut self) -> Result<Node> {
debug!("#array_value");
match self.tokenizer.peek_token() {
Ok(Token::Key(_, _)) => {
self.array_value_key()
}
Ok(Token::Split(_)) => {
self.eat_token();
self.range_to()
}
Ok(Token::DoubleQuoted(_, _))
| Ok(Token::SingleQuoted(_, _)) => {
self.array_quota_value()
}
Err(TokenError::Eof) => {
Ok(self.node(ParseToken::Eof))
}
_ => {
self.eat_token();
Err(self.tokenizer.err_msg())
}
        }
    }
fn union(&mut self, num: isize) -> Result<Node> {
debug!("#union");
let mut values = vec![num];
while match self.tokenizer.peek_token() {
Ok(Token::Comma(_)) => true,
_ => false
} {
self.eat_token();
self.eat_whitespace();
match self.tokenizer.next_token() {
Ok(Token::Key(pos, ref vec)) => {
let digit = utils::vec_to_int(vec,
|| self.tokenizer.err_msg_with_pos(pos))?;
values.push(digit);
}
_ => {
return Err(self.tokenizer.err_msg());
}
}
        }

        Ok(self.node(ParseToken::Union(values)))
    }
fn range_from(&mut self, num: isize) -> Result<Node> {
debug!("#range_from");
self.eat_token();
self.eat_whitespace();
match self.tokenizer.peek_token() {
Ok(Token::Key(_, _)) => {
self.range(num)
}
_ => {
Ok(self.node(ParseToken::Range(Some(num), None)))
}
}
}
fn range_to(&mut self) -> Result<Node> {
debug!("#range_to");
match self.tokenizer.next_token() {
Ok(Token::Key(pos, ref vec)) => {
let digit = utils::vec_to_int(vec,
|| self.tokenizer.err_msg_with_pos(pos))?;
Ok(self.node(ParseToken::Range(None, Some(digit))))
}
_ => {
Err(self.tokenizer.err_msg())
}
}
}
fn range(&mut self, num: isize) -> Result<Node> {
debug!("#range");
match self.tokenizer.next_token() {
Ok(Token::Key(pos, ref vec)) => {
let digit = utils::vec_to_int(vec,
|| self.tokenizer.err_msg_with_pos(pos))?;
Ok(self.node(ParseToken::Range(Some(num), Some(digit))))
}
_ => {
Err(self.tokenizer.err_msg())
}
}
    }
fn filter(&mut self) -> Result<Node> {
debug!("#filter");
match self.tokenizer.next_token() {
Ok(Token::OpenParenthesis(_)) => {
let ret = self.exprs()?;
self.eat_whitespace();
self.close_token(ret, Token::CloseParenthesis(DUMMY))
}
Err(TokenError::Eof) => {
Ok(self.node(ParseToken::Eof))
}
_ => {
Err(self.tokenizer.err_msg())
}
}
}
fn exprs(&mut self) -> Result<Node> {
self.eat_whitespace();
debug!("#exprs");
let node = match self.tokenizer.peek_token() {
Ok(Token::OpenParenthesis(_)) => {
self.eat_token();
trace!("\t-exprs - open_parenthesis");
let ret = self.exprs()?;
self.eat_whitespace();
self.close_token(ret, Token::CloseParenthesis(DUMMY))?
}
_ => {
trace!("\t-exprs - else");
self.expr()?
}
};
self.eat_whitespace();
self.condition_expr(node)
}
fn condition_expr(&mut self, prev: Node) -> Result<Node> {
debug!("#condition_expr");
match self.tokenizer.peek_token() {
Ok(Token::And(_)) => {
self.eat_token();
Ok(Node {
token: ParseToken::Filter(FilterToken::And),
left: Some(Box::new(prev)),
right: Some(Box::new(self.exprs()?)),
})
}
Ok(Token::Or(_)) => {
self.eat_token();
Ok(Node {
token: ParseToken::Filter(FilterToken::Or),
left: Some(Box::new(prev)),
right: Some(Box::new(self.exprs()?)),
})
            }
_ => {
Ok(prev)
}
}
}
fn expr(&mut self) -> Result<Node> {
debug!("#expr");
let has_prop_candidate = match self.tokenizer.peek_token() {
Ok(Token::At(_)) => true,
_ => false
};
let node = self.term()?;
self.eat_whitespace();
if match self.tokenizer.peek_token() {
Ok(Token::Equal(_))
| Ok(Token::NotEqual(_))
| Ok(Token::Little(_))
| Ok(Token::LittleOrEqual(_))
| Ok(Token::Greater(_))
| Ok(Token::GreaterOrEqual(_)) => true,
_ => false
} {
self.op(node)
} else if has_prop_candidate {
Ok(node)
} else {
return Err(self.tokenizer.err_msg());
}
    }
fn term_num(&mut self) -> Result<Node> {
debug!("#term_num");
match self.tokenizer.next_token() {
Ok(Token::Key(pos, mut vec)) => {
match self.tokenizer.peek_token() {
Ok(Token::Dot(_)) => {
self.term_num_float(&mut vec)
}
_ => {
let number = utils::vec_to_float(&vec,
|| self.tokenizer.err_msg_with_pos(pos))?;
Ok(self.node(ParseToken::Number(number)))
}
}
}
Err(TokenError::Eof) => {
Ok(self.node(ParseToken::Eof))
            }
_ => {
Err(self.tokenizer.err_msg())
}
}
}
fn term_num_float(&mut self, mut num: &mut Vec<char>) -> Result<Node> {
debug!("#term_num_float");
self.eat_token();
match self.tokenizer.next_token() {
Ok(Token::Key(pos, mut frac)) => {
let mut f = vec![];
f.append(&mut num);
f.push('.');
f.append(&mut frac);
let number = utils::vec_to_float(&f,
|| self.tokenizer.err_msg_with_pos(pos))?;
Ok(self.node(ParseToken::Number(number)))
            }
_ => {
Err(self.tokenizer.err_msg())
}
}
}
fn term(&mut self) -> Result<Node> {
debug!("#term");
match self.tokenizer.peek_token() {
Ok(Token::At(_)) => {
self.eat_token();
let node = self.node(ParseToken::Relative);
match self.tokenizer.peek_token() {
Ok(Token::Whitespace(_, _)) => {
self.eat_whitespace();
Ok(node)
}
_ => {
self.paths(node)
}
}
            }
Ok(Token::Absolute(_)) => {
self.json_path()
}
Ok(Token::DoubleQuoted(_, _))
| Ok(Token::SingleQuoted(_, _)) => {
self.array_quota_value()
}
Ok(Token::Key(_, _)) => {
self.term_num()
}
_ => {
Err(self.tokenizer.err_msg())
}
}
}
fn op(&mut self, prev: Node) -> Result<Node> {
debug!("#op");
let token = match self.tokenizer.next_token() {
Ok(Token::Equal(_)) => {
ParseToken::Filter(FilterToken::Equal)
}
Ok(Token::NotEqual(_)) => {
ParseToken::Filter(FilterToken::NotEqual)
}
Ok(Token::Little(_)) => {
ParseToken::Filter(FilterToken::Little)
}
Ok(Token::LittleOrEqual(_)) => {
ParseToken::Filter(FilterToken::LittleOrEqual)
}
Ok(Token::Greater(_)) => {
ParseToken::Filter(FilterToken::Greater)
}
Ok(Token::GreaterOrEqual(_)) => {
ParseToken::Filter(FilterToken::GreaterOrEqual)
}
Err(TokenError::Eof) => {
ParseToken::Eof
}
_ => {
return Err(self.tokenizer.err_msg());
}
};
        self.eat_whitespace();
Ok(Node {
token,
left: Some(Box::new(prev)),
right: Some(Box::new(self.term()?)),
})
}
fn eat_whitespace(&mut self) {
while let Ok(Token::Whitespace(_, _)) = self.tokenizer.peek_token() {
let _ = self.tokenizer.next_token();
        }
    }
fn eat_token(&mut self) {
let _ = self.tokenizer.next_token();
}
fn node(&mut self, token: ParseToken) -> Node {
        Node { left: None, right: None, token }
}
fn close_token(&mut self, ret: Node, token: Token) -> Result<Node> {
debug!("#close_token");
match self.tokenizer.next_token() {
Ok(ref t) if t.partial_eq(token) => {
Ok(ret)
}
Err(TokenError::Eof) => {
Ok(ret)
}
            _ => {
Err(self.tokenizer.err_msg())
}
}
}
}
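
// NodeVisitor flattens the AST produced by Parser into a stream of ParseTokens.
// Emission order differs by node kind: In/Leaves/Array nodes are emitted infix
// (left, token, right; Array additionally appends ArrayEof), while filter nodes
// are emitted postfix (left operand, right operand, then the operator), e.g.
// "$.a[?(1>2)]" yields ... Array, Number(1.0), Number(2.0), Filter(Greater), ArrayEof,
// as the tests below demonstrate.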
pub trait NodeVisitor {
    fn visit(&mut self, node: Node) {
        match node.token {
            ParseToken::Absolute
            | ParseToken::Relative
            | ParseToken::All
            | ParseToken::Key(_) => {
                self.visit_token(node.token);
            }
            ParseToken::In
            | ParseToken::Leaves => {
                node.left.map(|n| self.visit(*n));
                self.visit_token(node.token);
                node.right.map(|n| self.visit(*n));
            }
            ParseToken::Range(_, _)
            | ParseToken::Union(_)
            | ParseToken::Number(_) => {
                self.visit_token(node.token);
            }
            ParseToken::Array => {
                node.left.map(|n| self.visit(*n));
                self.visit_token(node.token);
                node.right.map(|n| self.visit(*n));
                self.visit_token(ParseToken::ArrayEof);
            }
            ParseToken::Filter(FilterToken::And)
            | ParseToken::Filter(FilterToken::Or) => {
                node.left.map(|n| self.visit(*n));
                node.right.map(|n| self.visit(*n));
                self.visit_token(node.token);
            }
            ParseToken::Filter(_) => {
                node.left.map(|n| self.visit(*n));
                self.clean_filter_context();
                node.right.map(|n| self.visit(*n));
                self.clean_filter_context();
                self.visit_token(node.token);
            }
            _ => {}
        }
    }

    fn visit_token(&mut self, token: ParseToken);
    fn clean_filter_context(&mut self) {}
}
#[cfg(test)]
mod tests {
extern crate env_logger;
    use std::sync::{Once, ONCE_INIT};

    use super::*;

    static INIT: Once = ONCE_INIT;
struct NodeVisitorTestImpl<'a> {
input: &'a str,
stack: Vec<ParseToken>,
}
impl<'a> NodeVisitorTestImpl<'a> {
fn new(input: &'a str) -> Self {
NodeVisitorTestImpl { input, stack: Vec::new() }
}
fn visit(&mut self) -> result::Result<Vec<ParseToken>, String> {
let tokenizer = PreloadedTokenizer::new(self.input);
let mut parser = Parser { tokenizer };
parser.parse(self)?;
Ok(self.stack.split_off(0))
}
}
impl<'a> NodeVisitor for NodeVisitorTestImpl<'a> {
fn visit_token(&mut self, token: ParseToken) {
self.stack.push(token);
}
}
fn setup() {
INIT.call_once(|| {
env_logger::init();
});
}
fn run(input: &str) -> result::Result<Vec<ParseToken>, String> {
let mut interpreter = NodeVisitorTestImpl::new(input);
interpreter.visit()
}
#[test]
fn parse_path() {
setup();
assert_eq!(run("$.aa"), Ok(vec![
ParseToken::Absolute,
ParseToken::In,
ParseToken::Key("aa".to_owned())
]));
assert_eq!(run("$.00.a"), Ok(vec![
ParseToken::Absolute,
ParseToken::In,
ParseToken::Key("00".to_owned()),
ParseToken::In,
ParseToken::Key("a".to_owned())
]));
assert_eq!(run("$.00.韓창.seok"), Ok(vec![
ParseToken::Absolute,
ParseToken::In,
ParseToken::Key("00".to_owned()),
ParseToken::In,
ParseToken::Key("韓창".to_owned()),
ParseToken::In,
ParseToken::Key("seok".to_owned())
]));
assert_eq!(run("$.*"), Ok(vec![
ParseToken::Absolute,
ParseToken::In,
ParseToken::All
]));
assert_eq!(run("$..*"), Ok(vec![
ParseToken::Absolute,
ParseToken::Leaves,
ParseToken::All
]));
match run("$.") {
Ok(_) => panic!(),
_ => {}
}
match run("$..") {
Ok(_) => panic!(),
_ => {}
}
match run("$. a") {
Ok(_) => panic!(),
_ => {}
}
}
#[test]
fn parse_array() {
setup();
assert_eq!(run("$.book[?(@.isbn)]"), Ok(vec![
ParseToken::Absolute,
ParseToken::In,
ParseToken::Key("book".to_string()),
ParseToken::Array,
ParseToken::Relative,
ParseToken::In,
ParseToken::Key("isbn".to_string()),
ParseToken::ArrayEof
]));
        //
        // An array access is also treated as an `In` context, so when the two are
        // nested (e.g. "$.[*]") only one context is emitted.
        //
assert_eq!(run("$.[*]"), Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::All,
ParseToken::ArrayEof
]));
assert_eq!(run("$.a[*]"), Ok(vec![
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Array,
ParseToken::All,
ParseToken::ArrayEof
]));
assert_eq!(run("$.a[*].가"), Ok(vec![
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Array,
ParseToken::All,
ParseToken::ArrayEof,
            ParseToken::In, ParseToken::Key("가".to_owned())
]));
assert_eq!(run("$.a[0][1]"), Ok(vec![
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Array,
ParseToken::Number(0_f64),
ParseToken::ArrayEof,
ParseToken::Array,
ParseToken::Number(1_f64),
ParseToken::ArrayEof
]));
assert_eq!(run("$.a[1,2]"), Ok(vec![
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Array,
ParseToken::Union(vec![1, 2]),
ParseToken::ArrayEof
]));
assert_eq!(run("$.a[10:]"), Ok(vec![
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Array,
ParseToken::Range(Some(10), None),
ParseToken::ArrayEof
]));
assert_eq!(run("$.a[:11]"), Ok(vec![
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Array,
ParseToken::Range(None, Some(11)),
ParseToken::ArrayEof
]));
assert_eq!(run("$.a[-12:13]"), Ok(vec![
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Array,
ParseToken::Range(Some(-12), Some(13)),
ParseToken::ArrayEof
]));
assert_eq!(run("$.a[?(1>2)]"), Ok(vec![
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Array,
ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Greater),
ParseToken::ArrayEof
]));
assert_eq!(run("$.a[?($.b>3)]"), Ok(vec![
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Array,
ParseToken::Absolute, ParseToken::In, ParseToken::Key("b".to_owned()), ParseToken::Number(3_f64), ParseToken::Filter(FilterToken::Greater),
ParseToken::ArrayEof
]));
assert_eq!(run("$[?($.c>@.d && 1==2)]"), Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
ParseToken::Filter(FilterToken::Greater),
ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
ParseToken::Filter(FilterToken::And),
ParseToken::ArrayEof
]));
assert_eq!(run("$[?($.c>@.d&&(1==2||3>=4))]"), Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
ParseToken::Filter(FilterToken::Greater),
ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
ParseToken::Number(3_f64), ParseToken::Number(4_f64), ParseToken::Filter(FilterToken::GreaterOrEqual),
ParseToken::Filter(FilterToken::Or),
ParseToken::Filter(FilterToken::And),
ParseToken::ArrayEof
]));
assert_eq!(run("$[?(@.a<@.b)]"), Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::Relative, ParseToken::In, ParseToken::Key("a".to_owned()),
ParseToken::Relative, ParseToken::In, ParseToken::Key("b".to_owned()),
ParseToken::Filter(FilterToken::Little),
ParseToken::ArrayEof
]));
assert_eq!(run("$[*][*][*]"), Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::All,
ParseToken::ArrayEof,
ParseToken::Array,
ParseToken::All,
ParseToken::ArrayEof,
ParseToken::Array,
ParseToken::All,
ParseToken::ArrayEof
]));
assert_eq!(run("$['a']['bb']"), Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::Key("a".to_string()),
ParseToken::ArrayEof,
ParseToken::Array,
ParseToken::Key("bb".to_string()),
ParseToken::ArrayEof
]));
match run("$[]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[a]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[?($.a)]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[?(@.a > @.b]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[?(@.a < @.b&&(@.c < @.d)]") {
Ok(_) => panic!(),
_ => {}
}
}
#[test]
fn parse_array_float() {
setup();
assert_eq!(run("$[?(1.1<2.1)]"), Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::Number(1.1), ParseToken::Number(2.1), ParseToken::Filter(FilterToken::Little),
ParseToken::ArrayEof
]));
match run("$[1.1]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[?(1.1<.2)]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[?(1.1<2.)]") {
Ok(_) => panic!(),
_ => {}
}
match run("$[?(1.1<2.a)]") {
Ok(_) => panic!(),
_ => {}
}
}
}