Remove Parser's lifetime

freestrings 2019-06-03 18:45:26 +09:00
parent 498f2ce4f4
commit ac3224892b
7 changed files with 198 additions and 209 deletions
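The change is mechanical but touches every caller: `Parser` loses its lifetime parameter along with the `PreloadedTokenizer` it used to own, the tokenizer type is renamed to `TokenReader`, and parsing becomes an associated function that is handed the reader explicitly. At each call site the old construct-then-compile two-step collapses into a single call; a minimal before/after sketch (the "$.a" path is only an illustrative input):

    // before: Parser<'a> borrows the path string for its whole lifetime
    let mut parser = Parser::new("$.a");
    let node = parser.compile();

    // after: a stateless associated function; the borrow ends with the call
    let node = Parser::compile("$.a");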


@@ -39,8 +39,7 @@ pub struct SelectorCls {
 impl SelectorCls {
     fn path(&mut self, path: &str) {
-        let mut parser = Parser::new(path);
-        let node = match parser.compile() {
+        let node = match Parser::compile(path) {
             Ok(node) => node,
             Err(e) => panic!("{:?}", e)
         };
@@ -82,8 +81,7 @@ declare_types! {
 pub class JsCompileFn for SelectorCls {
     init(mut ctx) {
         let path = ctx.argument::<JsString>(0)?.value();
-        let mut parser = Parser::new(path.as_str());
-        let node = match parser.compile() {
+        let node = match Parser::compile(path.as_str()) {
            Ok(node) => node,
            Err(e) => panic!("{:?}", e)
        };


@@ -170,8 +170,7 @@ pub use parser::parser::{Node, Parser};
 /// ]);
 /// ```
 pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPathError> {
-    let mut parser = Parser::new(path);
-    let node = parser.compile();
+    let node = Parser::compile(path);
     move |json| {
         let mut selector = Selector::new();
         match &node {
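Since `Parser::compile(path)` now yields the `ParseResult<Node>` directly, the helper above can move the parsed node into the closure it returns and re-use it on every evaluation. A hedged usage sketch of that public `compile` helper, assuming it is exposed at the crate root as `jsonpath::compile` and fed a `serde_json::Value`:

    // Sketch only; the JSON document and path are illustrative.
    let json: serde_json::Value =
        serde_json::from_str(r#"{"friends": [{"name": "A"}]}"#).unwrap();
    let mut first_name = jsonpath::compile("$.friends[0].name");
    let found = first_name(&json); // Result<Vec<&Value>, JsonPathError>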


@@ -17,8 +17,7 @@ mod parser_tests {
     }
     fn start(&mut self) -> Result<Vec<ParseToken>, String> {
-        let mut parser = Parser::new(self.input);
-        let node = parser.compile()?;
+        let node = Parser::compile(self.input)?;
         self.visit(&node);
         Ok(self.stack.split_off(0))
     }
@@ -330,7 +329,7 @@ mod parser_tests {
 #[cfg(test)]
 mod tokenizer_tests {
-    use parser::tokenizer::{Token, TokenError, Tokenizer, PreloadedTokenizer};
+    use parser::tokenizer::{Token, TokenError, Tokenizer, TokenReader};
     fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
         let mut tokenizer = Tokenizer::new(input);
@@ -350,7 +349,7 @@ mod tokenizer_tests {
     #[test]
     fn peek() {
-        let mut tokenizer = PreloadedTokenizer::new("$.a");
+        let mut tokenizer = TokenReader::new("$.a");
         match tokenizer.next_token() {
             Ok(t) => assert_eq!(Token::Absolute(0), t),
             _ => panic!()


@@ -73,44 +73,40 @@ pub struct Node {
     token: ParseToken,
 }
-pub struct Parser<'a> {
-    tokenizer: PreloadedTokenizer<'a>
-}
+pub struct Parser;
-impl<'a> Parser<'a> {
-    pub fn new(input: &'a str) -> Self {
-        Parser { tokenizer: PreloadedTokenizer::new(input) }
-    }
-    pub fn compile(&mut self) -> ParseResult<Node> {
-        Ok(self.json_path()?)
-    }
-    fn json_path(&mut self) -> ParseResult<Node> {
+impl Parser {
+    pub fn compile(input: &str) -> ParseResult<Node> {
+        let mut tokenizer = TokenReader::new(input);
+        Ok(Self::json_path(&mut tokenizer)?)
+    }
+    fn json_path(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#json_path");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Absolute(_)) => {
-                let node = self.node(ParseToken::Absolute);
-                self.paths(node)
+                let node = Self::node(ParseToken::Absolute);
+                Self::paths(node, tokenizer)
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
-    fn paths(&mut self, prev: Node) -> ParseResult<Node> {
+    fn paths(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#paths");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::Dot(_)) => {
-                self.eat_token();
-                self.paths_dot(prev)
+                Self::eat_token(tokenizer);
+                Self::paths_dot(prev, tokenizer)
             }
             Ok(Token::OpenArray(_)) => {
-                self.eat_token();
-                self.eat_whitespace();
-                let node = self.array(prev)?;
-                self.paths(node)
+                Self::eat_token(tokenizer);
+                Self::eat_whitespace(tokenizer);
+                let node = Self::array(prev, tokenizer)?;
+                Self::paths(node, tokenizer)
            }
            _ => {
                Ok(prev)
@@ -118,10 +114,10 @@ impl<'a> Parser<'a> {
         }
     }
-    fn paths_dot(&mut self, prev: Node) -> ParseResult<Node> {
+    fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#paths_dot");
-        let node = self.path(prev)?;
-        match self.tokenizer.peek_token() {
+        let node = Self::path(prev, tokenizer)?;
+        match tokenizer.peek_token() {
             Ok(Token::Equal(_))
             | Ok(Token::NotEqual(_))
             | Ok(Token::Little(_))
@@ -133,331 +129,331 @@ impl<'a> Parser<'a> {
                 Ok(node)
             }
             _ => {
-                self.paths(node)
+                Self::paths(node, tokenizer)
             }
         }
     }
-    fn path(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::Dot(_)) => {
-                self.path_leaves(prev)
+                Self::path_leaves(prev, tokenizer)
             }
             Ok(Token::Asterisk(_)) => {
-                self.path_in_all(prev)
+                Self::path_in_all(prev, tokenizer)
             }
             Ok(Token::Key(_, _)) => {
-                self.path_in_key(prev)
+                Self::path_in_key(prev, tokenizer)
             }
             Ok(Token::OpenArray(_)) => {
-                self.eat_token();
-                self.array(prev)
+                Self::eat_token(tokenizer);
+                Self::array(prev, tokenizer)
            }
            _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn path_leaves(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_leaves(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_leaves");
-        self.eat_token();
-        match self.tokenizer.peek_token() {
+        Self::eat_token(tokenizer);
+        match tokenizer.peek_token() {
             Ok(Token::Asterisk(_)) => {
-                self.path_leaves_all(prev)
+                Self::path_leaves_all(prev, tokenizer)
             }
             Ok(Token::OpenArray(_)) => {
-                let mut leaves_node = self.node(ParseToken::Leaves);
+                let mut leaves_node = Self::node(ParseToken::Leaves);
                 leaves_node.left = Some(Box::new(prev));
-                Ok(self.paths(leaves_node)?)
+                Ok(Self::paths(leaves_node, tokenizer)?)
             }
             _ => {
-                self.path_leaves_key(prev)
+                Self::path_leaves_key(prev, tokenizer)
            }
        }
    }
-    fn path_leaves_key(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_leaves_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_leaves_key");
         Ok(Node {
             token: ParseToken::Leaves,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.key()?)),
+            right: Some(Box::new(Self::key(tokenizer)?)),
         })
     }
-    fn path_leaves_all(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_leaves_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_leaves_all");
-        self.eat_token();
+        Self::eat_token(tokenizer);
         Ok(Node {
             token: ParseToken::Leaves,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.node(ParseToken::All))),
+            right: Some(Box::new(Self::node(ParseToken::All))),
         })
     }
-    fn path_in_all(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_in_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_in_all");
-        self.eat_token();
+        Self::eat_token(tokenizer);
         Ok(Node {
             token: ParseToken::In,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.node(ParseToken::All))),
+            right: Some(Box::new(Self::node(ParseToken::All))),
         })
     }
-    fn path_in_key(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_in_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_in_key");
         Ok(Node {
             token: ParseToken::In,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.key()?)),
+            right: Some(Box::new(Self::key(tokenizer)?)),
         })
     }
-    fn key(&mut self) -> ParseResult<Node> {
+    fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#key");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(_, v)) => {
-                Ok(self.node(ParseToken::Key(v)))
+                Ok(Self::node(ParseToken::Key(v)))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn boolean(&mut self) -> ParseResult<Node> {
+    fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#boolean");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(_, v)) => {
-                Ok(self.node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
+                Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn array_quota_value(&mut self) -> ParseResult<Node> {
+    fn array_quota_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#array_quota_value");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::SingleQuoted(_, val))
             | Ok(Token::DoubleQuoted(_, val)) => {
-                Ok(self.node(ParseToken::Key(val)))
+                Ok(Self::node(ParseToken::Key(val)))
             }
             Err(TokenError::Eof) => {
-                Ok(self.node(ParseToken::Eof))
+                Ok(Self::node(ParseToken::Eof))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn array_start(&mut self, prev: Node) -> ParseResult<Node> {
+    fn array_start(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#array_start");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::Question(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 Ok(Node {
                     token: ParseToken::Array,
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.filter()?)),
+                    right: Some(Box::new(Self::filter(tokenizer)?)),
                 })
             }
             Ok(Token::Asterisk(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 Ok(Node {
                     token: ParseToken::Array,
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.node(ParseToken::All))),
+                    right: Some(Box::new(Self::node(ParseToken::All))),
                 })
             }
             _ => {
                 Ok(Node {
                     token: ParseToken::Array,
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.array_value()?)),
+                    right: Some(Box::new(Self::array_value(tokenizer)?)),
                })
            }
        }
    }
-    fn array(&mut self, prev: Node) -> ParseResult<Node> {
+    fn array(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#array");
-        let ret = self.array_start(prev)?;
-        self.eat_whitespace();
-        self.close_token(ret, Token::CloseArray(DUMMY))
+        let ret = Self::array_start(prev, tokenizer)?;
+        Self::eat_whitespace(tokenizer);
+        Self::close_token(ret, Token::CloseArray(DUMMY), tokenizer)
    }
-    fn array_value_key(&mut self) -> ParseResult<Node> {
+    fn array_value_key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#array_value_key");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, ref val)) => {
-                let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
-                self.eat_whitespace();
-                match self.tokenizer.peek_token() {
+                let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
+                Self::eat_whitespace(tokenizer);
+                match tokenizer.peek_token() {
                     Ok(Token::Comma(_)) => {
-                        self.union(digit)
+                        Self::union(digit, tokenizer)
                     }
                     Ok(Token::Split(_)) => {
-                        self.range_from(digit)
+                        Self::range_from(digit, tokenizer)
                     }
                     _ => {
-                        Ok(self.node(ParseToken::Number(digit as f64)))
+                        Ok(Self::node(ParseToken::Number(digit as f64)))
                     }
                 }
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn array_value(&mut self) -> ParseResult<Node> {
+    fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#array_value");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::Key(_, _)) => {
-                self.array_value_key()
+                Self::array_value_key(tokenizer)
             }
             Ok(Token::Split(_)) => {
-                self.eat_token();
-                self.range_to()
+                Self::eat_token(tokenizer);
+                Self::range_to(tokenizer)
             }
             Ok(Token::DoubleQuoted(_, _))
             | Ok(Token::SingleQuoted(_, _)) => {
-                self.array_quota_value()
+                Self::array_quota_value(tokenizer)
             }
             Err(TokenError::Eof) => {
-                Ok(self.node(ParseToken::Eof))
+                Ok(Self::node(ParseToken::Eof))
             }
             _ => {
-                self.eat_token();
-                Err(self.tokenizer.err_msg())
+                Self::eat_token(tokenizer);
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn union(&mut self, num: isize) -> ParseResult<Node> {
+    fn union(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#union");
         let mut values = vec![num];
-        while match self.tokenizer.peek_token() {
+        while match tokenizer.peek_token() {
             Ok(Token::Comma(_)) => true,
             _ => false
         } {
-            self.eat_token();
-            self.eat_whitespace();
-            match self.tokenizer.next_token() {
+            Self::eat_token(tokenizer);
+            Self::eat_whitespace(tokenizer);
+            match tokenizer.next_token() {
                 Ok(Token::Key(pos, ref val)) => {
-                    let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
+                    let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
                     values.push(digit);
                 }
                 _ => {
-                    return Err(self.tokenizer.err_msg());
+                    return Err(tokenizer.err_msg());
                }
            }
        }
-        Ok(self.node(ParseToken::Union(values)))
+        Ok(Self::node(ParseToken::Union(values)))
    }
-    fn range_from(&mut self, num: isize) -> ParseResult<Node> {
+    fn range_from(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#range_from");
-        self.eat_token();
-        self.eat_whitespace();
-        match self.tokenizer.peek_token() {
+        Self::eat_token(tokenizer);
+        Self::eat_whitespace(tokenizer);
+        match tokenizer.peek_token() {
             Ok(Token::Key(_, _)) => {
-                self.range(num)
+                Self::range(num, tokenizer)
             }
             _ => {
-                Ok(self.node(ParseToken::Range(Some(num), None)))
+                Ok(Self::node(ParseToken::Range(Some(num), None)))
            }
        }
    }
-    fn range_to(&mut self) -> ParseResult<Node> {
+    fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#range_to");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, ref val)) => {
-                let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
-                Ok(self.node(ParseToken::Range(None, Some(digit))))
+                let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
+                Ok(Self::node(ParseToken::Range(None, Some(digit))))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn range(&mut self, num: isize) -> ParseResult<Node> {
+    fn range(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#range");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, ref val)) => {
-                let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
-                Ok(self.node(ParseToken::Range(Some(num), Some(digit))))
+                let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
+                Ok(Self::node(ParseToken::Range(Some(num), Some(digit))))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn filter(&mut self) -> ParseResult<Node> {
+    fn filter(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#filter");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::OpenParenthesis(_)) => {
-                let ret = self.exprs()?;
-                self.eat_whitespace();
-                self.close_token(ret, Token::CloseParenthesis(DUMMY))
+                let ret = Self::exprs(tokenizer)?;
+                Self::eat_whitespace(tokenizer);
+                Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
             }
             Err(TokenError::Eof) => {
-                Ok(self.node(ParseToken::Eof))
+                Ok(Self::node(ParseToken::Eof))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn exprs(&mut self) -> ParseResult<Node> {
-        self.eat_whitespace();
+    fn exprs(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+        Self::eat_whitespace(tokenizer);
         debug!("#exprs");
-        let node = match self.tokenizer.peek_token() {
+        let node = match tokenizer.peek_token() {
             Ok(Token::OpenParenthesis(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 trace!("\t-exprs - open_parenthesis");
-                let ret = self.exprs()?;
-                self.eat_whitespace();
-                self.close_token(ret, Token::CloseParenthesis(DUMMY))?
+                let ret = Self::exprs(tokenizer)?;
+                Self::eat_whitespace(tokenizer);
+                Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)?
             }
             _ => {
                 trace!("\t-exprs - else");
-                self.expr()?
+                Self::expr(tokenizer)?
            }
        };
-        self.eat_whitespace();
-        self.condition_expr(node)
+        Self::eat_whitespace(tokenizer);
+        Self::condition_expr(node, tokenizer)
    }
-    fn condition_expr(&mut self, prev: Node) -> ParseResult<Node> {
+    fn condition_expr(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#condition_expr");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::And(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 Ok(Node {
                     token: ParseToken::Filter(FilterToken::And),
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.exprs()?)),
+                    right: Some(Box::new(Self::exprs(tokenizer)?)),
                 })
             }
             Ok(Token::Or(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 Ok(Node {
                     token: ParseToken::Filter(FilterToken::Or),
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.exprs()?)),
+                    right: Some(Box::new(Self::exprs(tokenizer)?)),
                })
            }
            _ => {
@@ -466,18 +462,18 @@ impl<'a> Parser<'a> {
         }
     }
-    fn expr(&mut self) -> ParseResult<Node> {
+    fn expr(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#expr");
-        let has_prop_candidate = match self.tokenizer.peek_token() {
+        let has_prop_candidate = match tokenizer.peek_token() {
             Ok(Token::At(_)) => true,
             _ => false
         };
-        let node = self.term()?;
-        self.eat_whitespace();
-        if match self.tokenizer.peek_token() {
+        let node = Self::term(tokenizer)?;
+        Self::eat_whitespace(tokenizer);
+        if match tokenizer.peek_token() {
             Ok(Token::Equal(_))
             | Ok(Token::NotEqual(_))
             | Ok(Token::Little(_))
@@ -486,97 +482,97 @@ impl<'a> Parser<'a> {
             | Ok(Token::GreaterOrEqual(_)) => true,
             _ => false
         } {
-            self.op(node)
+            Self::op(node, tokenizer)
         } else if has_prop_candidate {
             Ok(node)
         } else {
-            return Err(self.tokenizer.err_msg());
+            return Err(tokenizer.err_msg());
        }
    }
-    fn term_num(&mut self) -> ParseResult<Node> {
+    fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#term_num");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, val)) => {
-                match self.tokenizer.peek_token() {
+                match tokenizer.peek_token() {
                     Ok(Token::Dot(_)) => {
-                        self.term_num_float(val.as_str())
+                        Self::term_num_float(val.as_str(), tokenizer)
                     }
                     _ => {
-                        let number = utils::string_to_f64(&val, || self.tokenizer.err_msg_with_pos(pos))?;
-                        Ok(self.node(ParseToken::Number(number)))
+                        let number = utils::string_to_f64(&val, || tokenizer.err_msg_with_pos(pos))?;
+                        Ok(Self::node(ParseToken::Number(number)))
                     }
                 }
             }
             Err(TokenError::Eof) => {
-                Ok(self.node(ParseToken::Eof))
+                Ok(Self::node(ParseToken::Eof))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn term_num_float(&mut self, mut num: &str) -> ParseResult<Node> {
+    fn term_num_float(mut num: &str, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#term_num_float");
-        self.eat_token();
-        match self.tokenizer.next_token() {
+        Self::eat_token(tokenizer);
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, frac)) => {
                 let mut f = String::new();
                 f.push_str(&mut num);
                 f.push('.');
                 f.push_str(frac.as_str());
-                let number = utils::string_to_f64(&f, || self.tokenizer.err_msg_with_pos(pos))?;
-                Ok(self.node(ParseToken::Number(number)))
+                let number = utils::string_to_f64(&f, || tokenizer.err_msg_with_pos(pos))?;
+                Ok(Self::node(ParseToken::Number(number)))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn term(&mut self) -> ParseResult<Node> {
+    fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#term");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::At(_)) => {
-                self.eat_token();
-                let node = self.node(ParseToken::Relative);
-                match self.tokenizer.peek_token() {
+                Self::eat_token(tokenizer);
+                let node = Self::node(ParseToken::Relative);
+                match tokenizer.peek_token() {
                     Ok(Token::Whitespace(_, _)) => {
-                        self.eat_whitespace();
+                        Self::eat_whitespace(tokenizer);
                         Ok(node)
                     }
                     _ => {
-                        self.paths(node)
+                        Self::paths(node, tokenizer)
                     }
                 }
             }
             Ok(Token::Absolute(_)) => {
-                self.json_path()
+                Self::json_path(tokenizer)
             }
             Ok(Token::DoubleQuoted(_, _))
             | Ok(Token::SingleQuoted(_, _)) => {
-                self.array_quota_value()
+                Self::array_quota_value(tokenizer)
             }
             Ok(Token::Key(_, k)) => {
                 match k.chars().next() {
                     Some(ch) => match ch {
-                        '-' | '0'...'9' => self.term_num(),
-                        _ => self.boolean()
+                        '-' | '0'...'9' => Self::term_num(tokenizer),
+                        _ => Self::boolean(tokenizer)
                     }
-                    _ => Err(self.tokenizer.err_msg())
+                    _ => Err(tokenizer.err_msg())
                 }
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
-    fn op(&mut self, prev: Node) -> ParseResult<Node> {
+    fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#op");
-        let token = match self.tokenizer.next_token() {
+        let token = match tokenizer.next_token() {
             Ok(Token::Equal(_)) => {
                 ParseToken::Filter(FilterToken::Equal)
             }
@@ -599,41 +595,41 @@ impl<'a> Parser<'a> {
                 ParseToken::Eof
             }
             _ => {
-                return Err(self.tokenizer.err_msg());
+                return Err(tokenizer.err_msg());
            }
        };
-        self.eat_whitespace();
+        Self::eat_whitespace(tokenizer);
         Ok(Node {
             token,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.term()?)),
+            right: Some(Box::new(Self::term(tokenizer)?)),
        })
    }
-    fn eat_whitespace(&mut self) {
-        while let Ok(Token::Whitespace(_, _)) = self.tokenizer.peek_token() {
-            let _ = self.tokenizer.next_token();
+    fn eat_whitespace(tokenizer: &mut TokenReader) {
+        while let Ok(Token::Whitespace(_, _)) = tokenizer.peek_token() {
+            let _ = tokenizer.next_token();
        }
    }
-    fn eat_token(&mut self) {
-        let _ = self.tokenizer.next_token();
+    fn eat_token(tokenizer: &mut TokenReader) {
+        let _ = tokenizer.next_token();
    }
-    fn node(&mut self, token: ParseToken) -> Node {
-        Node { left: None, right: None, token: token }
+    fn node(token: ParseToken) -> Node {
+        Node { left: None, right: None, token }
    }
-    fn close_token(&mut self, ret: Node, token: Token) -> ParseResult<Node> {
+    fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#close_token");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(ref t) if t.partial_eq(token) => {
                 Ok(ret)
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
            }
        }
    }
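Every production above now has the same shape: an associated function that takes the node built so far plus a `&mut TokenReader`, so the only state threaded through the recursion is the reader itself, and `Parser` can shrink to a unit struct with no `'a`. A condensed, hypothetical miniature of the pattern (all names here are made up for illustration, not part of the crate):

    // Stateless recursive descent over an explicitly threaded reader.
    struct Reader { chars: Vec<char>, pos: usize }

    impl Reader {
        fn next(&mut self) -> Option<char> {
            let c = self.chars.get(self.pos).copied();
            self.pos += 1;
            c
        }
    }

    struct MiniParser;

    impl MiniParser {
        // No lifetime on MiniParser: the input is only borrowed inside compile().
        fn compile(input: &str) -> Result<usize, String> {
            let mut reader = Reader { chars: input.chars().collect(), pos: 0 };
            Self::dots(0, &mut reader)
        }
        // Counts leading dots, mirroring the `fn rule(prev, tokenizer)` shape above.
        fn dots(acc: usize, reader: &mut Reader) -> Result<usize, String> {
            match reader.next() {
                Some('.') => Self::dots(acc + 1, reader),
                Some(c) => Err(format!("unexpected '{}'", c)),
                None => Ok(acc),
            }
        }
    }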


@@ -275,7 +275,7 @@ impl<'a> Tokenizer<'a> {
     }
 }
-pub struct PreloadedTokenizer<'a> {
+pub struct TokenReader<'a> {
     origin_input: &'a str,
     err: TokenError,
     err_pos: usize,
@@ -283,7 +283,7 @@ pub struct PreloadedTokenizer<'a> {
     curr_pos: Option<usize>,
 }
-impl<'a> PreloadedTokenizer<'a> {
+impl<'a> TokenReader<'a> {
     pub fn new(input: &'a str) -> Self {
         let mut tokenizer = Tokenizer::new(input);
         let mut tokens = vec![];
@@ -293,7 +293,7 @@ impl<'a> PreloadedTokenizer<'a> {
                 tokens.insert(0, (tokenizer.current_pos(), t));
             }
             Err(e) => {
-                return PreloadedTokenizer {
+                return TokenReader {
                     origin_input: input.clone(),
                     err: e,
                     err_pos: tokenizer.current_pos(),


@@ -508,8 +508,7 @@ impl<'a> Selector<'a> {
     pub fn path(&mut self, path: &str) -> Result<&mut Self, JsonPathError> {
         debug!("path : {}", path);
-        let mut parser = Parser::new(path);
-        self.node = Some(parser.compile().map_err(|e| JsonPathError::Path(e))?);
+        self.node = Some(Parser::compile(path).map_err(|e| JsonPathError::Path(e))?);
         Ok(self)
     }


@@ -50,8 +50,7 @@ fn into_serde_json<D>(js_value: &JsValue) -> Result<D, String>
 #[wasm_bindgen]
 pub fn compile(path: &str) -> JsValue {
-    let mut parser = Parser::new(path);
-    let node = parser.compile();
+    let node = Parser::compile(path);
     let cb = Closure::wrap(Box::new(move |js_value: JsValue| {
         let mut selector = _Selector::new();
@@ -85,8 +84,7 @@ pub fn selector(js_value: JsValue) -> JsValue {
     };
     let cb = Closure::wrap(Box::new(move |path: String| {
-        let mut parser = Parser::new(path.as_str());
-        match parser.compile() {
+        match Parser::compile(path.as_str()) {
             Ok(node) => {
                 let mut selector = _Selector::new();
                 let _ = selector.compiled_path(node);