From ac3224892bb5a3cd0b31d995e1fccc2842dd2756 Mon Sep 17 00:00:00 2001
From: freestrings
Date: Mon, 3 Jun 2019 18:45:26 +0900
Subject: [PATCH] Remove Parser's lifetime

---
 nodejs/native/src/lib.rs |   6 +-
 src/lib.rs               |   3 +-
 src/parser/mod.rs        |   7 +-
 src/parser/parser.rs     | 376 +++++++++++++++++++--------------------
 src/parser/tokenizer.rs  |   6 +-
 src/select/mod.rs        |   3 +-
 wasm/src/lib.rs          |   6 +-
 7 files changed, 198 insertions(+), 209 deletions(-)

diff --git a/nodejs/native/src/lib.rs b/nodejs/native/src/lib.rs
index 6056220..cb5b494 100644
--- a/nodejs/native/src/lib.rs
+++ b/nodejs/native/src/lib.rs
@@ -39,8 +39,7 @@ pub struct SelectorCls {
 impl SelectorCls {
     fn path(&mut self, path: &str) {
-        let mut parser = Parser::new(path);
-        let node = match parser.compile() {
+        let node = match Parser::compile(path) {
             Ok(node) => node,
             Err(e) => panic!("{:?}", e)
         };
@@ -82,8 +81,7 @@ declare_types! {
     pub class JsCompileFn for SelectorCls {
         init(mut ctx) {
             let path = ctx.argument::<JsString>(0)?.value();
-            let mut parser = Parser::new(path.as_str());
-            let node = match parser.compile() {
+            let node = match Parser::compile(path.as_str()) {
                 Ok(node) => node,
                 Err(e) => panic!("{:?}", e)
             };
diff --git a/src/lib.rs b/src/lib.rs
index 2a3401a..9c9dc74 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -170,8 +170,7 @@ pub use parser::parser::{Node, Parser};
 /// ]);
 /// ```
 pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPathError> {
-    let mut parser = Parser::new(path);
-    let node = parser.compile();
+    let node = Parser::compile(path);
     move |json| {
         let mut selector = Selector::new();
         match &node {
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index bce22d6..42ac385 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -17,8 +17,7 @@ mod parser_tests {
     }
 
     fn start(&mut self) -> Result<Vec<ParseToken>, String> {
-        let mut parser = Parser::new(self.input);
-        let node = parser.compile()?;
+        let node = Parser::compile(self.input)?;
         self.visit(&node);
         Ok(self.stack.split_off(0))
     }
@@ -330,7 +329,7 @@ mod parser_tests {
 
 #[cfg(test)]
 mod tokenizer_tests {
-    use parser::tokenizer::{Token, TokenError, Tokenizer, PreloadedTokenizer};
+    use parser::tokenizer::{Token, TokenError, Tokenizer, TokenReader};
 
     fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
         let mut tokenizer = Tokenizer::new(input);
@@ -350,7 +349,7 @@ mod tokenizer_tests {
 
     #[test]
     fn peek() {
-        let mut tokenizer = PreloadedTokenizer::new("$.a");
+        let mut tokenizer = TokenReader::new("$.a");
         match tokenizer.next_token() {
             Ok(t) => assert_eq!(Token::Absolute(0), t),
             _ => panic!()
diff --git a/src/parser/parser.rs b/src/parser/parser.rs
index 60318ce..253252f 100644
--- a/src/parser/parser.rs
+++ b/src/parser/parser.rs
@@ -73,44 +73,40 @@ pub struct Node {
     token: ParseToken,
 }
 
-pub struct Parser<'a> {
-    tokenizer: PreloadedTokenizer<'a>
-}
+pub struct Parser;
 
-impl<'a> Parser<'a> {
-    pub fn new(input: &'a str) -> Self {
-        Parser { tokenizer: PreloadedTokenizer::new(input) }
+impl Parser {
+
+    pub fn compile(input: &str) -> ParseResult<Node> {
+        let mut tokenizer = TokenReader::new(input);
+        Ok(Self::json_path(&mut tokenizer)?)
     }
 
-    pub fn compile(&mut self) -> ParseResult<Node> {
-        Ok(self.json_path()?)
-    }
-
-    fn json_path(&mut self) -> ParseResult<Node> {
+    fn json_path(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#json_path");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Absolute(_)) => {
-                let node = self.node(ParseToken::Absolute);
-                self.paths(node)
+                let node = Self::node(ParseToken::Absolute);
+                Self::paths(node, tokenizer)
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn paths(&mut self, prev: Node) -> ParseResult<Node> {
+    fn paths(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#paths");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::Dot(_)) => {
-                self.eat_token();
-                self.paths_dot(prev)
+                Self::eat_token(tokenizer);
+                Self::paths_dot(prev, tokenizer)
             }
             Ok(Token::OpenArray(_)) => {
-                self.eat_token();
-                self.eat_whitespace();
-                let node = self.array(prev)?;
-                self.paths(node)
+                Self::eat_token(tokenizer);
+                Self::eat_whitespace(tokenizer);
+                let node = Self::array(prev, tokenizer)?;
+                Self::paths(node, tokenizer)
             }
             _ => {
                 Ok(prev)
@@ -118,10 +114,10 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn paths_dot(&mut self, prev: Node) -> ParseResult<Node> {
+    fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#paths_dot");
-        let node = self.path(prev)?;
-        match self.tokenizer.peek_token() {
+        let node = Self::path(prev, tokenizer)?;
+        match tokenizer.peek_token() {
             Ok(Token::Equal(_))
             | Ok(Token::NotEqual(_))
             | Ok(Token::Little(_))
@@ -133,331 +129,331 @@ impl<'a> Parser<'a> {
                 Ok(node)
             }
             _ => {
-                self.paths(node)
+                Self::paths(node, tokenizer)
             }
         }
     }
 
-    fn path(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::Dot(_)) => {
-                self.path_leaves(prev)
+                Self::path_leaves(prev, tokenizer)
             }
             Ok(Token::Asterisk(_)) => {
-                self.path_in_all(prev)
+                Self::path_in_all(prev, tokenizer)
             }
             Ok(Token::Key(_, _)) => {
-                self.path_in_key(prev)
+                Self::path_in_key(prev, tokenizer)
             }
             Ok(Token::OpenArray(_)) => {
-                self.eat_token();
-                self.array(prev)
+                Self::eat_token(tokenizer);
+                Self::array(prev, tokenizer)
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn path_leaves(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_leaves(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_leaves");
-        self.eat_token();
-        match self.tokenizer.peek_token() {
+        Self::eat_token(tokenizer);
+        match tokenizer.peek_token() {
             Ok(Token::Asterisk(_)) => {
-                self.path_leaves_all(prev)
+                Self::path_leaves_all(prev, tokenizer)
             }
             Ok(Token::OpenArray(_)) => {
-                let mut leaves_node = self.node(ParseToken::Leaves);
+                let mut leaves_node = Self::node(ParseToken::Leaves);
                 leaves_node.left = Some(Box::new(prev));
-                Ok(self.paths(leaves_node)?)
+                Ok(Self::paths(leaves_node, tokenizer)?)
             }
             _ => {
-                self.path_leaves_key(prev)
+                Self::path_leaves_key(prev, tokenizer)
             }
         }
     }
 
-    fn path_leaves_key(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_leaves_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_leaves_key");
         Ok(Node {
             token: ParseToken::Leaves,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.key()?)),
+            right: Some(Box::new(Self::key(tokenizer)?)),
         })
     }
 
-    fn path_leaves_all(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_leaves_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_leaves_all");
-        self.eat_token();
+        Self::eat_token(tokenizer);
         Ok(Node {
             token: ParseToken::Leaves,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.node(ParseToken::All))),
+            right: Some(Box::new(Self::node(ParseToken::All))),
         })
     }
 
-    fn path_in_all(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_in_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_in_all");
-        self.eat_token();
+        Self::eat_token(tokenizer);
         Ok(Node {
             token: ParseToken::In,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.node(ParseToken::All))),
+            right: Some(Box::new(Self::node(ParseToken::All))),
         })
     }
 
-    fn path_in_key(&mut self, prev: Node) -> ParseResult<Node> {
+    fn path_in_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#path_in_key");
         Ok(Node {
             token: ParseToken::In,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.key()?)),
+            right: Some(Box::new(Self::key(tokenizer)?)),
         })
     }
 
-    fn key(&mut self) -> ParseResult<Node> {
+    fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#key");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(_, v)) => {
-                Ok(self.node(ParseToken::Key(v)))
+                Ok(Self::node(ParseToken::Key(v)))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn boolean(&mut self) -> ParseResult<Node> {
+    fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#boolean");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(_, v)) => {
-                Ok(self.node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
+                Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn array_quota_value(&mut self) -> ParseResult<Node> {
+    fn array_quota_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#array_quota_value");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::SingleQuoted(_, val))
             | Ok(Token::DoubleQuoted(_, val)) => {
-                Ok(self.node(ParseToken::Key(val)))
+                Ok(Self::node(ParseToken::Key(val)))
             }
             Err(TokenError::Eof) => {
-                Ok(self.node(ParseToken::Eof))
+                Ok(Self::node(ParseToken::Eof))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn array_start(&mut self, prev: Node) -> ParseResult<Node> {
+    fn array_start(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#array_start");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::Question(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 Ok(Node {
                     token: ParseToken::Array,
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.filter()?)),
+                    right: Some(Box::new(Self::filter(tokenizer)?)),
                 })
             }
             Ok(Token::Asterisk(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 Ok(Node {
                     token: ParseToken::Array,
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.node(ParseToken::All))),
+                    right: Some(Box::new(Self::node(ParseToken::All))),
                 })
             }
             _ => {
                 Ok(Node {
                     token: ParseToken::Array,
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.array_value()?)),
+                    right: Some(Box::new(Self::array_value(tokenizer)?)),
                 })
             }
         }
     }
 
-    fn array(&mut self, prev: Node) -> ParseResult<Node> {
+    fn array(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#array");
-        let ret = self.array_start(prev)?;
-        self.eat_whitespace();
-        self.close_token(ret, Token::CloseArray(DUMMY))
+        let ret = Self::array_start(prev, tokenizer)?;
+        Self::eat_whitespace(tokenizer);
+        Self::close_token(ret, Token::CloseArray(DUMMY), tokenizer)
     }
 
-    fn array_value_key(&mut self) -> ParseResult<Node> {
+    fn array_value_key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#array_value_key");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, ref val)) => {
-                let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
-                self.eat_whitespace();
+                let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
+                Self::eat_whitespace(tokenizer);
 
-                match self.tokenizer.peek_token() {
+                match tokenizer.peek_token() {
                     Ok(Token::Comma(_)) => {
-                        self.union(digit)
+                        Self::union(digit, tokenizer)
                     }
                     Ok(Token::Split(_)) => {
-                        self.range_from(digit)
+                        Self::range_from(digit, tokenizer)
                     }
                     _ => {
-                        Ok(self.node(ParseToken::Number(digit as f64)))
+                        Ok(Self::node(ParseToken::Number(digit as f64)))
                     }
                 }
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn array_value(&mut self) -> ParseResult<Node> {
+    fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#array_value");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::Key(_, _)) => {
-                self.array_value_key()
+                Self::array_value_key(tokenizer)
             }
             Ok(Token::Split(_)) => {
-                self.eat_token();
-                self.range_to()
+                Self::eat_token(tokenizer);
+                Self::range_to(tokenizer)
             }
             Ok(Token::DoubleQuoted(_, _))
             | Ok(Token::SingleQuoted(_, _)) => {
-                self.array_quota_value()
+                Self::array_quota_value(tokenizer)
             }
             Err(TokenError::Eof) => {
-                Ok(self.node(ParseToken::Eof))
+                Ok(Self::node(ParseToken::Eof))
             }
             _ => {
-                self.eat_token();
-                Err(self.tokenizer.err_msg())
+                Self::eat_token(tokenizer);
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn union(&mut self, num: isize) -> ParseResult<Node> {
+    fn union(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#union");
         let mut values = vec![num];
-        while match self.tokenizer.peek_token() {
+        while match tokenizer.peek_token() {
             Ok(Token::Comma(_)) => true,
             _ => false
         } {
-            self.eat_token();
-            self.eat_whitespace();
-            match self.tokenizer.next_token() {
+            Self::eat_token(tokenizer);
+            Self::eat_whitespace(tokenizer);
+            match tokenizer.next_token() {
                 Ok(Token::Key(pos, ref val)) => {
-                    let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
+                    let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
                     values.push(digit);
                 }
                 _ => {
-                    return Err(self.tokenizer.err_msg());
+                    return Err(tokenizer.err_msg());
                 }
             }
         }
-        Ok(self.node(ParseToken::Union(values)))
+        Ok(Self::node(ParseToken::Union(values)))
     }
 
-    fn range_from(&mut self, num: isize) -> ParseResult<Node> {
+    fn range_from(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#range_from");
-        self.eat_token();
-        self.eat_whitespace();
-        match self.tokenizer.peek_token() {
+        Self::eat_token(tokenizer);
+        Self::eat_whitespace(tokenizer);
+        match tokenizer.peek_token() {
             Ok(Token::Key(_, _)) => {
-                self.range(num)
+                Self::range(num, tokenizer)
             }
             _ => {
-                Ok(self.node(ParseToken::Range(Some(num), None)))
+                Ok(Self::node(ParseToken::Range(Some(num), None)))
             }
         }
     }
 
-    fn range_to(&mut self) -> ParseResult<Node> {
+    fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#range_to");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, ref val)) => {
-                let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
-                Ok(self.node(ParseToken::Range(None, Some(digit))))
+                let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
+                Ok(Self::node(ParseToken::Range(None, Some(digit))))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn range(&mut self, num: isize) -> ParseResult<Node> {
+    fn range(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#range");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, ref val)) => {
-                let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
-                Ok(self.node(ParseToken::Range(Some(num), Some(digit))))
+                let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
+                Ok(Self::node(ParseToken::Range(Some(num), Some(digit))))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn filter(&mut self) -> ParseResult<Node> {
+    fn filter(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#filter");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::OpenParenthesis(_)) => {
-                let ret = self.exprs()?;
-                self.eat_whitespace();
-                self.close_token(ret, Token::CloseParenthesis(DUMMY))
+                let ret = Self::exprs(tokenizer)?;
+                Self::eat_whitespace(tokenizer);
+                Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
             }
             Err(TokenError::Eof) => {
-                Ok(self.node(ParseToken::Eof))
+                Ok(Self::node(ParseToken::Eof))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn exprs(&mut self) -> ParseResult<Node> {
-        self.eat_whitespace();
+    fn exprs(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+        Self::eat_whitespace(tokenizer);
         debug!("#exprs");
-        let node = match self.tokenizer.peek_token() {
+        let node = match tokenizer.peek_token() {
             Ok(Token::OpenParenthesis(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 trace!("\t-exprs - open_parenthesis");
-                let ret = self.exprs()?;
-                self.eat_whitespace();
-                self.close_token(ret, Token::CloseParenthesis(DUMMY))?
+                let ret = Self::exprs(tokenizer)?;
+                Self::eat_whitespace(tokenizer);
+                Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)?
             }
             _ => {
                 trace!("\t-exprs - else");
-                self.expr()?
+                Self::expr(tokenizer)?
             }
         };
 
-        self.eat_whitespace();
-        self.condition_expr(node)
+        Self::eat_whitespace(tokenizer);
+        Self::condition_expr(node, tokenizer)
     }
 
-    fn condition_expr(&mut self, prev: Node) -> ParseResult<Node> {
+    fn condition_expr(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#condition_expr");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::And(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 Ok(Node {
                     token: ParseToken::Filter(FilterToken::And),
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.exprs()?)),
+                    right: Some(Box::new(Self::exprs(tokenizer)?)),
                 })
             }
             Ok(Token::Or(_)) => {
-                self.eat_token();
+                Self::eat_token(tokenizer);
                 Ok(Node {
                     token: ParseToken::Filter(FilterToken::Or),
                     left: Some(Box::new(prev)),
-                    right: Some(Box::new(self.exprs()?)),
+                    right: Some(Box::new(Self::exprs(tokenizer)?)),
                 })
             }
             _ => {
@@ -466,18 +462,18 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn expr(&mut self) -> ParseResult<Node> {
+    fn expr(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#expr");
 
-        let has_prop_candidate = match self.tokenizer.peek_token() {
+        let has_prop_candidate = match tokenizer.peek_token() {
             Ok(Token::At(_)) => true,
             _ => false
         };
 
-        let node = self.term()?;
-        self.eat_whitespace();
+        let node = Self::term(tokenizer)?;
+        Self::eat_whitespace(tokenizer);
 
-        if match self.tokenizer.peek_token() {
+        if match tokenizer.peek_token() {
             Ok(Token::Equal(_))
             | Ok(Token::NotEqual(_))
             | Ok(Token::Little(_))
@@ -486,97 +482,97 @@ impl<'a> Parser<'a> {
             | Ok(Token::GreaterOrEqual(_)) => true,
             _ => false
         } {
-            self.op(node)
+            Self::op(node, tokenizer)
         } else if has_prop_candidate {
             Ok(node)
         } else {
-            return Err(self.tokenizer.err_msg());
+            return Err(tokenizer.err_msg());
         }
     }
 
-    fn term_num(&mut self) -> ParseResult<Node> {
+    fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#term_num");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, val)) => {
-                match self.tokenizer.peek_token() {
+                match tokenizer.peek_token() {
                     Ok(Token::Dot(_)) => {
-                        self.term_num_float(val.as_str())
+                        Self::term_num_float(val.as_str(), tokenizer)
                     }
                     _ => {
-                        let number = utils::string_to_f64(&val, || self.tokenizer.err_msg_with_pos(pos))?;
-                        Ok(self.node(ParseToken::Number(number)))
+                        let number = utils::string_to_f64(&val, || tokenizer.err_msg_with_pos(pos))?;
+                        Ok(Self::node(ParseToken::Number(number)))
                     }
                 }
             }
             Err(TokenError::Eof) => {
-                Ok(self.node(ParseToken::Eof))
+                Ok(Self::node(ParseToken::Eof))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn term_num_float(&mut self, mut num: &str) -> ParseResult<Node> {
+    fn term_num_float(mut num: &str, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#term_num_float");
-        self.eat_token();
-        match self.tokenizer.next_token() {
+        Self::eat_token(tokenizer);
+        match tokenizer.next_token() {
             Ok(Token::Key(pos, frac)) => {
                 let mut f = String::new();
                 f.push_str(&mut num);
                 f.push('.');
                 f.push_str(frac.as_str());
-                let number = utils::string_to_f64(&f, || self.tokenizer.err_msg_with_pos(pos))?;
-                Ok(self.node(ParseToken::Number(number)))
+                let number = utils::string_to_f64(&f, || tokenizer.err_msg_with_pos(pos))?;
+                Ok(Self::node(ParseToken::Number(number)))
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn term(&mut self) -> ParseResult<Node> {
+    fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#term");
-        match self.tokenizer.peek_token() {
+        match tokenizer.peek_token() {
             Ok(Token::At(_)) => {
-                self.eat_token();
-                let node = self.node(ParseToken::Relative);
+                Self::eat_token(tokenizer);
+                let node = Self::node(ParseToken::Relative);
 
-                match self.tokenizer.peek_token() {
+                match tokenizer.peek_token() {
                     Ok(Token::Whitespace(_, _)) => {
-                        self.eat_whitespace();
+                        Self::eat_whitespace(tokenizer);
                         Ok(node)
                     }
                     _ => {
-                        self.paths(node)
+                        Self::paths(node, tokenizer)
                     }
                 }
             }
             Ok(Token::Absolute(_)) => {
-                self.json_path()
+                Self::json_path(tokenizer)
             }
             Ok(Token::DoubleQuoted(_, _))
             | Ok(Token::SingleQuoted(_, _)) => {
-                self.array_quota_value()
+                Self::array_quota_value(tokenizer)
             }
             Ok(Token::Key(_, k)) => {
                 match k.chars().next() {
                     Some(ch) => match ch {
-                        '-' | '0'...'9' => self.term_num(),
-                        _ => self.boolean()
+                        '-' | '0'...'9' => Self::term_num(tokenizer),
+                        _ => Self::boolean(tokenizer)
                     }
-                    _ => Err(self.tokenizer.err_msg())
+                    _ => Err(tokenizer.err_msg())
                 }
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
 
-    fn op(&mut self, prev: Node) -> ParseResult<Node> {
+    fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#op");
-        let token = match self.tokenizer.next_token() {
+        let token = match tokenizer.next_token() {
             Ok(Token::Equal(_)) => {
                 ParseToken::Filter(FilterToken::Equal)
             }
@@ -599,41 +595,41 @@ impl<'a> Parser<'a> {
                 ParseToken::Eof
             }
             _ => {
-                return Err(self.tokenizer.err_msg());
+                return Err(tokenizer.err_msg());
             }
         };
 
-        self.eat_whitespace();
+        Self::eat_whitespace(tokenizer);
 
         Ok(Node {
             token,
             left: Some(Box::new(prev)),
-            right: Some(Box::new(self.term()?)),
+            right: Some(Box::new(Self::term(tokenizer)?)),
         })
     }
 
-    fn eat_whitespace(&mut self) {
-        while let Ok(Token::Whitespace(_, _)) = self.tokenizer.peek_token() {
-            let _ = self.tokenizer.next_token();
+    fn eat_whitespace(tokenizer: &mut TokenReader) {
+        while let Ok(Token::Whitespace(_, _)) = tokenizer.peek_token() {
+            let _ = tokenizer.next_token();
         }
     }
 
-    fn eat_token(&mut self) {
-        let _ = self.tokenizer.next_token();
+    fn eat_token(tokenizer: &mut TokenReader) {
+        let _ = tokenizer.next_token();
     }
 
-    fn node(&mut self, token: ParseToken) -> Node {
-        Node { left: None, right: None, token: token }
+    fn node(token: ParseToken) -> Node {
+        Node { left: None, right: None, token }
     }
 
-    fn close_token(&mut self, ret: Node, token: Token) -> ParseResult<Node> {
+    fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
         debug!("#close_token");
-        match self.tokenizer.next_token() {
+        match tokenizer.next_token() {
             Ok(ref t) if t.partial_eq(token) => {
                 Ok(ret)
             }
             _ => {
-                Err(self.tokenizer.err_msg())
+                Err(tokenizer.err_msg())
             }
         }
     }
diff --git a/src/parser/tokenizer.rs b/src/parser/tokenizer.rs
index 9090a06..4a777f2 100644
--- a/src/parser/tokenizer.rs
+++ b/src/parser/tokenizer.rs
@@ -275,7 +275,7 @@ impl<'a> Tokenizer<'a> {
     }
 }
 
-pub struct PreloadedTokenizer<'a> {
+pub struct TokenReader<'a> {
     origin_input: &'a str,
     err: TokenError,
     err_pos: usize,
@@ -283,7 +283,7 @@ pub struct PreloadedTokenizer<'a> {
     curr_pos: Option<usize>,
 }
 
-impl<'a> PreloadedTokenizer<'a> {
+impl<'a> TokenReader<'a> {
     pub fn new(input: &'a str) -> Self {
         let mut tokenizer = Tokenizer::new(input);
         let mut tokens = vec![];
@@ -293,7 +293,7 @@
                     tokens.insert(0, (tokenizer.current_pos(), t));
                 }
                 Err(e) => {
-                    return PreloadedTokenizer {
+                    return TokenReader {
                         origin_input: input.clone(),
                         err: e,
                         err_pos: tokenizer.current_pos(),
diff --git a/src/select/mod.rs b/src/select/mod.rs
index 07c4334..3faa073 100644
--- a/src/select/mod.rs
+++ b/src/select/mod.rs
@@ -508,8 +508,7 @@ impl<'a> Selector<'a> {
     pub fn path(&mut self, path: &str) -> Result<&mut Self, JsonPathError> {
         debug!("path : {}", path);
-        let mut parser = Parser::new(path);
-        self.node = Some(parser.compile().map_err(|e| JsonPathError::Path(e))?);
+        self.node = Some(Parser::compile(path).map_err(|e| JsonPathError::Path(e))?);
         Ok(self)
     }
 
diff --git a/wasm/src/lib.rs b/wasm/src/lib.rs
index ce1eefb..c184dc4 100644
--- a/wasm/src/lib.rs
+++ b/wasm/src/lib.rs
@@ -50,8 +50,7 @@ fn into_serde_json(js_value: &JsValue) -> Result
 
 #[wasm_bindgen]
 pub fn compile(path: &str) -> JsValue {
-    let mut parser = Parser::new(path);
-    let node = parser.compile();
+    let node = Parser::compile(path);
 
     let cb = Closure::wrap(Box::new(move |js_value: JsValue| {
         let mut selector = _Selector::new();
@@ -85,8 +84,7 @@ pub fn selector(js_value: JsValue) -> JsValue {
     };
 
     let cb = Closure::wrap(Box::new(move |path: String| {
-        let mut parser = Parser::new(path.as_str());
-        match parser.compile() {
+        match Parser::compile(path.as_str()) {
             Ok(node) => {
                 let mut selector = _Selector::new();
                 let _ = selector.compiled_path(node);
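
For reference, a minimal sketch of what this change means for callers. It is illustrative only, not part of the patch: it assumes the crate is consumed under the name `jsonpath_lib`, that `Parser` and `Node` stay re-exported (as the `pub use parser::parser::{Node, Parser};` context above shows), and that `ParseResult<Node>` is the `Result<Node, String>` alias used throughout `parser.rs`.

    // Before this patch, the parser borrowed the input string, so a
    // Parser<'a> value had to outlive compilation:
    //
    //     let mut parser = Parser::new("$.store.book[0].title");
    //     let node = parser.compile();
    //
    // After this patch, Parser is a stateless unit struct; the
    // TokenReader is created and dropped inside `compile`, so no
    // lifetime escapes to the caller.
    use jsonpath_lib::{Node, Parser};

    fn main() {
        // `Result<Node, String>` is assumed from the err_msg() strings
        // returned by TokenReader in the diff above.
        let compiled: Result<Node, String> = Parser::compile("$.store.book[0].title");
        match compiled {
            Ok(_node) => println!("path compiled"),
            Err(msg) => eprintln!("parse error: {}", msg),
        }
    }

Because the returned `Node` owns its tokens, it can be cached or moved into closures, which is exactly what `compile()` in `src/lib.rs` and the Node.js/wasm bindings now do without holding a `Parser<'a>` alive.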