mirror of https://github.com/fluencelabs/jsonpath
synced 2025-04-25 09:22:19 +00:00

commit 51deec66d0
parent 909c851dcc

escaped quote notation
@@ -4,7 +4,7 @@ pub mod parser;
 #[cfg(test)]
 mod parser_tests {
-    use parser::parser::{ParseToken, Parser, NodeVisitor, FilterToken};
+    use parser::parser::{FilterToken, NodeVisitor, Parser, ParseToken};
 
     struct NodeVisitorTestImpl<'a> {
         input: &'a str,
@@ -270,6 +270,20 @@ mod parser_tests {
             ParseToken::ArrayEof
         ]));
 
+        assert_eq!(run(r#"$['single\'quote']"#), Ok(vec![
+            ParseToken::Absolute,
+            ParseToken::Array,
+            ParseToken::Key("single'quote".to_string()),
+            ParseToken::ArrayEof
+        ]));
+
+        assert_eq!(run(r#"$["single\"quote"]"#), Ok(vec![
+            ParseToken::Absolute,
+            ParseToken::Array,
+            ParseToken::Key(r#"single"quote"#.to_string()),
+            ParseToken::ArrayEof
+        ]));
+
         match run("$[") {
             Ok(_) => panic!(),
             _ => {}
@@ -338,6 +352,10 @@ mod parser_tests {
 mod tokenizer_tests {
     use parser::tokenizer::{Token, TokenError, Tokenizer, TokenReader};
 
+    fn setup() {
+        let _ = env_logger::try_init();
+    }
+
     fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
         let mut tokenizer = Tokenizer::new(input);
         let mut vec = vec![];
@@ -380,6 +398,8 @@ mod tokenizer_tests {
 
     #[test]
     fn token() {
+        setup();
+
         run("$.01.a",
             (
                 vec![
@@ -534,5 +554,19 @@ mod tokenizer_tests {
                 Token::Split(2),
                 Token::CloseArray(3)
             ], Some(TokenError::Eof)));
+
+        run(r#"$['single\'quote']"#, (vec![
+            Token::Absolute(0),
+            Token::OpenArray(1),
+            Token::SingleQuoted(2, "single\'quote".to_string()),
+            Token::CloseArray(17)
+        ], Some(TokenError::Eof)));
+
+        run(r#"$["double\"quote"]"#, (vec![
+            Token::Absolute(0),
+            Token::OpenArray(1),
+            Token::DoubleQuoted(2, "double\"quote".to_string()),
+            Token::CloseArray(17)
+        ], Some(TokenError::Eof)));
     }
 }
@@ -44,8 +44,8 @@ const CH_PIPE: char = '|';
 const CH_LITTLE: char = '<';
 const CH_GREATER: char = '>';
 const CH_EXCLAMATION: char = '!';
-const CH_SINGLE_QUOTA: char = '\'';
-const CH_DOUBLE_QUOTA: char = '"';
+const CH_SINGLE_QUOTE: char = '\'';
+const CH_DOUBLE_QUOTE: char = '"';
 
 #[derive(Debug, Clone, PartialEq)]
 pub enum TokenError {
@@ -147,20 +147,36 @@ pub struct Tokenizer<'a> {
 
 impl<'a> Tokenizer<'a> {
     pub fn new(input: &'a str) -> Self {
         trace!("input: {}", input);
         Tokenizer {
             input: PathReader::new(input),
         }
     }
 
-    fn single_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
-        let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
-        self.input.next_char().map_err(to_token_error)?;
+    fn quote(&mut self, ch: char) -> Result<String, TokenError> {
+        let (_, mut val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
+
+        if let Some('\\') = val.chars().last() {
+            self.input.next_char().map_err(to_token_error)?;
+            let _ = val.pop();
+            let (_, mut val_remain) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
+            self.input.next_char().map_err(to_token_error)?;
+            val.push(ch);
+            val.push_str(val_remain.as_str());
+        } else {
+            self.input.next_char().map_err(to_token_error)?;
+        }
+
+        Ok(val)
+    }
+
+    fn single_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
+        let val = self.quote(ch)?;
         Ok(Token::SingleQuoted(pos, val))
     }
 
-    fn double_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
-        let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
-        self.input.next_char().map_err(to_token_error)?;
+    fn double_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
+        let val = self.quote(ch)?;
         Ok(Token::DoubleQuoted(pos, val))
     }
 
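The new quote helper is the core of the change: it collects characters up to the closing quote, and if the collected text ends with a backslash it treats that quote as escaped, drops the backslash, keeps the quote character, and reads on to the real terminator. Note it handles a single escaped quote, matching the tests in this commit. Below is a standalone sketch of the same logic driven by a plain char iterator instead of the crate's PathReader; the helper name and signature are illustrative only, not part of the crate.

// Standalone sketch (not crate code): single-escape handling as in the patch,
// using a plain Chars iterator in place of PathReader.
fn read_quoted(after_open_quote: &str, quote: char) -> String {
    let mut chars = after_open_quote.chars();
    // Collect everything up to (and consume) the first quote character.
    let mut val: String = chars.by_ref().take_while(|c| *c != quote).collect();

    if val.ends_with('\\') {
        // That quote was escaped: drop the backslash, keep the quote itself,
        // then continue up to the real closing quote.
        val.pop();
        val.push(quote);
        let remain: String = chars.by_ref().take_while(|c| *c != quote).collect();
        val.push_str(&remain);
    }
    val
}

fn main() {
    // Input is the text that follows the opening quote in $['single\'quote'].
    assert_eq!(read_quoted(r"single\'quote']", '\''), "single'quote");
    assert_eq!(read_quoted(r#"double\"quote"]"#, '"'), r#"double"quote"#);
    println!("escaped quotes handled");
}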
@@ -259,8 +275,8 @@ impl<'a> Tokenizer<'a> {
             Some(t) => Ok(t),
             None => {
                 match ch {
-                    CH_SINGLE_QUOTA => self.single_quota(pos, ch),
-                    CH_DOUBLE_QUOTA => self.double_quota(pos, ch),
+                    CH_SINGLE_QUOTE => self.single_quote(pos, ch),
+                    CH_DOUBLE_QUOTE => self.double_quote(pos, ch),
                     CH_EQUAL => self.equal(pos, ch),
                     CH_GREATER => self.greater(pos, ch),
                     CH_LITTLE => self.little(pos, ch),
@@ -356,4 +356,12 @@ fn empty_range() {
     setup();
 
     select_and_then_compare("$[:]", json!(["first", "second"]), json!(["first", "second"]));
 }
+
+#[test]
+fn quote() {
+    setup();
+
+    select_and_then_compare(r#"$['single\'quote']"#, json!({"single'quote":"value"}), json!(["value"]));
+    select_and_then_compare(r#"$["double\"quote"]"#, json!({"double\"quote":"value"}), json!(["value"]));
+}
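For readers following along, the integration test above corresponds to ordinary library usage along these lines. The sketch assumes the upstream jsonpath_lib-style select helper and serde_json; the crate name and API may differ in this fork, so treat the names as assumptions rather than this repository's documented interface.

// Hedged usage sketch -- crate and helper names assumed from upstream
// jsonpath_lib, not taken from this diff.
use serde_json::json;

fn main() {
    let data = json!({ "single'quote": "value", "double\"quote": "value" });

    // The escaped quote inside the bracket selector now survives tokenizing
    // and reaches the key lookup as a literal quote character.
    let single = jsonpath_lib::select(&data, r#"$['single\'quote']"#).unwrap();
    let double = jsonpath_lib::select(&data, r#"$["double\"quote"]"#).unwrap();

    assert_eq!(single, vec![&json!("value")]);
    assert_eq!(double, vec![&json!("value")]);
}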