escaped quote notation

This commit is contained in:
freestrings 2019-06-17 15:42:46 +09:00
parent 909c851dcc
commit 51deec66d0
3 changed files with 69 additions and 11 deletions

View File

@@ -4,7 +4,7 @@ pub mod parser;
#[cfg(test)] #[cfg(test)]
mod parser_tests { mod parser_tests {
use parser::parser::{ParseToken, Parser, NodeVisitor, FilterToken}; use parser::parser::{FilterToken, NodeVisitor, Parser, ParseToken};
struct NodeVisitorTestImpl<'a> { struct NodeVisitorTestImpl<'a> {
input: &'a str, input: &'a str,
@@ -270,6 +270,20 @@ mod parser_tests {
ParseToken::ArrayEof ParseToken::ArrayEof
])); ]));
assert_eq!(run(r#"$['single\'quote']"#), Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::Key("single'quote".to_string()),
ParseToken::ArrayEof
]));
assert_eq!(run(r#"$["single\"quote"]"#), Ok(vec![
ParseToken::Absolute,
ParseToken::Array,
ParseToken::Key(r#"single"quote"#.to_string()),
ParseToken::ArrayEof
]));
match run("$[") { match run("$[") {
Ok(_) => panic!(), Ok(_) => panic!(),
_ => {} _ => {}
@@ -338,6 +352,10 @@ mod parser_tests {
mod tokenizer_tests { mod tokenizer_tests {
use parser::tokenizer::{Token, TokenError, Tokenizer, TokenReader}; use parser::tokenizer::{Token, TokenError, Tokenizer, TokenReader};
fn setup() {
let _ = env_logger::try_init();
}
fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) { fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
let mut tokenizer = Tokenizer::new(input); let mut tokenizer = Tokenizer::new(input);
let mut vec = vec![]; let mut vec = vec![];
@@ -380,6 +398,8 @@ mod tokenizer_tests {
#[test] #[test]
fn token() { fn token() {
setup();
run("$.01.a", run("$.01.a",
( (
vec![ vec![
@@ -534,5 +554,19 @@ mod tokenizer_tests {
Token::Split(2), Token::Split(2),
Token::CloseArray(3) Token::CloseArray(3)
], Some(TokenError::Eof))); ], Some(TokenError::Eof)));
run(r#"$['single\'quote']"#, (vec![
Token::Absolute(0),
Token::OpenArray(1),
Token::SingleQuoted(2, "single\'quote".to_string()),
Token::CloseArray(17)
], Some(TokenError::Eof)));
run(r#"$["double\"quote"]"#, (vec![
Token::Absolute(0),
Token::OpenArray(1),
Token::DoubleQuoted(2, "double\"quote".to_string()),
Token::CloseArray(17)
], Some(TokenError::Eof)));
} }
} }

View File

@@ -44,8 +44,8 @@ const CH_PIPE: char = '|';
const CH_LITTLE: char = '<'; const CH_LITTLE: char = '<';
const CH_GREATER: char = '>'; const CH_GREATER: char = '>';
const CH_EXCLAMATION: char = '!'; const CH_EXCLAMATION: char = '!';
const CH_SINGLE_QUOTA: char = '\''; const CH_SINGLE_QUOTE: char = '\'';
const CH_DOUBLE_QUOTA: char = '"'; const CH_DOUBLE_QUOTE: char = '"';
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub enum TokenError { pub enum TokenError {
@@ -147,20 +147,36 @@ pub struct Tokenizer<'a> {
impl<'a> Tokenizer<'a> { impl<'a> Tokenizer<'a> {
pub fn new(input: &'a str) -> Self { pub fn new(input: &'a str) -> Self {
trace!("input: {}", input);
Tokenizer { Tokenizer {
input: PathReader::new(input), input: PathReader::new(input),
} }
} }
fn single_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> { fn quote(&mut self, ch: char) -> Result<String, TokenError> {
let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?; let (_, mut val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
self.input.next_char().map_err(to_token_error)?;
if let Some('\\') = val.chars().last() {
self.input.next_char().map_err(to_token_error)?;
let _ = val.pop();
let (_, mut val_remain) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
self.input.next_char().map_err(to_token_error)?;
val.push(ch);
val.push_str(val_remain.as_str());
} else {
self.input.next_char().map_err(to_token_error)?;
}
Ok(val)
}
fn single_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
let val = self.quote(ch)?;
Ok(Token::SingleQuoted(pos, val)) Ok(Token::SingleQuoted(pos, val))
} }
fn double_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> { fn double_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?; let val = self.quote(ch)?;
self.input.next_char().map_err(to_token_error)?;
Ok(Token::DoubleQuoted(pos, val)) Ok(Token::DoubleQuoted(pos, val))
} }
@@ -259,8 +275,8 @@ impl<'a> Tokenizer<'a> {
Some(t) => Ok(t), Some(t) => Ok(t),
None => { None => {
match ch { match ch {
CH_SINGLE_QUOTA => self.single_quota(pos, ch), CH_SINGLE_QUOTE => self.single_quote(pos, ch),
CH_DOUBLE_QUOTA => self.double_quota(pos, ch), CH_DOUBLE_QUOTE => self.double_quote(pos, ch),
CH_EQUAL => self.equal(pos, ch), CH_EQUAL => self.equal(pos, ch),
CH_GREATER => self.greater(pos, ch), CH_GREATER => self.greater(pos, ch),
CH_LITTLE => self.little(pos, ch), CH_LITTLE => self.little(pos, ch),

View File

@@ -356,4 +356,12 @@ fn empty_range() {
setup(); setup();
select_and_then_compare("$[:]", json!(["first", "second"]), json!(["first", "second"])); select_and_then_compare("$[:]", json!(["first", "second"]), json!(["first", "second"]));
}
#[test]
fn quote() {
setup();
select_and_then_compare(r#"$['single\'quote']"#, json!({"single'quote":"value"}), json!(["value"]));
select_and_then_compare(r#"$["double\"quote"]"#, json!({"double\"quote":"value"}), json!(["value"]));
} }