2019-06-19 11:34:47 +09:00
|
|
|
pub mod parser;
|
2019-03-03 00:33:27 +09:00
|
|
|
mod path_reader;
|
2019-06-02 22:03:35 +09:00
|
|
|
pub(crate) mod tokenizer;
|
|
|
|
|
|
|
|
#[cfg(test)]
mod parser_tests {
    use parser::parser::{FilterToken, NodeVisitor, ParseToken, Parser};

    /// Test visitor that flattens a compiled path into the sequence of
    /// `ParseToken`s it was built from, so tests can compare token streams
    /// instead of walking the AST.
    struct NodeVisitorTestImpl<'a> {
        // The raw path expression under test.
        input: &'a str,
        // Tokens collected in visit order.
        stack: Vec<ParseToken>,
    }

    impl<'a> NodeVisitorTestImpl<'a> {
        fn new(input: &'a str) -> Self {
            NodeVisitorTestImpl {
                input,
                stack: Vec::new(),
            }
        }

        /// Compiles `self.input` and returns the visited token stream, or the
        /// parser's error message on failure.
        fn start(&mut self) -> Result<Vec<ParseToken>, String> {
            let node = Parser::compile(self.input)?;
            self.visit(&node);
            // Hand the collected tokens to the caller, leaving the stack empty.
            Ok(self.stack.split_off(0))
        }
    }

    impl<'a> NodeVisitor for NodeVisitorTestImpl<'a> {
        fn visit_token(&mut self, token: &ParseToken) {
            self.stack.push(token.clone());
        }
    }

    /// Initializes logging once; repeated calls are harmless.
    fn setup() {
        let _ = env_logger::try_init();
    }

    /// Parses `input` and returns the flattened token stream (or an error).
    fn run(input: &str) -> Result<Vec<ParseToken>, String> {
        let mut interpreter = NodeVisitorTestImpl::new(input);
        interpreter.start()
    }

    #[test]
    fn parse_error() {
        setup();

        // Asserts that `path` is rejected by the parser.
        fn invalid(path: &str) {
            assert!(run(path).is_err(), "\"{}\" should fail to parse", path);
        }

        invalid("$[]");
        invalid("$[a]");
        invalid("$[?($.a)]");
        invalid("$[?(@.a > @.b]");
        invalid("$[?(@.a < @.b&&(@.c < @.d)]");
        invalid("@.");
        invalid("$..[?(a <= @.a)]"); // invalid term value
        invalid("$['a', b]");
        invalid("$[0, >=]");
        invalid("$[a:]");
        invalid("$[:a]");
        invalid("$[::a]");
        invalid("$[:>]");
        invalid("$[1:>]");
        invalid("$[1,,]");
        invalid("$[?]");
        invalid("$[?(1 = 1)]");
        invalid("$[?(1 = >)]");
    }

    #[test]
    fn parse_path() {
        setup();

        assert_eq!(
            run("$.aa"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("aa".to_owned())
            ])
        );

        assert_eq!(
            run("$.00.a"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("00".to_owned()),
                ParseToken::In,
                ParseToken::Key("a".to_owned())
            ])
        );

        assert_eq!(
            run("$.00.韓창.seok"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("00".to_owned()),
                ParseToken::In,
                ParseToken::Key("韓창".to_owned()),
                ParseToken::In,
                ParseToken::Key("seok".to_owned())
            ])
        );

        assert_eq!(
            run("$.*"),
            Ok(vec![ParseToken::Absolute, ParseToken::In, ParseToken::All])
        );

        assert_eq!(
            run("$..*"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Leaves,
                ParseToken::All
            ])
        );

        assert_eq!(
            run("$..[0]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Leaves,
                ParseToken::Array,
                ParseToken::Number(0.0),
                ParseToken::ArrayEof
            ])
        );

        // A trailing dot, `..` with nothing after it, and whitespace after a
        // dot are all parse errors.
        assert!(run("$.").is_err());
        assert!(run("$..").is_err());
        assert!(run("$. a").is_err());
    }

    // NOTE(review): "sytax" is a typo for "syntax"; the name is kept so the
    // test-harness entry point is unchanged.
    #[test]
    fn parse_array_sytax() {
        setup();

        assert_eq!(
            run("$.book[?(@.isbn)]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("book".to_string()),
                ParseToken::Array,
                ParseToken::Relative,
                ParseToken::In,
                ParseToken::Key("isbn".to_string()),
                ParseToken::ArrayEof
            ])
        );

        //
        // An array access is also treated as an `In` context, so when they
        // nest only a single one is emitted.
        //
        assert_eq!(
            run("$.[*]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::All,
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$.a[*]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Array,
                ParseToken::All,
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$.a[*].가"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Array,
                ParseToken::All,
                ParseToken::ArrayEof,
                ParseToken::In,
                ParseToken::Key("가".to_owned())
            ])
        );

        assert_eq!(
            run("$.a[0][1]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Array,
                ParseToken::Number(0_f64),
                ParseToken::ArrayEof,
                ParseToken::Array,
                ParseToken::Number(1_f64),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$.a[1,2]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Array,
                ParseToken::Union(vec![1, 2]),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$.a[10:]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Array,
                ParseToken::Range(Some(10), None, None),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$.a[:11]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Array,
                ParseToken::Range(None, Some(11), None),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$.a[-12:13]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Array,
                ParseToken::Range(Some(-12), Some(13), None),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run(r#"$[0:3:2]"#),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Range(Some(0), Some(3), Some(2)),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run(r#"$[:3:2]"#),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Range(None, Some(3), Some(2)),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run(r#"$[:]"#),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Range(None, None, None),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run(r#"$[::]"#),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Range(None, None, None),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run(r#"$[::2]"#),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Range(None, None, Some(2)),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run(r#"$["a", 'b']"#),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Keys(vec!["a".to_string(), "b".to_string()]),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$.a[?(1>2)]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Array,
                ParseToken::Number(1_f64),
                ParseToken::Number(2_f64),
                ParseToken::Filter(FilterToken::Greater),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$.a[?($.b>3)]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Array,
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("b".to_owned()),
                ParseToken::Number(3_f64),
                ParseToken::Filter(FilterToken::Greater),
                ParseToken::ArrayEof
            ])
        );

        // Filter expressions are emitted in postfix (RPN) order.
        assert_eq!(
            run("$[?($.c>@.d && 1==2)]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("c".to_owned()),
                ParseToken::Relative,
                ParseToken::In,
                ParseToken::Key("d".to_owned()),
                ParseToken::Filter(FilterToken::Greater),
                ParseToken::Number(1_f64),
                ParseToken::Number(2_f64),
                ParseToken::Filter(FilterToken::Equal),
                ParseToken::Filter(FilterToken::And),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$[?($.c>@.d&&(1==2||3>=4))]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("c".to_owned()),
                ParseToken::Relative,
                ParseToken::In,
                ParseToken::Key("d".to_owned()),
                ParseToken::Filter(FilterToken::Greater),
                ParseToken::Number(1_f64),
                ParseToken::Number(2_f64),
                ParseToken::Filter(FilterToken::Equal),
                ParseToken::Number(3_f64),
                ParseToken::Number(4_f64),
                ParseToken::Filter(FilterToken::GreaterOrEqual),
                ParseToken::Filter(FilterToken::Or),
                ParseToken::Filter(FilterToken::And),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$[?(@.a<@.b)]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Relative,
                ParseToken::In,
                ParseToken::Key("a".to_owned()),
                ParseToken::Relative,
                ParseToken::In,
                ParseToken::Key("b".to_owned()),
                ParseToken::Filter(FilterToken::Little),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$[*][*][*]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::All,
                ParseToken::ArrayEof,
                ParseToken::Array,
                ParseToken::All,
                ParseToken::ArrayEof,
                ParseToken::Array,
                ParseToken::All,
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$['a']['bb']"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Key("a".to_string()),
                ParseToken::ArrayEof,
                ParseToken::Array,
                ParseToken::Key("bb".to_string()),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$.a[?(@.e==true)]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::In,
                ParseToken::Key("a".to_string()),
                ParseToken::Array,
                ParseToken::Relative,
                ParseToken::In,
                ParseToken::Key("e".to_string()),
                ParseToken::Bool(true),
                ParseToken::Filter(FilterToken::Equal),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run(r#"$[?(@ > 1)]"#),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Relative,
                ParseToken::Number(1_f64),
                ParseToken::Filter(FilterToken::Greater),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run("$[:]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Range(None, None, None),
                ParseToken::ArrayEof
            ])
        );

        // Escaped quotes inside quoted keys are unescaped in the token.
        assert_eq!(
            run(r#"$['single\'quote']"#),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Key("single'quote".to_string()),
                ParseToken::ArrayEof
            ])
        );

        assert_eq!(
            run(r#"$["single\"quote"]"#),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Key(r#"single"quote"#.to_string()),
                ParseToken::ArrayEof
            ])
        );
    }

    #[test]
    fn parse_array_float() {
        setup();

        assert_eq!(
            run("$[?(1.1<2.1)]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Number(1.1),
                ParseToken::Number(2.1),
                ParseToken::Filter(FilterToken::Little),
                ParseToken::ArrayEof
            ])
        );

        // Float indices and malformed float literals are rejected.
        assert!(run("$[1.1]").is_err());
        assert!(run("$[?(1.1<.2)]").is_err());
        assert!(run("$[?(1.1<2.)]").is_err());
        assert!(run("$[?(1.1<2.a)]").is_err());
    }
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod tokenizer_tests {
    use parser::tokenizer::{Token, TokenError, TokenReader, Tokenizer};

    /// Initializes logging once; repeated calls are harmless.
    fn setup() {
        let _ = env_logger::try_init();
    }

    /// Drains the tokenizer over `input`, returning every token produced and
    /// the terminating error (`TokenError::Eof` on normal exhaustion).
    fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
        let mut tokenizer = Tokenizer::new(input);
        let mut vec = vec![];
        loop {
            match tokenizer.next_token() {
                Ok(t) => vec.push(t),
                Err(e) => return (vec, Some(e)),
            }
        }
    }

    /// Tokenizes `input` and asserts the full (tokens, error) result.
    fn run(input: &str, expected: (Vec<Token>, Option<TokenError>)) {
        // The original called `collect_token(input.clone())`; cloning a `&str`
        // just copies the reference, so the no-op clone was dropped.
        let (vec, err) = collect_token(input);
        assert_eq!((vec, err), expected, "\"{}\"", input);
    }

    #[test]
    fn peek() {
        // `peek_token` must not advance the reader: two consecutive peeks see
        // the same token, and the following `next_token` consumes it.
        let mut tokenizer = TokenReader::new("$.a");
        match tokenizer.next_token() {
            Ok(t) => assert_eq!(Token::Absolute(0), t),
            _ => panic!(),
        }

        match tokenizer.peek_token() {
            Ok(t) => assert_eq!(&Token::Dot(1), t),
            _ => panic!(),
        }

        match tokenizer.peek_token() {
            Ok(t) => assert_eq!(&Token::Dot(1), t),
            _ => panic!(),
        }

        match tokenizer.next_token() {
            Ok(t) => assert_eq!(Token::Dot(1), t),
            _ => panic!(),
        }
    }

    #[test]
    fn token() {
        setup();

        run(
            "$.01.a",
            (
                vec![
                    Token::Absolute(0),
                    Token::Dot(1),
                    Token::Key(2, "01".to_string()),
                    Token::Dot(4),
                    Token::Key(5, "a".to_string()),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            "$. []",
            (
                vec![
                    Token::Absolute(0),
                    Token::Dot(1),
                    Token::Whitespace(2, 2),
                    Token::OpenArray(5),
                    Token::CloseArray(6),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            "$..",
            (
                vec![Token::Absolute(0), Token::Dot(1), Token::Dot(2)],
                Some(TokenError::Eof),
            ),
        );

        run(
            "$..ab",
            (
                vec![
                    Token::Absolute(0),
                    Token::Dot(1),
                    Token::Dot(2),
                    Token::Key(3, "ab".to_string()),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            "$..가 [",
            (
                vec![
                    Token::Absolute(0),
                    Token::Dot(1),
                    Token::Dot(2),
                    Token::Key(3, "가".to_string()),
                    Token::Whitespace(6, 0),
                    Token::OpenArray(7),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            "[-1, 2 ]",
            (
                vec![
                    Token::OpenArray(0),
                    Token::Key(1, "-1".to_string()),
                    Token::Comma(3),
                    Token::Whitespace(4, 0),
                    Token::Key(5, "2".to_string()),
                    Token::Whitespace(6, 0),
                    Token::CloseArray(7),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            "[ 1 2 , 3 \"abc\" : -10 ]",
            (
                vec![
                    Token::OpenArray(0),
                    Token::Whitespace(1, 0),
                    Token::Key(2, "1".to_string()),
                    Token::Whitespace(3, 0),
                    Token::Key(4, "2".to_string()),
                    Token::Whitespace(5, 0),
                    Token::Comma(6),
                    Token::Whitespace(7, 0),
                    Token::Key(8, "3".to_string()),
                    Token::Whitespace(9, 0),
                    Token::DoubleQuoted(10, "abc".to_string()),
                    Token::Whitespace(15, 0),
                    Token::Split(16),
                    Token::Whitespace(17, 0),
                    Token::Key(18, "-10".to_string()),
                    Token::Whitespace(21, 0),
                    Token::CloseArray(22),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            "?(@.a가 <41.01)",
            (
                vec![
                    Token::Question(0),
                    Token::OpenParenthesis(1),
                    Token::At(2),
                    Token::Dot(3),
                    Token::Key(4, "a가".to_string()),
                    Token::Whitespace(8, 0),
                    Token::Little(9),
                    Token::Key(10, "41".to_string()),
                    Token::Dot(12),
                    Token::Key(13, "01".to_string()),
                    Token::CloseParenthesis(15),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            "?(@.a <4a.01)",
            (
                vec![
                    Token::Question(0),
                    Token::OpenParenthesis(1),
                    Token::At(2),
                    Token::Dot(3),
                    Token::Key(4, "a".to_string()),
                    Token::Whitespace(5, 0),
                    Token::Little(6),
                    Token::Key(7, "4a".to_string()),
                    Token::Dot(9),
                    Token::Key(10, "01".to_string()),
                    Token::CloseParenthesis(12),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            "?($.c>@.d)",
            (
                vec![
                    Token::Question(0),
                    Token::OpenParenthesis(1),
                    Token::Absolute(2),
                    Token::Dot(3),
                    Token::Key(4, "c".to_string()),
                    Token::Greater(5),
                    Token::At(6),
                    Token::Dot(7),
                    Token::Key(8, "d".to_string()),
                    Token::CloseParenthesis(9),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            "$[:]",
            (
                vec![
                    Token::Absolute(0),
                    Token::OpenArray(1),
                    Token::Split(2),
                    Token::CloseArray(3),
                ],
                Some(TokenError::Eof),
            ),
        );

        // Quoted tokens carry the unescaped string; the close-bracket position
        // is measured against the raw (escaped) input.
        run(
            r#"$['single\'quote']"#,
            (
                vec![
                    Token::Absolute(0),
                    Token::OpenArray(1),
                    Token::SingleQuoted(2, "single'quote".to_string()),
                    Token::CloseArray(17),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            r#"$['single\'1','single\'2']"#,
            (
                vec![
                    Token::Absolute(0),
                    Token::OpenArray(1),
                    Token::SingleQuoted(2, "single'1".to_string()),
                    Token::Comma(13),
                    Token::SingleQuoted(14, "single'2".to_string()),
                    Token::CloseArray(25),
                ],
                Some(TokenError::Eof),
            ),
        );

        run(
            r#"$["double\"quote"]"#,
            (
                vec![
                    Token::Absolute(0),
                    Token::OpenArray(1),
                    Token::DoubleQuoted(2, "double\"quote".to_string()),
                    Token::CloseArray(17),
                ],
                Some(TokenError::Eof),
            ),
        );
    }
}
|