mirror of https://github.com/fluencelabs/jsonpath
synced 2025-04-25 09:22:19 +00:00

commit d6c35de863 (parent 17a4f3b479): array argument ("array 인자")

.gitignore (vendored) | 1 line changed
@@ -1,4 +1,5 @@
 .idea/*
+.vscode
 !.idea/runConfigurations/
 /target/
 Cargo.lock

@@ -1,7 +1,7 @@
 <component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="filter_filter - trace" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+  <configuration default="false" name="filter_return_type - trace" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
     <option name="channel" value="DEFAULT" />
-    <option name="command" value="test --package rs-jsonpath --lib jsonpath::json_filter::tests::filter -- --exact" />
+    <option name="command" value="test --package rs-jsonpath --lib jsonpath::json_filter::tests::return_type -- --exact" />
     <option name="allFeatures" value="false" />
     <option name="nocapture" value="true" />
     <option name="backtrace" value="NO" />

@@ -44,8 +44,7 @@
     },
     "friends": [
       {
-        "id": 0,
-        "name": "Millicent Norman"
+        "id": 0
       },
       {
         "id": 1,

File diff suppressed because it is too large.

@@ -2,4 +2,3 @@ mod path_reader;
 mod tokenizer;
 mod parser;
 mod json_filter;
-mod utils;

@@ -5,12 +5,35 @@ use super::tokenizer::{
     PreloadedTokenizer,
     TokenError,
 };
-use super::utils;
 
 const DUMMY: usize = 0;
 
 type Result<T> = result::Result<T, String>;
 
+mod utils {
+    use std::result;
+
+    pub fn vec_to_int<F>(vec: &Vec<char>, msg_handler: F) -> result::Result<isize, String>
+        where F: Fn() -> String {
+        match vec.iter().map(|c| *c).collect::<String>().as_str().parse::<isize>() {
+            Ok(n) => Ok(n),
+            _ => Err(msg_handler())
+        }
+    }
+
+    pub fn vec_to_float<F>(vec: &Vec<char>, msg_handler: F) -> result::Result<f64, String>
+        where F: Fn() -> String {
+        match vec.iter().map(|c| *c).collect::<String>().as_str().parse::<f64>() {
+            Ok(n) => Ok(n),
+            _ => Err(msg_handler())
+        }
+    }
+
+    pub fn vec_to_string(vec: &Vec<char>) -> String {
+        vec.iter().map(|c| *c).collect::<String>()
+    }
+}
+
 #[derive(Debug, PartialEq)]
 pub enum ParseToken {
     // '$'
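
For orientation, a minimal standalone sketch of how the relocated helpers behave. The `utils` function bodies are copied from the hunk above; the `main` function, the sample input, and the error message are illustrative assumptions, not part of this commit.

mod utils {
    use std::result;

    pub fn vec_to_int<F>(vec: &Vec<char>, msg_handler: F) -> result::Result<isize, String>
        where F: Fn() -> String {
        // Join the collected characters into a String, then parse it as an integer.
        match vec.iter().map(|c| *c).collect::<String>().as_str().parse::<isize>() {
            Ok(n) => Ok(n),
            _ => Err(msg_handler())
        }
    }

    pub fn vec_to_string(vec: &Vec<char>) -> String {
        vec.iter().map(|c| *c).collect::<String>()
    }
}

fn main() {
    // The parser accumulates digits as a char buffer; converting it looks like this.
    let digits: Vec<char> = "42".chars().collect();

    // The closure builds the error message lazily, so it only runs on parse failure.
    assert_eq!(utils::vec_to_int(&digits, || "expected an integer".to_string()), Ok(42));
    assert_eq!(utils::vec_to_string(&digits), "42".to_string());
}

Moving the helpers into a private `mod utils` inside the parser keeps them off the crate's public surface, which matches the removal of `mod utils;` and of the top-level utils file later in this diff.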

@@ -649,9 +672,9 @@ pub trait NodeVisitor {
             }
             ParseToken::Filter(_) => {
                 node.left.map(|n| self.visit(*n));
-                self.clean_filter_context();
+                self.end_term();
                 node.right.map(|n| self.visit(*n));
-                self.clean_filter_context();
+                self.end_term();
                 self.visit_token(node.token);
             }
             _ => {}
@@ -659,7 +682,7 @@ pub trait NodeVisitor {
     }
 
     fn visit_token(&mut self, token: ParseToken);
-    fn clean_filter_context(&mut self) {}
+    fn end_term(&mut self) {}
 }
 
 #[cfg(test)]
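
The two hunks above rename the visitor hook from `clean_filter_context` to the more general `end_term`. As a rough illustration only (this is not the crate's actual `NodeVisitor` trait, which has more methods and works with `ParseToken`), the default-method pattern involved looks like this:

// Illustrative sketch: a visitor trait with an optional hook that has an
// empty default body; implementors override it only when they need it.
trait Visitor {
    // Required for every implementor.
    fn visit_token(&mut self, token: &str);

    // Optional hook, called after each term is visited; defaults to a no-op.
    fn end_term(&mut self) {}
}

struct Printer;

impl Visitor for Printer {
    fn visit_token(&mut self, token: &str) {
        println!("token: {}", token);
    }

    fn end_term(&mut self) {
        println!("end of term");
    }
}

fn main() {
    let mut visitor = Printer;
    visitor.visit_token("@.friends");
    visitor.end_term();
}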

@@ -291,27 +291,21 @@ impl<'a> PreloadedTokenizer<'a> {
     pub fn new(input: &'a str) -> Self {
         let mut tokenizer = Tokenizer::new(input);
         let mut tokens = vec![];
-        let mut err = TokenError::Eof;
-        let mut err_pos = 0;
         loop {
             match tokenizer.next_token() {
                 Ok(t) => {
                     tokens.insert(0, (tokenizer.current_pos(), t));
                 }
                 Err(e) => {
-                    err = e;
-                    err_pos = tokenizer.current_pos();
-                    break;
+                    return PreloadedTokenizer {
+                        origin_input: input.clone(),
+                        err: e,
+                        err_pos: tokenizer.current_pos(),
+                        tokens,
+                        curr_pos: None,
+                    };
                 }
             }
         }
-
-        PreloadedTokenizer {
-            origin_input: input.clone(),
-            err,
-            err_pos,
-            tokens,
-            curr_pos: None,
-        }
     }
 
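The tokenizer hunk above replaces the mutable `err`/`err_pos` bookkeeping with an early return from the error arm. A simplified, self-contained sketch of the same shape (the types and names here are made up for illustration; only the control-flow change mirrors the diff):

// Simplified stand-in types; the real code uses Tokenizer, Token and TokenError.
struct Preloaded {
    tokens: Vec<String>,
    err: String,
    err_pos: usize,
}

fn preload(mut next_token: impl FnMut() -> Result<String, String>, current_pos: impl Fn() -> usize) -> Preloaded {
    let mut tokens = vec![];
    loop {
        match next_token() {
            Ok(t) => tokens.insert(0, t),
            // Tokenizing always ends in an error (e.g. end of input), so returning
            // here replaces the old `break` plus the construction after the loop.
            Err(e) => {
                return Preloaded {
                    tokens,
                    err: e,
                    err_pos: current_pos(),
                };
            }
        }
    }
}

fn main() {
    let mut stream = vec![Ok("$".to_string()), Err("eof".to_string())].into_iter();
    let loaded = preload(|| stream.next().unwrap(), || 1);
    assert_eq!(loaded.tokens, vec!["$".to_string()]);
    assert_eq!(loaded.err, "eof");
    assert_eq!(loaded.err_pos, 1);
}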

@@ -1,21 +0,0 @@
-use std::result;
-
-pub fn vec_to_int<F>(vec: &Vec<char>, msg_handler: F) -> result::Result<isize, String>
-    where F: Fn() -> String {
-    match vec.iter().map(|c| *c).collect::<String>().as_str().parse::<isize>() {
-        Ok(n) => Ok(n),
-        _ => Err(msg_handler())
-    }
-}
-
-pub fn vec_to_float<F>(vec: &Vec<char>, msg_handler: F) -> result::Result<f64, String>
-    where F: Fn() -> String {
-    match vec.iter().map(|c| *c).collect::<String>().as_str().parse::<f64>() {
-        Ok(n) => Ok(n),
-        _ => Err(msg_handler())
-    }
-}
-
-pub fn vec_to_string(vec: &Vec<char>) -> String {
-    vec.iter().map(|c| *c).collect::<String>()
-}