mirror of https://github.com/fluencelabs/jsonpath
synced 2025-07-11 05:31:38 +00:00

Compare commits
16 Commits

SHA1
488e0b400f
fff0e869cb
6a270c9456
ebd49c2205
2537469f03
2e0f78f017
5d36a0cf15
a72a13117e
964e0c00f5
de97e2f95a
74666d264e
51deec66d0
909c851dcc
b41b9f3aa6
1a5e8cc025
5abbfba254
@@ -1,7 +1,7 @@
 <component name="ProjectRunConfigurationManager">
-<configuration default="false" name="selector" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+<configuration default="false" name="mutable" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
 <option name="channel" value="DEFAULT" />
-<option name="command" value="test --package jsonpath_lib --test selector """ />
+<option name="command" value="test --package jsonpath_lib --test mutable """ />
 <option name="allFeatures" value="false" />
 <option name="nocapture" value="false" />
 <option name="backtrace" value="SHORT" />
2
.idea/runConfigurations/parser.xml
generated
@@ -1,7 +1,7 @@
 <component name="ProjectRunConfigurationManager">
 <configuration default="false" name="parser" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
 <option name="channel" value="DEFAULT" />
-<option name="command" value="test --package jsonpath_lib --test parser """ />
+<option name="command" value="test --package jsonpath_lib --lib parser::parser_tests" />
 <option name="allFeatures" value="false" />
 <option name="nocapture" value="false" />
 <option name="backtrace" value="SHORT" />
@@ -1,7 +1,7 @@
 <component name="ProjectRunConfigurationManager">
-<configuration default="false" name="serde" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+<configuration default="false" name="readme" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
 <option name="channel" value="DEFAULT" />
-<option name="command" value="test --package jsonpath_lib --test serde """ />
+<option name="command" value="test --package jsonpath_lib --test readme """ />
 <option name="allFeatures" value="false" />
 <option name="nocapture" value="false" />
 <option name="backtrace" value="SHORT" />
2
.idea/runConfigurations/tokenizer.xml
generated
@@ -1,7 +1,7 @@
 <component name="ProjectRunConfigurationManager">
 <configuration default="false" name="tokenizer" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
 <option name="channel" value="DEFAULT" />
-<option name="command" value="test --package jsonpath_lib --test tokenizer """ />
+<option name="command" value="test --package jsonpath_lib --lib parser::tokenizer_tests" />
 <option name="allFeatures" value="false" />
 <option name="nocapture" value="false" />
 <option name="backtrace" value="SHORT" />
@@ -1,6 +1,6 @@
 [package]
 name = "jsonpath_lib"
-version = "0.2.0"
+version = "0.2.2"
 authors = ["Changseok Han <freestrings@gmail.com>"]

 description = "It is JsonPath engine written in Rust. it provide a similar API interface in Webassembly and Javascript also. - Webassembly Demo: https://freestrings.github.io/jsonpath"
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
38
docs/bench/bootstrap.js
vendored
@ -58,23 +58,26 @@
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_error_af8a3e3880eae1c8": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_error_af8a3e3880eae1c8"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_json_parse": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_json_serialize": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_call_88d2a6153573084e": function(p0i32,p1i32,p2i32,p3i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_call_88d2a6153573084e"](p0i32,p1i32,p2i32,p3i32);
|
||||
/******/ "__wbg_error_8015049cb5adfca2": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_error_8015049cb5adfca2"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_call_972de3aa550c37b2": function(p0i32,p1i32,p2i32,p3i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_call_972de3aa550c37b2"](p0i32,p1i32,p2i32,p3i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_is_string": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
@ -82,9 +85,6 @@
|
||||
/******/ "__wbindgen_string_get": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_debug_string": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_debug_string"](p0i32,p1i32);
|
||||
/******/ },
|
||||
@ -94,11 +94,11 @@
|
||||
/******/ "__wbindgen_rethrow": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_rethrow"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper22": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper22"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper18": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper18"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper24": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper24"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper20": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper20"](p0i32,p1i32,p2i32);
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ };
|
||||
@ -198,7 +198,7 @@
|
||||
/******/ promises.push(installedWasmModuleData);
|
||||
/******/ else {
|
||||
/******/ var importObject = wasmImportObjects[wasmModuleId]();
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../all_pkg/jsonpath_wasm_bg.wasm":"7a2fe8020c3403dd4ce6"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../all_pkg/jsonpath_wasm_bg.wasm":"d60993d3a441db221b47"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var promise;
|
||||
/******/ if(importObject instanceof Promise && typeof WebAssembly.compileStreaming === 'function') {
|
||||
/******/ promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {
|
||||
|
BIN
docs/bench/d60993d3a441db221b47.module.wasm
Normal file
Binary file not shown.
38
docs/bootstrap.js
vendored
@ -58,23 +58,26 @@
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_error_af8a3e3880eae1c8": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_error_af8a3e3880eae1c8"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_json_parse": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_json_serialize": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_call_88d2a6153573084e": function(p0i32,p1i32,p2i32,p3i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_call_88d2a6153573084e"](p0i32,p1i32,p2i32,p3i32);
|
||||
/******/ "__wbg_error_8015049cb5adfca2": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_error_8015049cb5adfca2"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_call_972de3aa550c37b2": function(p0i32,p1i32,p2i32,p3i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_call_972de3aa550c37b2"](p0i32,p1i32,p2i32,p3i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_is_string": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
@ -82,9 +85,6 @@
|
||||
/******/ "__wbindgen_string_get": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_debug_string": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_debug_string"](p0i32,p1i32);
|
||||
/******/ },
|
||||
@ -94,11 +94,11 @@
|
||||
/******/ "__wbindgen_rethrow": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_rethrow"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper22": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper22"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper18": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper18"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper24": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper24"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper20": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper20"](p0i32,p1i32,p2i32);
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ };
|
||||
@ -198,7 +198,7 @@
|
||||
/******/ promises.push(installedWasmModuleData);
|
||||
/******/ else {
|
||||
/******/ var importObject = wasmImportObjects[wasmModuleId]();
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../all_pkg/jsonpath_wasm_bg.wasm":"7a2fe8020c3403dd4ce6"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../all_pkg/jsonpath_wasm_bg.wasm":"d60993d3a441db221b47"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var promise;
|
||||
/******/ if(importObject instanceof Promise && typeof WebAssembly.compileStreaming === 'function') {
|
||||
/******/ promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {
|
||||
|
BIN
docs/d60993d3a441db221b47.module.wasm
Normal file
Binary file not shown.
@@ -1,6 +1,6 @@
 [package]
 name = "jsonpath4nodejs"
-version = "0.2.0"
+version = "0.2.2"
 authors = ["Changseok Han <freestrings@gmail.com>"]
 description = "jsonpath_lib bindings for nodejs"
 keywords = ["library", "jsonpath", "json", "nodejs"]
@@ -14,7 +14,7 @@ exclude = ["artifacts.json", "index.node"]
 neon-build = "0.2.0"

 [dependencies]
-jsonpath_lib = "0.2.0"
+jsonpath_lib = "0.2.2"
 #jsonpath_lib = { path = "../../" }
 neon = "0.2.0"
 neon-serde = "0.1.1"
@ -18,7 +18,7 @@ fn select(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
|
||||
match jsonpath::select(&json, path.as_str()) {
|
||||
Ok(value) => Ok(neon_serde::to_value(&mut ctx, &value)?),
|
||||
Err(e) => panic!("{:?}", e)
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
|
||||
@ -27,7 +27,7 @@ fn select_str(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
let path = ctx.argument::<JsString>(1)?.value();
|
||||
match jsonpath::select_as_str(&json_val, path.as_str()) {
|
||||
Ok(value) => Ok(JsString::new(&mut ctx, &value).upcast()),
|
||||
Err(e) => panic!("{:?}", e)
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
|
||||
@ -35,15 +35,19 @@ fn delete(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
let json_val = ctx.argument::<JsString>(0)?.value();
|
||||
let json: Value = match serde_json::from_str(&json_val) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
};
|
||||
let path = ctx.argument::<JsString>(1)?.value();
|
||||
match jsonpath::delete(json, &path) {
|
||||
Ok(value) => Ok(JsString::new(&mut ctx, match serde_json::to_string(&value) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
}).upcast()),
|
||||
Err(e) => panic!("{:?}", e)
|
||||
Ok(value) => Ok(JsString::new(
|
||||
&mut ctx,
|
||||
match serde_json::to_string(&value) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
},
|
||||
)
|
||||
.upcast()),
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
|
||||
@ -51,36 +55,43 @@ fn replace_with(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
let json_val = ctx.argument::<JsString>(0)?.value();
|
||||
let json: Value = match serde_json::from_str(&json_val) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
};
|
||||
let path = ctx.argument::<JsString>(1)?.value();
|
||||
let fun = ctx.argument::<JsFunction>(2)?;
|
||||
match jsonpath::replace_with(json, &path, &mut |v| {
|
||||
let json_str = JsString::new(&mut ctx, match serde_json::to_string(v) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
});
|
||||
let json_str = JsString::new(
|
||||
&mut ctx,
|
||||
match serde_json::to_string(v) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
},
|
||||
);
|
||||
|
||||
let null = ctx.null();
|
||||
let args = vec![ctx.string(json_str.value())];
|
||||
let result = match fun.call(&mut ctx, null, args) {
|
||||
Ok(result) => result,
|
||||
Err(e) => panic!("{:?}", e)
|
||||
Err(e) => panic!("{:?}", e),
|
||||
};
|
||||
let json_str = match result.downcast::<JsString>() {
|
||||
Ok(v) => v.value(),
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
};
|
||||
match serde_json::from_str(&json_str) {
|
||||
Ok(v) => v,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
}
|
||||
}) {
|
||||
Ok(value) => Ok(JsString::new(&mut ctx, match serde_json::to_string(&value) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
}).upcast()),
|
||||
Err(e) => panic!("{:?}", e)
|
||||
Ok(value) => Ok(JsString::new(
|
||||
&mut ctx,
|
||||
match serde_json::to_string(&value) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
},
|
||||
)
|
||||
.upcast()),
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
|
||||
@ -93,7 +104,7 @@ impl SelectorCls {
|
||||
fn path(&mut self, path: &str) {
|
||||
let node = match Parser::compile(path) {
|
||||
Ok(node) => node,
|
||||
Err(e) => panic!("{:?}", e)
|
||||
Err(e) => panic!("{:?}", e),
|
||||
};
|
||||
|
||||
self.node = Some(node);
|
||||
@ -102,7 +113,7 @@ impl SelectorCls {
|
||||
fn value(&mut self, json_str: &str) {
|
||||
let value: Value = match serde_json::from_str(&json_str) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
};
|
||||
|
||||
self.value = Some(value);
|
||||
@ -111,12 +122,12 @@ impl SelectorCls {
|
||||
fn select(&self) -> String {
|
||||
let node = match &self.node {
|
||||
Some(node) => node,
|
||||
None => panic!("{:?}", JsonPathError::EmptyPath)
|
||||
None => panic!("{:?}", JsonPathError::EmptyPath),
|
||||
};
|
||||
|
||||
let value = match &self.value {
|
||||
Some(value) => value,
|
||||
None => panic!("{:?}", JsonPathError::EmptyValue)
|
||||
None => panic!("{:?}", JsonPathError::EmptyValue),
|
||||
};
|
||||
|
||||
let mut selector = Selector::new();
|
||||
@ -124,7 +135,7 @@ impl SelectorCls {
|
||||
selector.value(&value);
|
||||
match selector.select_as_str() {
|
||||
Ok(ret) => ret,
|
||||
Err(e) => panic!("{:?}", e)
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -257,13 +268,17 @@ declare_types! {
|
||||
}
|
||||
}
|
||||
register_module!(mut m, {
|
||||
m.export_class::<JsCompileFn>("CompileFn").expect("CompileFn class error");
|
||||
m.export_class::<JsSelectorFn>("SelectorFn").expect("SelectorFn class error");
|
||||
m.export_class::<JsSelector>("Selector").expect("Selector class error");
|
||||
m.export_class::<JsSelectorMut>("SelectorMut").expect("SelectorMut class error");
|
||||
m.export_class::<JsCompileFn>("CompileFn")
|
||||
.expect("CompileFn class error");
|
||||
m.export_class::<JsSelectorFn>("SelectorFn")
|
||||
.expect("SelectorFn class error");
|
||||
m.export_class::<JsSelector>("Selector")
|
||||
.expect("Selector class error");
|
||||
m.export_class::<JsSelectorMut>("SelectorMut")
|
||||
.expect("SelectorMut class error");
|
||||
m.export_function("select", select)?;
|
||||
m.export_function("deleteValue", delete)?;
|
||||
m.export_function("replaceWith", replace_with)?;
|
||||
m.export_function("selectStr", select_str)?;
|
||||
Ok(())
|
||||
});
|
||||
});
|
||||
|
2
nodejs/package-lock.json
generated
@@ -1,6 +1,6 @@
 {
 "name": "jsonpath-rs",
-"version": "0.2.0",
+"version": "0.2.2",
 "lockfileVersion": 1,
 "requires": true,
 "dependencies": {
@@ -1,6 +1,6 @@
 {
 "name": "jsonpath-rs",
-"version": "0.2.0",
+"version": "0.2.2",
 "description": "It is JsonPath implementation. The core implementation is written in Rust",
 "author": "Changseok Han <freestrings@gmail.com>",
 "license": "MIT",
@ -398,6 +398,93 @@ describe('filter test', () => {
|
||||
run (done, i, list[i]);
|
||||
})
|
||||
}
|
||||
|
||||
it('object equal', (done) => {
|
||||
let selector = new jsonpath.Selector();
|
||||
selector.path('$..[?(@.a == 1)]');
|
||||
selector.value({
|
||||
'a': 1,
|
||||
'b': {'a': 1},
|
||||
'c': {'a': 1},
|
||||
});
|
||||
let result = selector.select();
|
||||
if (JSON.stringify(result) === JSON.stringify([{'a': 1}, {'a': 1}])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('escaped single quote notation', (done) => {
|
||||
let result = jsonpath.select({"single'quote":"value"}, "$['single\\'quote']");
|
||||
if (JSON.stringify(result) === JSON.stringify(["value"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('escaped double quote notation', (done) => {
|
||||
let result = jsonpath.select({"single\"quote":"value"}, "$['single\"quote']");
|
||||
if (JSON.stringify(result) === JSON.stringify(["value"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[::]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "second", "third", "forth", "fifth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[::2]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::2]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "third", "fifth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[1: :]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1: :]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["second", "third", "forth", "fifth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[1:2:]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1:2:]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["second"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[1::2]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1::2]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["second", "forth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[0:3:1]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:1]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "second", "third"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[0:3:2]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:2]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "third"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array keys', (done) => {
|
||||
let result = jsonpath.select({
|
||||
"key1": "value1",
|
||||
"key2": 2
|
||||
}, "$['key1', 'key2']");
|
||||
if (JSON.stringify(result) === JSON.stringify(["value1", 2])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('SelectorMut test', () => {
|
||||
@ -797,4 +884,13 @@ describe('README test', () => {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('ISSUE test', () => {
|
||||
it('Results do not match other implementations #6', (done) => {
|
||||
let result = jsonpath.select(["first", "second"], "$[:]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "second"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
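The new Node.js tests above exercise the 0.2.2 features: array slices with a step ($[::], $[::2], $[1::2], $[0:3:2]), multi-key bracket selection ($['key1', 'key2']) and escaped quote notation. A minimal sketch of the same selections through the Rust crate, assuming serde_json's json! macro and the jsonpath_lib select API shown elsewhere in this diff (illustrative only, not part of the change set):

extern crate jsonpath_lib as jsonpath;
#[macro_use]
extern crate serde_json;

fn main() {
    let arr = json!(["first", "second", "third", "forth", "fifth"]);
    // $[::2] selects every second element of the slice
    let ret = jsonpath::select(&arr, "$[::2]").unwrap();
    assert_eq!(ret, vec![&json!("first"), &json!("third"), &json!("fifth")]);

    let obj = json!({"key1": "value1", "key2": 2});
    // $['key1', 'key2'] selects several keys in one bracket, in order
    let ret = jsonpath::select(&obj, "$['key1', 'key2']").unwrap();
    assert_eq!(ret, vec![&json!("value1"), &json!(2)]);
}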
50
src/lib.rs
@ -134,8 +134,8 @@ extern crate serde_json;
|
||||
use serde_json::Value;
|
||||
|
||||
pub use parser::parser::{Node, Parser};
|
||||
pub use select::{Selector, SelectorMut};
|
||||
pub use select::JsonPathError;
|
||||
pub use select::{Selector, SelectorMut};
|
||||
|
||||
#[doc(hidden)]
|
||||
mod parser;
|
||||
@ -171,14 +171,12 @@ mod select;
|
||||
/// ```
|
||||
pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPathError> {
|
||||
let node = Parser::compile(path);
|
||||
move |json| {
|
||||
match &node {
|
||||
Ok(node) => {
|
||||
let mut selector = Selector::new();
|
||||
selector.compiled_path(node).value(json).select()
|
||||
}
|
||||
Err(e) => Err(JsonPathError::Path(e.to_string()))
|
||||
move |json| match &node {
|
||||
Ok(node) => {
|
||||
let mut selector = Selector::new();
|
||||
selector.compiled_path(node).value(json).select()
|
||||
}
|
||||
Err(e) => Err(JsonPathError::Path(e.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
@ -219,9 +217,7 @@ pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPath
|
||||
pub fn selector<'a>(json: &'a Value) -> impl FnMut(&'a str) -> Result<Vec<&Value>, JsonPathError> {
|
||||
let mut selector = Selector::new();
|
||||
let _ = selector.value(json);
|
||||
move |path: &str| {
|
||||
selector.str_path(path)?.reset_value().select()
|
||||
}
|
||||
move |path: &str| selector.str_path(path)?.reset_value().select()
|
||||
}
|
||||
|
||||
/// It is the same to `selector` function. but it deserialize the result as given type `T`.
|
||||
@ -270,12 +266,12 @@ pub fn selector<'a>(json: &'a Value) -> impl FnMut(&'a str) -> Result<Vec<&Value
|
||||
///
|
||||
/// assert_eq!(json, ret);
|
||||
/// ```
|
||||
pub fn selector_as<T: serde::de::DeserializeOwned>(json: &Value) -> impl FnMut(&str) -> Result<Vec<T>, JsonPathError> + '_ {
|
||||
pub fn selector_as<T: serde::de::DeserializeOwned>(
|
||||
json: &Value,
|
||||
) -> impl FnMut(&str) -> Result<Vec<T>, JsonPathError> + '_ {
|
||||
let mut selector = Selector::new();
|
||||
let _ = selector.value(json);
|
||||
move |path: &str| {
|
||||
selector.str_path(path)?.reset_value().select_as()
|
||||
}
|
||||
move |path: &str| selector.str_path(path)?.reset_value().select_as()
|
||||
}
|
||||
|
||||
/// It is a simple select function. but it compile the jsonpath argument every time.
|
||||
@ -374,7 +370,10 @@ pub fn select_as_str(json_str: &str, path: &str) -> Result<String, JsonPathError
|
||||
///
|
||||
/// assert_eq!(ret[0], person);
|
||||
/// ```
|
||||
pub fn select_as<T: serde::de::DeserializeOwned>(json_str: &str, path: &str) -> Result<Vec<T>, JsonPathError> {
|
||||
pub fn select_as<T: serde::de::DeserializeOwned>(
|
||||
json_str: &str,
|
||||
path: &str,
|
||||
) -> Result<Vec<T>, JsonPathError> {
|
||||
let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
|
||||
Selector::new().str_path(path)?.value(&json).select_as()
|
||||
}
|
||||
@ -413,7 +412,12 @@ pub fn select_as<T: serde::de::DeserializeOwned>(json_str: &str, path: &str) ->
|
||||
/// ```
|
||||
pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
|
||||
let mut selector = SelectorMut::new();
|
||||
let ret = selector.str_path(path)?.value(value).delete()?.take().unwrap_or(Value::Null);
|
||||
let ret = selector
|
||||
.str_path(path)?
|
||||
.value(value)
|
||||
.delete()?
|
||||
.take()
|
||||
.unwrap_or(Value::Null);
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
@ -460,9 +464,15 @@ pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
|
||||
/// ]}));
|
||||
/// ```
|
||||
pub fn replace_with<F>(value: Value, path: &str, fun: &mut F) -> Result<Value, JsonPathError>
|
||||
where F: FnMut(&Value) -> Value
|
||||
where
|
||||
F: FnMut(&Value) -> Value,
|
||||
{
|
||||
let mut selector = SelectorMut::new();
|
||||
let ret = selector.str_path(path)?.value(value).replace_with(fun)?.take().unwrap_or(Value::Null);
|
||||
let ret = selector
|
||||
.str_path(path)?
|
||||
.value(value)
|
||||
.replace_with(fun)?
|
||||
.take()
|
||||
.unwrap_or(Value::Null);
|
||||
Ok(ret)
|
||||
}
|
||||
}
|
||||
|
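A minimal usage sketch for the delete and replace_with helpers reformatted above, following the signatures shown in this diff (delete(Value, &str) and replace_with(Value, &str, &mut F) with F: FnMut(&Value) -> Value); the sample data, and the assumption that deleted matches are replaced by null, are illustrative and not taken from the change set:

extern crate jsonpath_lib as jsonpath;
#[macro_use]
extern crate serde_json;

use serde_json::Value;

fn main() {
    let json = json!({"a": {"b": 1, "c": 2}});

    // assumed behaviour: delete keeps the document shape and nulls the matched value
    let deleted = jsonpath::delete(json.clone(), "$.a.b").unwrap();
    assert_eq!(deleted, json!({"a": {"b": null, "c": 2}}));

    // replace_with rewrites each matched value through the closure
    let replaced = jsonpath::replace_with(json, "$.a.c", &mut |v: &Value| {
        json!(v.as_i64().unwrap() * 10)
    })
    .unwrap();
    assert_eq!(replaced, json!({"a": {"b": 1, "c": 20}}));
}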
@ -1,10 +1,10 @@
|
||||
pub mod parser;
|
||||
mod path_reader;
|
||||
pub(crate) mod tokenizer;
|
||||
pub mod parser;
|
||||
|
||||
#[cfg(test)]
|
||||
mod parser_tests {
|
||||
use parser::parser::{ParseToken, Parser, NodeVisitor, FilterToken};
|
||||
use parser::parser::{FilterToken, NodeVisitor, ParseToken, Parser};
|
||||
|
||||
struct NodeVisitorTestImpl<'a> {
|
||||
input: &'a str,
|
||||
@ -13,7 +13,10 @@ mod parser_tests {
|
||||
|
||||
impl<'a> NodeVisitorTestImpl<'a> {
|
||||
fn new(input: &'a str) -> Self {
|
||||
NodeVisitorTestImpl { input, stack: Vec::new() }
|
||||
NodeVisitorTestImpl {
|
||||
input,
|
||||
stack: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn start(&mut self) -> Result<Vec<ParseToken>, String> {
|
||||
@ -42,49 +45,63 @@ mod parser_tests {
|
||||
fn parse_path() {
|
||||
setup();
|
||||
|
||||
assert_eq!(run("$.aa"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("aa".to_owned())
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.aa"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("aa".to_owned())
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.00.a"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("00".to_owned()),
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned())
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.00.a"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("00".to_owned()),
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned())
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.00.韓창.seok"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("00".to_owned()),
|
||||
ParseToken::In,
|
||||
ParseToken::Key("韓창".to_owned()),
|
||||
ParseToken::In,
|
||||
ParseToken::Key("seok".to_owned())
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.00.韓창.seok"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("00".to_owned()),
|
||||
ParseToken::In,
|
||||
ParseToken::Key("韓창".to_owned()),
|
||||
ParseToken::In,
|
||||
ParseToken::Key("seok".to_owned())
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.*"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::All
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.*"),
|
||||
Ok(vec![ParseToken::Absolute, ParseToken::In, ParseToken::All])
|
||||
);
|
||||
|
||||
assert_eq!(run("$..*"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Leaves,
|
||||
ParseToken::All
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$..*"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Leaves,
|
||||
ParseToken::All
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$..[0]"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Leaves,
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(0.0),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$..[0]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Leaves,
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(0.0),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
match run("$.") {
|
||||
Ok(_) => panic!(),
|
||||
@ -106,162 +123,346 @@ mod parser_tests {
|
||||
fn parse_array_sytax() {
|
||||
setup();
|
||||
|
||||
assert_eq!(run("$.book[?(@.isbn)]"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("book".to_string()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Relative,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("isbn".to_string()),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.book[?(@.isbn)]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("book".to_string()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Relative,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("isbn".to_string()),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
//
|
||||
// Array도 컨텍스트 In으로 간주 할거라서 중첩되면 하나만
|
||||
//
|
||||
assert_eq!(run("$.[*]"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.[*]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[*]"), Ok(vec![
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.a[*]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[*].가"), Ok(vec![
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::In, ParseToken::Key("가".to_owned())
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.a[*].가"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("가".to_owned())
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[0][1]"), Ok(vec![
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(0_f64),
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(1_f64),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.a[0][1]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(0_f64),
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(1_f64),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[1,2]"), Ok(vec![
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Union(vec![1, 2]),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.a[1,2]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Union(vec![1, 2]),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[10:]"), Ok(vec![
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(Some(10), None),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.a[10:]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(Some(10), None, None),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[:11]"), Ok(vec![
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(None, Some(11)),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.a[:11]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(None, Some(11), None),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[-12:13]"), Ok(vec![
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(Some(-12), Some(13)),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.a[-12:13]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(Some(-12), Some(13), None),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[?(1>2)]"), Ok(vec![
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Greater),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run(r#"$[0:3:2]"#),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(Some(0), Some(3), Some(2)),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[?($.b>3)]"), Ok(vec![
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("b".to_owned()), ParseToken::Number(3_f64), ParseToken::Filter(FilterToken::Greater),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run(r#"$[:3:2]"#),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(None, Some(3), Some(2)),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$[?($.c>@.d && 1==2)]"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
|
||||
ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
|
||||
ParseToken::Filter(FilterToken::Greater),
|
||||
ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
|
||||
ParseToken::Filter(FilterToken::And),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run(r#"$[:]"#),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(None, None, None),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$[?($.c>@.d&&(1==2||3>=4))]"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
|
||||
ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
|
||||
ParseToken::Filter(FilterToken::Greater),
|
||||
ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
|
||||
ParseToken::Number(3_f64), ParseToken::Number(4_f64), ParseToken::Filter(FilterToken::GreaterOrEqual),
|
||||
ParseToken::Filter(FilterToken::Or),
|
||||
ParseToken::Filter(FilterToken::And),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run(r#"$[::]"#),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(None, None, None),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$[?(@.a<@.b)]"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Relative, ParseToken::In, ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Relative, ParseToken::In, ParseToken::Key("b".to_owned()),
|
||||
ParseToken::Filter(FilterToken::Little),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run(r#"$[::2]"#),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(None, None, Some(2)),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$[*][*][*]"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run(r#"$["a", 'b']"#),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Keys(vec!["a".to_string(), "b".to_string()]),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$['a']['bb']"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Key("a".to_string()),
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::Array,
|
||||
ParseToken::Key("bb".to_string()),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.a[?(1>2)]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(1_f64),
|
||||
ParseToken::Number(2_f64),
|
||||
ParseToken::Filter(FilterToken::Greater),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(run("$.a[?(@.e==true)]"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_string()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Relative,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("e".to_string()),
|
||||
ParseToken::Bool(true),
|
||||
ParseToken::Filter(FilterToken::Equal),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$.a[?($.b>3)]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("b".to_owned()),
|
||||
ParseToken::Number(3_f64),
|
||||
ParseToken::Filter(FilterToken::Greater),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run("$[?($.c>@.d && 1==2)]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("c".to_owned()),
|
||||
ParseToken::Relative,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("d".to_owned()),
|
||||
ParseToken::Filter(FilterToken::Greater),
|
||||
ParseToken::Number(1_f64),
|
||||
ParseToken::Number(2_f64),
|
||||
ParseToken::Filter(FilterToken::Equal),
|
||||
ParseToken::Filter(FilterToken::And),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run("$[?($.c>@.d&&(1==2||3>=4))]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("c".to_owned()),
|
||||
ParseToken::Relative,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("d".to_owned()),
|
||||
ParseToken::Filter(FilterToken::Greater),
|
||||
ParseToken::Number(1_f64),
|
||||
ParseToken::Number(2_f64),
|
||||
ParseToken::Filter(FilterToken::Equal),
|
||||
ParseToken::Number(3_f64),
|
||||
ParseToken::Number(4_f64),
|
||||
ParseToken::Filter(FilterToken::GreaterOrEqual),
|
||||
ParseToken::Filter(FilterToken::Or),
|
||||
ParseToken::Filter(FilterToken::And),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run("$[?(@.a<@.b)]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Relative,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_owned()),
|
||||
ParseToken::Relative,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("b".to_owned()),
|
||||
ParseToken::Filter(FilterToken::Little),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run("$[*][*][*]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::Array,
|
||||
ParseToken::All,
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run("$['a']['bb']"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Key("a".to_string()),
|
||||
ParseToken::ArrayEof,
|
||||
ParseToken::Array,
|
||||
ParseToken::Key("bb".to_string()),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run("$.a[?(@.e==true)]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("a".to_string()),
|
||||
ParseToken::Array,
|
||||
ParseToken::Relative,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("e".to_string()),
|
||||
ParseToken::Bool(true),
|
||||
ParseToken::Filter(FilterToken::Equal),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run("$[:]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Range(None, None, None),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run(r#"$['single\'quote']"#),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Key("single'quote".to_string()),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run(r#"$["single\"quote"]"#),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Key(r#"single"quote"#.to_string()),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
match run("$[") {
|
||||
Ok(_) => panic!(),
|
||||
@ -298,12 +499,17 @@ mod parser_tests {
|
||||
fn parse_array_float() {
|
||||
setup();
|
||||
|
||||
assert_eq!(run("$[?(1.1<2.1)]"), Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(1.1), ParseToken::Number(2.1), ParseToken::Filter(FilterToken::Little),
|
||||
ParseToken::ArrayEof
|
||||
]));
|
||||
assert_eq!(
|
||||
run("$[?(1.1<2.1)]"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Number(1.1),
|
||||
ParseToken::Number(2.1),
|
||||
ParseToken::Filter(FilterToken::Little),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
match run("$[1.1]") {
|
||||
Ok(_) => panic!(),
|
||||
@ -329,7 +535,11 @@ mod parser_tests {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tokenizer_tests {
|
||||
use parser::tokenizer::{Token, TokenError, Tokenizer, TokenReader};
|
||||
use parser::tokenizer::{Token, TokenError, TokenReader, Tokenizer};
|
||||
|
||||
fn setup() {
|
||||
let _ = env_logger::try_init();
|
||||
}
|
||||
|
||||
fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
|
||||
let mut tokenizer = Tokenizer::new(input);
|
||||
@ -352,73 +562,80 @@ mod tokenizer_tests {
|
||||
let mut tokenizer = TokenReader::new("$.a");
|
||||
match tokenizer.next_token() {
|
||||
Ok(t) => assert_eq!(Token::Absolute(0), t),
|
||||
_ => panic!()
|
||||
_ => panic!(),
|
||||
}
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(t) => assert_eq!(&Token::Dot(1), t),
|
||||
_ => panic!()
|
||||
_ => panic!(),
|
||||
}
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(t) => assert_eq!(&Token::Dot(1), t),
|
||||
_ => panic!()
|
||||
_ => panic!(),
|
||||
}
|
||||
|
||||
match tokenizer.next_token() {
|
||||
Ok(t) => assert_eq!(Token::Dot(1), t),
|
||||
_ => panic!()
|
||||
_ => panic!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn token() {
|
||||
run("$.01.a",
|
||||
setup();
|
||||
|
||||
run(
|
||||
"$.01.a",
|
||||
(
|
||||
vec![
|
||||
Token::Absolute(0),
|
||||
Token::Dot(1),
|
||||
Token::Key(2, "01".to_string()),
|
||||
Token::Dot(4),
|
||||
Token::Key(5, "a".to_string())
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
Token::Key(5, "a".to_string()),
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run("$. []",
|
||||
run(
|
||||
"$. []",
|
||||
(
|
||||
vec![
|
||||
Token::Absolute(0),
|
||||
Token::Dot(1),
|
||||
Token::Whitespace(2, 2),
|
||||
Token::OpenArray(5),
|
||||
Token::CloseArray(6)
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
Token::CloseArray(6),
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run("$..",
|
||||
run(
|
||||
"$..",
|
||||
(
|
||||
vec![Token::Absolute(0), Token::Dot(1), Token::Dot(2)],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run(
|
||||
"$..ab",
|
||||
(
|
||||
vec![
|
||||
Token::Absolute(0),
|
||||
Token::Dot(1),
|
||||
Token::Dot(2),
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
Token::Key(3, "ab".to_string()),
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run("$..ab",
|
||||
(
|
||||
vec![
|
||||
Token::Absolute(0),
|
||||
Token::Dot(1),
|
||||
Token::Dot(2),
|
||||
Token::Key(3, "ab".to_string())
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
|
||||
run("$..가 [",
|
||||
run(
|
||||
"$..가 [",
|
||||
(
|
||||
vec![
|
||||
Token::Absolute(0),
|
||||
@ -427,11 +644,13 @@ mod tokenizer_tests {
|
||||
Token::Key(3, "가".to_string()),
|
||||
Token::Whitespace(6, 0),
|
||||
Token::OpenArray(7),
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run("[-1, 2 ]",
|
||||
run(
|
||||
"[-1, 2 ]",
|
||||
(
|
||||
vec![
|
||||
Token::OpenArray(0),
|
||||
@ -441,11 +660,13 @@ mod tokenizer_tests {
|
||||
Token::Key(5, "2".to_string()),
|
||||
Token::Whitespace(6, 0),
|
||||
Token::CloseArray(7),
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run("[ 1 2 , 3 \"abc\" : -10 ]",
|
||||
run(
|
||||
"[ 1 2 , 3 \"abc\" : -10 ]",
|
||||
(
|
||||
vec![
|
||||
Token::OpenArray(0),
|
||||
@ -465,11 +686,13 @@ mod tokenizer_tests {
|
||||
Token::Key(18, "-10".to_string()),
|
||||
Token::Whitespace(21, 0),
|
||||
Token::CloseArray(22),
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run("?(@.a가 <41.01)",
|
||||
run(
|
||||
"?(@.a가 <41.01)",
|
||||
(
|
||||
vec![
|
||||
Token::Question(0),
|
||||
@ -483,11 +706,13 @@ mod tokenizer_tests {
|
||||
Token::Dot(12),
|
||||
Token::Key(13, "01".to_string()),
|
||||
Token::CloseParenthesis(15),
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run("?(@.a <4a.01)",
|
||||
run(
|
||||
"?(@.a <4a.01)",
|
||||
(
|
||||
vec![
|
||||
Token::Question(0),
|
||||
@ -501,24 +726,67 @@ mod tokenizer_tests {
|
||||
Token::Dot(9),
|
||||
Token::Key(10, "01".to_string()),
|
||||
Token::CloseParenthesis(12),
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run("?($.c>@.d)", (
|
||||
vec![
|
||||
Token::Question(0),
|
||||
Token::OpenParenthesis(1),
|
||||
Token::Absolute(2),
|
||||
Token::Dot(3),
|
||||
Token::Key(4, "c".to_string()),
|
||||
Token::Greater(5),
|
||||
Token::At(6),
|
||||
Token::Dot(7),
|
||||
Token::Key(8, "d".to_string()),
|
||||
Token::CloseParenthesis(9)
|
||||
]
|
||||
, Some(TokenError::Eof)
|
||||
));
|
||||
run(
|
||||
"?($.c>@.d)",
|
||||
(
|
||||
vec![
|
||||
Token::Question(0),
|
||||
Token::OpenParenthesis(1),
|
||||
Token::Absolute(2),
|
||||
Token::Dot(3),
|
||||
Token::Key(4, "c".to_string()),
|
||||
Token::Greater(5),
|
||||
Token::At(6),
|
||||
Token::Dot(7),
|
||||
Token::Key(8, "d".to_string()),
|
||||
Token::CloseParenthesis(9),
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run(
|
||||
"$[:]",
|
||||
(
|
||||
vec![
|
||||
Token::Absolute(0),
|
||||
Token::OpenArray(1),
|
||||
Token::Split(2),
|
||||
Token::CloseArray(3),
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run(
|
||||
r#"$['single\'quote']"#,
|
||||
(
|
||||
vec![
|
||||
Token::Absolute(0),
|
||||
Token::OpenArray(1),
|
||||
Token::SingleQuoted(2, "single\'quote".to_string()),
|
||||
Token::CloseArray(17),
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run(
|
||||
r#"$["double\"quote"]"#,
|
||||
(
|
||||
vec![
|
||||
Token::Absolute(0),
|
||||
Token::OpenArray(1),
|
||||
Token::DoubleQuoted(2, "double\"quote".to_string()),
|
||||
Token::CloseArray(17),
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,3 +1,5 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use super::tokenizer::*;
|
||||
|
||||
const DUMMY: usize = 0;
|
||||
@ -5,19 +7,15 @@ const DUMMY: usize = 0;
|
||||
type ParseResult<T> = Result<T, String>;
|
||||
|
||||
mod utils {
|
||||
pub fn string_to_isize<F>(string: &String, msg_handler: F) -> Result<isize, String>
|
||||
where F: Fn() -> String {
|
||||
match string.as_str().parse::<isize>() {
|
||||
Ok(n) => Ok(n),
|
||||
_ => Err(msg_handler())
|
||||
}
|
||||
}
|
||||
use std::str::FromStr;
|
||||
|
||||
pub fn string_to_f64<F>(string: &String, msg_handler: F) -> Result<f64, String>
|
||||
where F: Fn() -> String {
|
||||
match string.as_str().parse::<f64>() {
|
||||
pub fn string_to_num<F, S: FromStr>(string: &String, msg_handler: F) -> Result<S, String>
|
||||
where
|
||||
F: Fn() -> String,
|
||||
{
|
||||
match string.as_str().parse() {
|
||||
Ok(n) => Ok(n),
|
||||
_ => Err(msg_handler())
|
||||
_ => Err(msg_handler()),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -36,6 +34,7 @@ pub enum ParseToken {
|
||||
All,
|
||||
|
||||
Key(String),
|
||||
Keys(Vec<String>),
|
||||
// []
|
||||
Array,
|
||||
// 메타토큰
|
||||
@ -43,7 +42,7 @@ pub enum ParseToken {
|
||||
// ?( filter )
|
||||
Filter(FilterToken),
|
||||
// 1 : 2
|
||||
Range(Option<isize>, Option<isize>),
|
||||
Range(Option<isize>, Option<isize>, Option<usize>),
|
||||
// 1, 2, 3
|
||||
Union(Vec<isize>),
|
||||
|
||||
@ -88,9 +87,7 @@ impl Parser {
|
||||
let node = Self::node(ParseToken::Absolute);
|
||||
Self::paths(node, tokenizer)
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -107,9 +104,7 @@ impl Parser {
|
||||
let node = Self::array(prev, tokenizer)?;
|
||||
Self::paths(node, tokenizer)
|
||||
}
|
||||
_ => {
|
||||
Ok(prev)
|
||||
}
|
||||
_ => Ok(prev),
|
||||
}
|
||||
}
|
||||
|
||||
@ -124,34 +119,22 @@ impl Parser {
|
||||
| Ok(Token::Greater(_))
|
||||
| Ok(Token::GreaterOrEqual(_))
|
||||
| Ok(Token::And(_))
|
||||
| Ok(Token::Or(_)) => {
|
||||
Ok(node)
|
||||
}
|
||||
_ => {
|
||||
Self::paths(node, tokenizer)
|
||||
}
|
||||
| Ok(Token::Or(_)) => Ok(node),
|
||||
_ => Self::paths(node, tokenizer),
|
||||
}
|
||||
}
|
||||
|
||||
fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => {
|
||||
Self::path_leaves(prev, tokenizer)
|
||||
}
|
||||
Ok(Token::Asterisk(_)) => {
|
||||
Self::path_in_all(prev, tokenizer)
|
||||
}
|
||||
Ok(Token::Key(_, _)) => {
|
||||
Self::path_in_key(prev, tokenizer)
|
||||
}
|
||||
Ok(Token::Dot(_)) => Self::path_leaves(prev, tokenizer),
|
||||
Ok(Token::Asterisk(_)) => Self::path_in_all(prev, tokenizer),
|
||||
Ok(Token::Key(_, _)) => Self::path_in_key(prev, tokenizer),
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::array(prev, tokenizer)
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -159,17 +142,13 @@ impl Parser {
|
||||
debug!("#path_leaves");
|
||||
Self::eat_token(tokenizer);
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Asterisk(_)) => {
|
||||
Self::path_leaves_all(prev, tokenizer)
|
||||
}
|
||||
Ok(Token::Asterisk(_)) => Self::path_leaves_all(prev, tokenizer),
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
let mut leaves_node = Self::node(ParseToken::Leaves);
|
||||
leaves_node.left = Some(Box::new(prev));
|
||||
Ok(Self::paths(leaves_node, tokenizer)?)
|
||||
}
|
||||
_ => {
|
||||
Self::path_leaves_key(prev, tokenizer)
|
||||
}
|
||||
_ => Self::path_leaves_key(prev, tokenizer),
|
||||
}
|
||||
}
|
||||
|
||||
@ -214,12 +193,8 @@ impl Parser {
|
||||
fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#key");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(_, v)) => {
|
||||
Ok(Self::node(ParseToken::Key(v)))
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
Ok(Token::Key(_, v)) => Ok(Self::node(ParseToken::Key(v))),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -229,25 +204,45 @@ impl Parser {
|
||||
Ok(Token::Key(_, v)) => {
|
||||
Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn array_quota_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_quota_value");
|
||||
fn array_keys(tokenizer: &mut TokenReader, first_key: String) -> ParseResult<Node> {
let mut keys = vec![first_key];
while tokenizer.peek_is(COMMA) {
Self::eat_token(tokenizer);
Self::eat_whitespace(tokenizer);

if !(tokenizer.peek_is(SINGLE_QUOTE) || tokenizer.peek_is(DOUBLE_QUOTE)) {
return Err(tokenizer.err_msg());
}

match tokenizer.next_token() {
Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
keys.push(val);
}
_ => {}
}

Self::eat_whitespace(tokenizer);
}

Ok(Self::node(ParseToken::Keys(keys)))
}

fn array_quote_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_quote_value");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::SingleQuoted(_, val))
|
||||
| Ok(Token::DoubleQuoted(_, val)) => {
|
||||
Ok(Self::node(ParseToken::Key(val)))
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
Ok(Self::node(ParseToken::Eof))
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
|
||||
if !tokenizer.peek_is(COMMA) {
|
||||
Ok(Self::node(ParseToken::Key(val)))
|
||||
} else {
|
||||
Self::array_keys(tokenizer, val)
|
||||
}
|
||||
}
|
||||
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -270,13 +265,11 @@ impl Parser {
|
||||
right: Some(Box::new(Self::node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::array_value(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
_ => Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::array_value(tokenizer)?)),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
@ -291,45 +284,31 @@ impl Parser {
|
||||
debug!("#array_value_key");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Comma(_)) => {
|
||||
Self::union(digit, tokenizer)
|
||||
}
|
||||
Ok(Token::Split(_)) => {
|
||||
Self::range_from(digit, tokenizer)
|
||||
}
|
||||
_ => {
|
||||
Ok(Self::node(ParseToken::Number(digit as f64)))
|
||||
}
|
||||
Ok(Token::Comma(_)) => Self::union(digit, tokenizer),
|
||||
Ok(Token::Split(_)) => Self::range_from(digit, tokenizer),
|
||||
_ => Ok(Self::node(ParseToken::Number(digit as f64))),
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_value");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Key(_, _)) => {
|
||||
Self::array_value_key(tokenizer)
|
||||
}
|
||||
Ok(Token::Key(_, _)) => Self::array_value_key(tokenizer),
|
||||
Ok(Token::Split(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::range_to(tokenizer)
|
||||
}
|
||||
Ok(Token::DoubleQuoted(_, _))
|
||||
| Ok(Token::SingleQuoted(_, _)) => {
|
||||
Self::array_quota_value(tokenizer)
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
Ok(Self::node(ParseToken::Eof))
|
||||
Ok(Token::DoubleQuoted(_, _)) | Ok(Token::SingleQuoted(_, _)) => {
|
||||
Self::array_quote_value(tokenizer)
|
||||
}
|
||||
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
|
||||
_ => {
|
||||
Self::eat_token(tokenizer);
|
||||
Err(tokenizer.err_msg())
|
||||
@ -342,13 +321,13 @@ impl Parser {
|
||||
let mut values = vec![num];
|
||||
while match tokenizer.peek_token() {
|
||||
Ok(Token::Comma(_)) => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
} {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
values.push(digit);
|
||||
}
|
||||
_ => {
|
||||
@ -359,43 +338,80 @@ impl Parser {
|
||||
Ok(Self::node(ParseToken::Union(values)))
|
||||
}
|
||||
|
||||
fn range_from(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
if tokenizer.peek_is(SPLIT) {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
if tokenizer.peek_is(KEY) {
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, str_step)) => {
|
||||
match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
|
||||
Ok(step) => Ok(Some(step)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
_ => Ok(None),
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
fn range_from(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#range_from");
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Key(_, _)) => {
|
||||
Self::range(num, tokenizer)
|
||||
}
|
||||
_ => {
|
||||
Ok(Self::node(ParseToken::Range(Some(num), None)))
|
||||
}
|
||||
Ok(Token::Key(_, _)) => Self::range(from, tokenizer),
|
||||
Ok(Token::Split(_)) => match Self::range_value(tokenizer)? {
|
||||
Some(step) => Ok(Self::node(ParseToken::Range(Some(from), None, Some(step)))),
|
||||
_ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
|
||||
},
|
||||
_ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
|
||||
}
|
||||
}
|
||||
|
||||
fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#range_to");
|
||||
|
||||
match Self::range_value(tokenizer)? {
|
||||
Some(step) => return Ok(Self::node(ParseToken::Range(None, None, Some(step)))),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::CloseArray(_)) => {
|
||||
return Ok(Self::node(ParseToken::Range(None, None, None)));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(Self::node(ParseToken::Range(None, Some(digit))))
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
Ok(Token::Key(pos, ref to_str)) => {
|
||||
let to = utils::string_to_num(to_str, || tokenizer.err_msg_with_pos(pos))?;
|
||||
let step = Self::range_value(tokenizer)?;
|
||||
Ok(Self::node(ParseToken::Range(None, Some(to), step)))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn range(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
fn range(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#range");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_isize(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(Self::node(ParseToken::Range(Some(num), Some(digit))))
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
Ok(Token::Key(pos, ref str_to)) => {
|
||||
let to = utils::string_to_num(str_to, || tokenizer.err_msg_with_pos(pos))?;
|
||||
let step = Self::range_value(tokenizer)?;
|
||||
Ok(Self::node(ParseToken::Range(Some(from), Some(to), step)))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -407,12 +423,8 @@ impl Parser {
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
Ok(Self::node(ParseToken::Eof))
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -455,9 +467,7 @@ impl Parser {
|
||||
right: Some(Box::new(Self::exprs(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
Ok(prev)
|
||||
}
|
||||
_ => Ok(prev),
|
||||
}
|
||||
}
|
||||
|
||||
@ -466,7 +476,7 @@ impl Parser {
|
||||
|
||||
let has_prop_candidate = match tokenizer.peek_token() {
|
||||
Ok(Token::At(_)) => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
};
|
||||
|
||||
let node = Self::term(tokenizer)?;
|
||||
@ -479,7 +489,7 @@ impl Parser {
|
||||
| Ok(Token::LittleOrEqual(_))
|
||||
| Ok(Token::Greater(_))
|
||||
| Ok(Token::GreaterOrEqual(_)) => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
} {
|
||||
Self::op(node, tokenizer)
|
||||
} else if has_prop_candidate {
|
||||
@ -492,23 +502,15 @@ impl Parser {
|
||||
fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#term_num");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, val)) => {
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => {
|
||||
Self::term_num_float(val.as_str(), tokenizer)
|
||||
}
|
||||
_ => {
|
||||
let number = utils::string_to_f64(&val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(Self::node(ParseToken::Number(number)))
|
||||
}
|
||||
Ok(Token::Key(pos, val)) => match tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => Self::term_num_float(val.as_str(), tokenizer),
|
||||
_ => {
|
||||
let number = utils::string_to_num(&val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(Self::node(ParseToken::Number(number)))
|
||||
}
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
Ok(Self::node(ParseToken::Eof))
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
},
|
||||
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -521,12 +523,10 @@ impl Parser {
|
||||
f.push_str(&mut num);
|
||||
f.push('.');
|
||||
f.push_str(frac.as_str());
|
||||
let number = utils::string_to_f64(&f, || tokenizer.err_msg_with_pos(pos))?;
|
||||
let number = utils::string_to_num(&f, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(Self::node(ParseToken::Number(number)))
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -550,9 +550,7 @@ impl Parser {
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Ok(node)
|
||||
}
|
||||
_ => {
|
||||
Self::paths(node, tokenizer)
|
||||
}
|
||||
_ => Self::paths(node, tokenizer),
|
||||
};
|
||||
}
|
||||
|
||||
@ -560,8 +558,8 @@ impl Parser {
|
||||
return Self::json_path(tokenizer);
|
||||
}
|
||||
|
||||
if tokenizer.peek_is(DOUBLE_QUOTA) || tokenizer.peek_is(SINGLE_QUOTA) {
|
||||
return Self::array_quota_value(tokenizer);
|
||||
if tokenizer.peek_is(DOUBLE_QUOTE) || tokenizer.peek_is(SINGLE_QUOTE) {
|
||||
return Self::array_quote_value(tokenizer);
|
||||
}
|
||||
|
||||
if tokenizer.peek_is(KEY) {
|
||||
@ -569,11 +567,11 @@ impl Parser {
|
||||
Some(key) => match key.chars().next() {
|
||||
Some(ch) => match ch {
|
||||
'-' | '0'...'9' => Self::term_num(tokenizer),
|
||||
_ => Self::boolean(tokenizer)
|
||||
}
|
||||
_ => Err(tokenizer.err_msg())
|
||||
_ => Self::boolean(tokenizer),
|
||||
},
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
},
|
||||
_ => Err(tokenizer.err_msg())
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
};
|
||||
}
|
||||
|
||||
@ -583,27 +581,13 @@ impl Parser {
|
||||
fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#op");
|
||||
let token = match tokenizer.next_token() {
|
||||
Ok(Token::Equal(_)) => {
|
||||
ParseToken::Filter(FilterToken::Equal)
|
||||
}
|
||||
Ok(Token::NotEqual(_)) => {
|
||||
ParseToken::Filter(FilterToken::NotEqual)
|
||||
}
|
||||
Ok(Token::Little(_)) => {
|
||||
ParseToken::Filter(FilterToken::Little)
|
||||
}
|
||||
Ok(Token::LittleOrEqual(_)) => {
|
||||
ParseToken::Filter(FilterToken::LittleOrEqual)
|
||||
}
|
||||
Ok(Token::Greater(_)) => {
|
||||
ParseToken::Filter(FilterToken::Greater)
|
||||
}
|
||||
Ok(Token::GreaterOrEqual(_)) => {
|
||||
ParseToken::Filter(FilterToken::GreaterOrEqual)
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
ParseToken::Eof
|
||||
}
|
||||
Ok(Token::Equal(_)) => ParseToken::Filter(FilterToken::Equal),
|
||||
Ok(Token::NotEqual(_)) => ParseToken::Filter(FilterToken::NotEqual),
|
||||
Ok(Token::Little(_)) => ParseToken::Filter(FilterToken::Little),
|
||||
Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
|
||||
Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
|
||||
Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
|
||||
Err(TokenError::Eof) => ParseToken::Eof,
|
||||
_ => {
|
||||
return Err(tokenizer.err_msg());
|
||||
}
|
||||
@ -629,18 +613,18 @@ impl Parser {
|
||||
}
|
||||
|
||||
fn node(token: ParseToken) -> Node {
|
||||
Node { left: None, right: None, token }
|
||||
Node {
|
||||
left: None,
|
||||
right: None,
|
||||
token,
|
||||
}
|
||||
}
|
||||
|
||||
fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#close_token");
|
||||
match tokenizer.next_token() {
|
||||
Ok(ref t) if t.partial_eq(token) => {
|
||||
Ok(ret)
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
Ok(ref t) if t.partial_eq(token) => Ok(ret),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -652,14 +636,14 @@ pub trait NodeVisitor {
|
||||
| ParseToken::Relative
|
||||
| ParseToken::All
|
||||
| ParseToken::Key(_)
|
||||
| ParseToken::Range(_, _)
|
||||
| ParseToken::Keys(_)
|
||||
| ParseToken::Range(_, _, _)
|
||||
| ParseToken::Union(_)
|
||||
| ParseToken::Number(_)
|
||||
| ParseToken::Bool(_) => {
|
||||
self.visit_token(&node.token);
|
||||
}
|
||||
ParseToken::In
|
||||
| ParseToken::Leaves => {
|
||||
ParseToken::In | ParseToken::Leaves => {
|
||||
match &node.left {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
@ -686,8 +670,7 @@ pub trait NodeVisitor {
|
||||
}
|
||||
self.visit_token(&ParseToken::ArrayEof);
|
||||
}
|
||||
ParseToken::Filter(FilterToken::And)
|
||||
| ParseToken::Filter(FilterToken::Or) => {
|
||||
ParseToken::Filter(FilterToken::And) | ParseToken::Filter(FilterToken::Or) => {
|
||||
match &node.left {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
@ -723,4 +706,4 @@ pub trait NodeVisitor {
|
||||
|
||||
fn visit_token(&mut self, token: &ParseToken);
|
||||
fn end_term(&mut self) {}
|
||||
}
|
||||
}
|
||||
|
@ -12,10 +12,7 @@ pub struct PathReader<'a> {
|
||||
|
||||
impl<'a> PathReader<'a> {
|
||||
pub fn new(input: &'a str) -> Self {
|
||||
PathReader {
|
||||
input,
|
||||
pos: 0,
|
||||
}
|
||||
PathReader { input, pos: 0 }
|
||||
}
|
||||
|
||||
pub fn peek_char(&self) -> Result<(usize, char), ReaderError> {
|
||||
@ -24,8 +21,8 @@ impl<'a> PathReader<'a> {
|
||||
}
|
||||
|
||||
pub fn take_while<F>(&mut self, fun: F) -> Result<(usize, String), ReaderError>
|
||||
where
|
||||
F: Fn(&char) -> bool
|
||||
where
|
||||
F: Fn(&char) -> bool,
|
||||
{
|
||||
let mut char_len: usize = 0;
|
||||
let mut ret = String::new();
|
||||
@ -53,4 +50,4 @@ impl<'a> PathReader<'a> {
|
||||
pub fn current_pos(&self) -> usize {
|
||||
self.pos
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,3 @@
use std::io::Write;
use std::result::Result;

use super::path_reader::{PathReader, ReaderError};
@ -15,8 +14,8 @@ pub const SPLIT: &'static str = ":";
pub const OPEN_PARENTHESIS: &'static str = "(";
pub const CLOSE_PARENTHESIS: &'static str = ")";
pub const KEY: &'static str = "Key";
pub const DOUBLE_QUOTA: &'static str = "\"";
pub const SINGLE_QUOTA: &'static str = "'";
pub const DOUBLE_QUOTE: &'static str = "\"";
pub const SINGLE_QUOTE: &'static str = "'";
pub const EQUAL: &'static str = "==";
pub const GREATER_OR_EQUAL: &'static str = ">=";
pub const GREATER: &'static str = ">";
@ -44,8 +43,8 @@ const CH_PIPE: char = '|';
const CH_LITTLE: char = '<';
const CH_GREATER: char = '>';
const CH_EXCLAMATION: char = '!';
const CH_SINGLE_QUOTA: char = '\'';
const CH_DOUBLE_QUOTA: char = '"';
const CH_SINGLE_QUOTE: char = '\'';
const CH_DOUBLE_QUOTE: char = '"';

#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum TokenError {
|
||||
@ -55,7 +54,7 @@ pub enum TokenError {
|
||||
|
||||
fn to_token_error(read_err: ReaderError) -> TokenError {
|
||||
match read_err {
|
||||
ReaderError::Eof => TokenError::Eof
|
||||
ReaderError::Eof => TokenError::Eof,
|
||||
}
|
||||
}
|
||||
|
||||
@ -109,8 +108,8 @@ impl Token {
|
||||
Token::OpenParenthesis(_) => OPEN_PARENTHESIS,
|
||||
Token::CloseParenthesis(_) => CLOSE_PARENTHESIS,
|
||||
Token::Key(_, _) => KEY,
|
||||
Token::DoubleQuoted(_, _) => DOUBLE_QUOTA,
|
||||
Token::SingleQuoted(_, _) => SINGLE_QUOTA,
|
||||
Token::DoubleQuoted(_, _) => DOUBLE_QUOTE,
|
||||
Token::SingleQuoted(_, _) => SINGLE_QUOTE,
|
||||
Token::Equal(_) => EQUAL,
|
||||
Token::GreaterOrEqual(_) => GREATER_OR_EQUAL,
|
||||
Token::Greater(_) => GREATER,
|
||||
@ -119,7 +118,7 @@ impl Token {
|
||||
Token::NotEqual(_) => NOT_EQUAL,
|
||||
Token::And(_) => AND,
|
||||
Token::Or(_) => OR,
|
||||
Token::Whitespace(_, _) => WHITESPACE
|
||||
Token::Whitespace(_, _) => WHITESPACE,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -137,7 +136,7 @@ fn simple_matched_token(ch: char, pos: usize) -> Option<Token> {
|
||||
CH_QUESTION => Some(Token::Question(pos)),
|
||||
CH_COMMA => Some(Token::Comma(pos)),
|
||||
CH_SEMICOLON => Some(Token::Split(pos)),
|
||||
_ => None
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@ -147,20 +146,42 @@ pub struct Tokenizer<'a> {
|
||||
|
||||
impl<'a> Tokenizer<'a> {
|
||||
pub fn new(input: &'a str) -> Self {
|
||||
trace!("input: {}", input);
|
||||
Tokenizer {
|
||||
input: PathReader::new(input),
|
||||
}
|
||||
}
|
||||
|
||||
fn single_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
self.input.next_char().map_err(to_token_error)?;
fn quote(&mut self, ch: char) -> Result<String, TokenError> {
let (_, mut val) = self
.input
.take_while(|c| *c != ch)
.map_err(to_token_error)?;

if let Some('\\') = val.chars().last() {
self.input.next_char().map_err(to_token_error)?;
let _ = val.pop();
let (_, mut val_remain) = self
.input
.take_while(|c| *c != ch)
.map_err(to_token_error)?;
self.input.next_char().map_err(to_token_error)?;
val.push(ch);
val.push_str(val_remain.as_str());
} else {
self.input.next_char().map_err(to_token_error)?;
}

Ok(val)
}

fn single_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let val = self.quote(ch)?;
|
||||
Ok(Token::SingleQuoted(pos, val))
|
||||
}
|
||||
|
||||
fn double_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
fn double_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let val = self.quote(ch)?;
|
||||
Ok(Token::DoubleQuoted(pos, val))
|
||||
}
|
||||
|
||||
@ -171,7 +192,7 @@ impl<'a> Tokenizer<'a> {
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
Ok(Token::Equal(pos))
|
||||
}
|
||||
_ => Err(TokenError::Position(pos))
|
||||
_ => Err(TokenError::Position(pos)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -182,7 +203,7 @@ impl<'a> Tokenizer<'a> {
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
Ok(Token::NotEqual(pos))
|
||||
}
|
||||
_ => Err(TokenError::Position(pos))
|
||||
_ => Err(TokenError::Position(pos)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -215,7 +236,7 @@ impl<'a> Tokenizer<'a> {
|
||||
let _ = self.input.next_char().map_err(to_token_error);
|
||||
Ok(Token::And(pos))
|
||||
}
|
||||
_ => Err(TokenError::Position(pos))
|
||||
_ => Err(TokenError::Position(pos)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -226,27 +247,31 @@ impl<'a> Tokenizer<'a> {
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
Ok(Token::Or(pos))
|
||||
}
|
||||
_ => Err(TokenError::Position(pos))
|
||||
_ => Err(TokenError::Position(pos)),
|
||||
}
|
||||
}
|
||||
|
||||
fn whitespace(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
|
||||
let (_, vec) = self.input.take_while(|c| c.is_whitespace()).map_err(to_token_error)?;
|
||||
let (_, vec) = self
|
||||
.input
|
||||
.take_while(|c| c.is_whitespace())
|
||||
.map_err(to_token_error)?;
|
||||
Ok(Token::Whitespace(pos, vec.len()))
|
||||
}
|
||||
|
||||
fn other(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let fun = |c: &char| {
|
||||
match simple_matched_token(*c, pos) {
|
||||
Some(_) => false,
|
||||
_ if c == &CH_LITTLE
|
||||
|| c == &CH_GREATER
|
||||
|| c == &CH_EQUAL
|
||||
|| c == &CH_AMPERSAND
|
||||
|| c == &CH_PIPE
|
||||
|| c == &CH_EXCLAMATION => false,
|
||||
_ => !c.is_whitespace()
|
||||
let fun = |c: &char| match simple_matched_token(*c, pos) {
|
||||
Some(_) => false,
|
||||
_ if c == &CH_LITTLE
|
||||
|| c == &CH_GREATER
|
||||
|| c == &CH_EQUAL
|
||||
|| c == &CH_AMPERSAND
|
||||
|| c == &CH_PIPE
|
||||
|| c == &CH_EXCLAMATION =>
|
||||
{
|
||||
false
|
||||
}
|
||||
_ => !c.is_whitespace(),
|
||||
};
|
||||
let (_, mut vec) = self.input.take_while(fun).map_err(to_token_error)?;
|
||||
vec.insert(0, ch);
|
||||
@ -257,20 +282,18 @@ impl<'a> Tokenizer<'a> {
|
||||
let (pos, ch) = self.input.next_char().map_err(to_token_error)?;
|
||||
match simple_matched_token(ch, pos) {
|
||||
Some(t) => Ok(t),
|
||||
None => {
|
||||
match ch {
|
||||
CH_SINGLE_QUOTA => self.single_quota(pos, ch),
|
||||
CH_DOUBLE_QUOTA => self.double_quota(pos, ch),
|
||||
CH_EQUAL => self.equal(pos, ch),
|
||||
CH_GREATER => self.greater(pos, ch),
|
||||
CH_LITTLE => self.little(pos, ch),
|
||||
CH_AMPERSAND => self.and(pos, ch),
|
||||
CH_PIPE => self.or(pos, ch),
|
||||
CH_EXCLAMATION => self.not_equal(pos, ch),
|
||||
_ if ch.is_whitespace() => self.whitespace(pos, ch),
|
||||
_ => self.other(pos, ch),
|
||||
}
|
||||
}
|
||||
None => match ch {
|
||||
CH_SINGLE_QUOTE => self.single_quote(pos, ch),
|
||||
CH_DOUBLE_QUOTE => self.double_quote(pos, ch),
|
||||
CH_EQUAL => self.equal(pos, ch),
|
||||
CH_GREATER => self.greater(pos, ch),
|
||||
CH_LITTLE => self.little(pos, ch),
|
||||
CH_AMPERSAND => self.and(pos, ch),
|
||||
CH_PIPE => self.or(pos, ch),
|
||||
CH_EXCLAMATION => self.not_equal(pos, ch),
|
||||
_ if ch.is_whitespace() => self.whitespace(pos, ch),
|
||||
_ => self.other(pos, ch),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -312,7 +335,7 @@ impl<'a> TokenReader<'a> {
|
||||
pub fn peek_is(&self, simple_token: &str) -> bool {
|
||||
match self.peek_token() {
|
||||
Ok(t) => t.simple_eq(simple_token),
|
||||
_ => false
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
@ -344,23 +367,13 @@ impl<'a> TokenReader<'a> {
|
||||
}
|
||||
|
||||
pub fn err_msg_with_pos(&self, pos: usize) -> String {
|
||||
let mut w = Vec::new();
|
||||
writeln!(&mut w, "{}", self.origin_input).unwrap();
|
||||
writeln!(&mut w, "{}", "^".repeat(pos)).unwrap();
|
||||
match std::str::from_utf8(&w[..]) {
|
||||
Ok(s) => s.to_owned(),
|
||||
Err(_) => panic!("Invalid UTF-8")
|
||||
}
|
||||
format!("{}\n{}", self.origin_input, "^".repeat(pos))
|
||||
}
|
||||
|
||||
pub fn err_msg(&self) -> String {
|
||||
match self.curr_pos {
|
||||
Some(pos) => {
|
||||
self.err_msg_with_pos(pos)
|
||||
}
|
||||
_ => {
|
||||
self.err_msg_with_pos(self.err_pos)
|
||||
}
|
||||
Some(pos) => self.err_msg_with_pos(pos),
|
||||
_ => self.err_msg_with_pos(self.err_pos),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,5 @@
use std::collections::HashSet;
use std::fmt;

use array_tool::vec::{Intersect, Union};
use indexmap::IndexMap;
@ -25,7 +26,9 @@ trait Cmp {

fn cmp_json<'a>(&self, v1: &Vec<&'a Value>, v2: &Vec<&'a Value>) -> Vec<&'a Value>;

fn default(&self) -> bool { false }
fn default(&self) -> bool {
false
}
}

struct CmpEq;
@ -200,59 +203,63 @@ impl<'a> ExprTerm<'a> {
|
||||
fn is_string(&self) -> bool {
|
||||
match &self {
|
||||
ExprTerm::String(_) => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_number(&self) -> bool {
|
||||
match &self {
|
||||
ExprTerm::Number(_) => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_bool(&self) -> bool {
|
||||
match &self {
|
||||
ExprTerm::Bool(_) => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_json(&self) -> bool {
|
||||
match &self {
|
||||
ExprTerm::Json(_, _) => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn cmp<C1: Cmp, C2: Cmp>(&self, other: &Self, cmp_fn: &C1, reverse_cmp_fn: &C2) -> ExprTerm<'a> {
|
||||
fn cmp<C1: Cmp, C2: Cmp>(
|
||||
&self,
|
||||
other: &Self,
|
||||
cmp_fn: &C1,
|
||||
reverse_cmp_fn: &C2,
|
||||
) -> ExprTerm<'a> {
|
||||
match &self {
|
||||
ExprTerm::String(s1) => match &other {
|
||||
ExprTerm::String(s2) => ExprTerm::Bool(cmp_fn.cmp_string(s1, s2)),
|
||||
ExprTerm::Json(_, _) => {
|
||||
other.cmp(&self, reverse_cmp_fn, cmp_fn)
|
||||
}
|
||||
_ => ExprTerm::Bool(cmp_fn.default())
|
||||
}
|
||||
ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
|
||||
_ => ExprTerm::Bool(cmp_fn.default()),
|
||||
},
|
||||
ExprTerm::Number(n1) => match &other {
|
||||
ExprTerm::Number(n2) => ExprTerm::Bool(cmp_fn.cmp_f64(&to_f64(n1), &to_f64(n2))),
|
||||
ExprTerm::Json(_, _) => {
|
||||
other.cmp(&self, reverse_cmp_fn, cmp_fn)
|
||||
}
|
||||
_ => ExprTerm::Bool(cmp_fn.default())
|
||||
}
|
||||
ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
|
||||
_ => ExprTerm::Bool(cmp_fn.default()),
|
||||
},
|
||||
ExprTerm::Bool(b1) => match &other {
|
||||
ExprTerm::Bool(b2) => ExprTerm::Bool(cmp_fn.cmp_bool(b1, b2)),
|
||||
ExprTerm::Json(_, _) => {
|
||||
other.cmp(&self, reverse_cmp_fn, cmp_fn)
|
||||
}
|
||||
_ => ExprTerm::Bool(cmp_fn.default())
|
||||
}
|
||||
ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
|
||||
_ => ExprTerm::Bool(cmp_fn.default()),
|
||||
},
|
||||
ExprTerm::Json(fk1, vec1) if other.is_string() => {
|
||||
let s2 = if let ExprTerm::String(s2) = &other { s2 } else { unreachable!() };
|
||||
let s2 = if let ExprTerm::String(s2) = &other {
|
||||
s2
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
let ret: Vec<&Value> = vec1.iter().filter(|v1| {
|
||||
match v1 {
|
||||
let ret: Vec<&Value> = vec1
|
||||
.iter()
|
||||
.filter(|v1| match v1 {
|
||||
Value::String(s1) => cmp_fn.cmp_string(s1, s2),
|
||||
Value::Object(map1) => {
|
||||
if let Some(FilterKey::String(k)) = fk1 {
|
||||
@ -262,16 +269,26 @@ impl<'a> ExprTerm<'a> {
|
||||
}
|
||||
cmp_fn.default()
|
||||
}
|
||||
_ => cmp_fn.default()
|
||||
}
|
||||
}).map(|v| *v).collect();
|
||||
_ => cmp_fn.default(),
|
||||
})
|
||||
.map(|v| *v)
|
||||
.collect();
|
||||
|
||||
if ret.is_empty() { ExprTerm::Bool(cmp_fn.default()) } else { ExprTerm::Json(None, ret) }
|
||||
if ret.is_empty() {
|
||||
ExprTerm::Bool(cmp_fn.default())
|
||||
} else {
|
||||
ExprTerm::Json(None, ret)
|
||||
}
|
||||
}
|
||||
ExprTerm::Json(fk1, vec1) if other.is_number() => {
|
||||
let n2 = if let ExprTerm::Number(n2) = &other { n2 } else { unreachable!() };
|
||||
let ret: Vec<&Value> = vec1.iter().filter(|v1| {
|
||||
match v1 {
|
||||
let n2 = if let ExprTerm::Number(n2) = &other {
|
||||
n2
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
let ret: Vec<&Value> = vec1
|
||||
.iter()
|
||||
.filter(|v1| match v1 {
|
||||
Value::Number(n1) => cmp_fn.cmp_f64(&to_f64(n1), &to_f64(n2)),
|
||||
Value::Object(map1) => {
|
||||
if let Some(FilterKey::String(k)) = fk1 {
|
||||
@ -281,16 +298,26 @@ impl<'a> ExprTerm<'a> {
|
||||
}
|
||||
cmp_fn.default()
|
||||
}
|
||||
_ => cmp_fn.default()
|
||||
}
|
||||
}).map(|v| *v).collect();
|
||||
_ => cmp_fn.default(),
|
||||
})
|
||||
.map(|v| *v)
|
||||
.collect();
|
||||
|
||||
if ret.is_empty() { ExprTerm::Bool(cmp_fn.default()) } else { ExprTerm::Json(None, ret) }
|
||||
if ret.is_empty() {
|
||||
ExprTerm::Bool(cmp_fn.default())
|
||||
} else {
|
||||
ExprTerm::Json(None, ret)
|
||||
}
|
||||
}
|
||||
ExprTerm::Json(fk1, vec1) if other.is_bool() => {
|
||||
let b2 = if let ExprTerm::Bool(b2) = &other { b2 } else { unreachable!() };
|
||||
let ret: Vec<&Value> = vec1.iter().filter(|v1| {
|
||||
match v1 {
|
||||
let b2 = if let ExprTerm::Bool(b2) = &other {
|
||||
b2
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
let ret: Vec<&Value> = vec1
|
||||
.iter()
|
||||
.filter(|v1| match v1 {
|
||||
Value::Bool(b1) => cmp_fn.cmp_bool(b1, b2),
|
||||
Value::Object(map1) => {
|
||||
if let Some(FilterKey::String(k)) = fk1 {
|
||||
@ -300,22 +327,29 @@ impl<'a> ExprTerm<'a> {
|
||||
}
|
||||
cmp_fn.default()
|
||||
}
|
||||
_ => cmp_fn.default()
|
||||
}
|
||||
}).map(|v| *v).collect();
|
||||
_ => cmp_fn.default(),
|
||||
})
|
||||
.map(|v| *v)
|
||||
.collect();
|
||||
|
||||
if ret.is_empty() { ExprTerm::Bool(cmp_fn.default()) } else { ExprTerm::Json(None, ret) }
|
||||
}
|
||||
ExprTerm::Json(_, vec1) if other.is_json() => {
|
||||
match &other {
|
||||
ExprTerm::Json(_, vec2) => {
|
||||
let vec = cmp_fn.cmp_json(vec1, vec2);
|
||||
if vec.is_empty() { ExprTerm::Bool(cmp_fn.default()) } else { ExprTerm::Json(None, vec) }
|
||||
}
|
||||
_ => unreachable!()
|
||||
if ret.is_empty() {
|
||||
ExprTerm::Bool(cmp_fn.default())
|
||||
} else {
|
||||
ExprTerm::Json(None, ret)
|
||||
}
|
||||
}
|
||||
_ => unreachable!()
|
||||
ExprTerm::Json(_, vec1) if other.is_json() => match &other {
|
||||
ExprTerm::Json(_, vec2) => {
|
||||
let vec = cmp_fn.cmp_json(vec1, vec2);
|
||||
if vec.is_empty() {
|
||||
ExprTerm::Bool(cmp_fn.default())
|
||||
} else {
|
||||
ExprTerm::Json(None, vec)
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
},
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -399,30 +433,31 @@ impl<'a> Into<ExprTerm<'a>> for &Vec<&'a Value> {
|
||||
}
|
||||
}
|
||||
|
||||
fn walk_all_with_str<'a>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>, key: &str, is_filter: bool) {
|
||||
fn walk_all_with_str<'a>(
|
||||
vec: &Vec<&'a Value>,
|
||||
tmp: &mut Vec<&'a Value>,
|
||||
key: &str,
|
||||
is_filter: bool,
|
||||
) {
|
||||
if is_filter {
|
||||
walk(vec, tmp, &|v| match v {
|
||||
Value::Object(map) if map.contains_key(key) => {
|
||||
Some(vec![v])
|
||||
}
|
||||
_ => None
|
||||
Value::Object(map) if map.contains_key(key) => Some(vec![v]),
|
||||
_ => None,
|
||||
});
|
||||
} else {
|
||||
walk(vec, tmp, &|v| match v {
|
||||
Value::Object(map) => match map.get(key) {
|
||||
Some(v) => Some(vec![v]),
|
||||
_ => None
|
||||
}
|
||||
_ => None
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn walk_all<'a>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>) {
|
||||
walk(vec, tmp, &|v| match v {
|
||||
Value::Array(vec) => {
|
||||
Some(vec.iter().collect())
|
||||
}
|
||||
Value::Array(vec) => Some(vec.iter().collect()),
|
||||
Value::Object(map) => {
|
||||
let mut tmp = Vec::new();
|
||||
for (_, v) in map {
|
||||
@ -430,15 +465,17 @@ fn walk_all<'a>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>) {
|
||||
}
|
||||
Some(tmp)
|
||||
}
|
||||
_ => None
|
||||
_ => None,
|
||||
});
|
||||
}
|
||||
|
||||
fn walk<'a, F>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>, fun: &F)
|
||||
where F: Fn(&Value) -> Option<Vec<&Value>>
|
||||
where
|
||||
F: Fn(&Value) -> Option<Vec<&Value>>,
|
||||
{
|
||||
fn _walk<'a, F>(v: &'a Value, tmp: &mut Vec<&'a Value>, fun: &F)
|
||||
where F: Fn(&Value) -> Option<Vec<&Value>>
|
||||
where
|
||||
F: Fn(&Value) -> Option<Vec<&Value>>,
|
||||
{
|
||||
if let Some(mut ret) = fun(v) {
|
||||
tmp.append(&mut ret);
|
||||
@ -478,7 +515,6 @@ enum FilterKey {
All,
}

#[derive(Debug)]
pub enum JsonPathError {
EmptyPath,
EmptyValue,
@ -486,6 +522,23 @@ pub enum JsonPathError {
Serde(String),
}

impl fmt::Debug for JsonPathError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self)
}
}

impl fmt::Display for JsonPathError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
JsonPathError::EmptyPath => f.write_str("path not set"),
JsonPathError::EmptyValue => f.write_str("json value not set"),
JsonPathError::Path(msg) => f.write_str(&format!("path error: \n{}\n", msg)),
JsonPathError::Serde(msg) => f.write_str(&format!("serde error: \n{}\n", msg)),
}
}
}

#[derive(Debug)]
|
||||
pub struct Selector<'a, 'b> {
|
||||
node: Option<Node>,
|
||||
@ -575,12 +628,12 @@ impl<'a, 'b> Selector<'a, 'b> {
|
||||
for v in vec {
|
||||
match T::deserialize(*v) {
|
||||
Ok(v) => ret.push(v),
|
||||
Err(e) => return Err(JsonPathError::Serde(e.to_string()))
|
||||
Err(e) => return Err(JsonPathError::Serde(e.to_string())),
|
||||
}
|
||||
}
|
||||
Ok(ret)
|
||||
}
|
||||
_ => Err(JsonPathError::EmptyValue)
|
||||
_ => Err(JsonPathError::EmptyValue),
|
||||
}
|
||||
}
|
||||
|
||||
@ -589,10 +642,9 @@ impl<'a, 'b> Selector<'a, 'b> {
|
||||
|
||||
match &self.current {
|
||||
Some(r) => {
|
||||
Ok(serde_json::to_string(r)
|
||||
.map_err(|e| JsonPathError::Serde(e.to_string()))?)
|
||||
Ok(serde_json::to_string(r).map_err(|e| JsonPathError::Serde(e.to_string()))?)
|
||||
}
|
||||
_ => Err(JsonPathError::EmptyValue)
|
||||
_ => Err(JsonPathError::EmptyValue),
|
||||
}
|
||||
}
|
||||
|
||||
@ -601,7 +653,7 @@ impl<'a, 'b> Selector<'a, 'b> {
|
||||
|
||||
match &self.current {
|
||||
Some(r) => Ok(r.to_vec()),
|
||||
_ => Err(JsonPathError::EmptyValue)
|
||||
_ => Err(JsonPathError::EmptyValue),
|
||||
}
|
||||
}
|
||||
|
||||
@ -622,7 +674,7 @@ impl<'a, 'b> Selector<'a, 'b> {
|
||||
let filter_key = fun(&vec, &mut tmp);
|
||||
self.terms.push(Some(ExprTerm::Json(Some(filter_key), tmp)));
|
||||
}
|
||||
_ => unreachable!()
|
||||
_ => unreachable!(),
|
||||
};
|
||||
}
|
||||
_ => {
|
||||
@ -649,17 +701,26 @@ impl<'a, 'b> Selector<'a, 'b> {
|
||||
}
|
||||
|
||||
fn next_in_filter_with_str(&mut self, key: &str) {
|
||||
fn _collect<'a>(v: &'a Value, tmp: &mut Vec<&'a Value>, key: &str, visited: &mut HashSet<*const Value>) {
|
||||
fn _collect<'a>(
|
||||
v: &'a Value,
|
||||
tmp: &mut Vec<&'a Value>,
|
||||
key: &str,
|
||||
visited: &mut HashSet<*const Value>,
|
||||
) {
|
||||
match v {
|
||||
Value::Object(map) => if map.contains_key(key) {
|
||||
let ptr = v as *const Value;
|
||||
if !visited.contains(&ptr) {
|
||||
visited.insert(ptr);
|
||||
tmp.push(v)
|
||||
Value::Object(map) => {
|
||||
if map.contains_key(key) {
|
||||
let ptr = v as *const Value;
|
||||
if !visited.contains(&ptr) {
|
||||
visited.insert(ptr);
|
||||
tmp.push(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
Value::Array(vec) => {
|
||||
for v in vec {
|
||||
_collect(v, tmp, key, visited);
|
||||
}
|
||||
},
|
||||
Value::Array(vec) => for v in vec {
|
||||
_collect(v, tmp, key, visited);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -690,23 +751,35 @@ impl<'a, 'b> Selector<'a, 'b> {
|
||||
self.current = Some(tmp);
|
||||
}
|
||||
|
||||
debug!("next_from_current_with_num : {:?}, {:?}", &index, self.current);
|
||||
debug!(
|
||||
"next_from_current_with_num : {:?}, {:?}",
|
||||
&index, self.current
|
||||
);
|
||||
}
|
||||
|
||||
fn next_from_current_with_str(&mut self, key: &str) {
|
||||
fn _collect<'a>(v: &'a Value, tmp: &mut Vec<&'a Value>, key: &str, visited: &mut HashSet<*const Value>) {
|
||||
fn next_from_current_with_str(&mut self, keys: &Vec<String>) {
|
||||
fn _collect<'a>(
|
||||
v: &'a Value,
|
||||
tmp: &mut Vec<&'a Value>,
|
||||
keys: &Vec<String>,
|
||||
visited: &mut HashSet<*const Value>,
|
||||
) {
|
||||
match v {
|
||||
Value::Object(map) => {
|
||||
if let Some(v) = map.get(key) {
|
||||
let ptr = v as *const Value;
|
||||
if !visited.contains(&ptr) {
|
||||
visited.insert(ptr);
|
||||
tmp.push(v)
|
||||
for key in keys {
|
||||
if let Some(v) = map.get(key) {
|
||||
let ptr = v as *const Value;
|
||||
if !visited.contains(&ptr) {
|
||||
visited.insert(ptr);
|
||||
tmp.push(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Value::Array(vec) => for v in vec {
|
||||
_collect(v, tmp, key, visited);
|
||||
Value::Array(vec) => {
|
||||
for v in vec {
|
||||
_collect(v, tmp, keys, visited);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -716,12 +789,15 @@ impl<'a, 'b> Selector<'a, 'b> {
|
||||
let mut tmp = Vec::new();
|
||||
let mut visited = HashSet::new();
|
||||
for c in current {
|
||||
_collect(c, &mut tmp, key, &mut visited);
|
||||
_collect(c, &mut tmp, keys, &mut visited);
|
||||
}
|
||||
self.current = Some(tmp);
|
||||
}
|
||||
|
||||
debug!("next_from_current_with_str : {}, {:?}", key, self.current);
|
||||
debug!(
|
||||
"next_from_current_with_str : {:?}, {:?}",
|
||||
keys, self.current
|
||||
);
|
||||
}
|
||||
|
||||
fn next_all_from_current(&mut self) {
|
||||
@ -732,8 +808,10 @@ impl<'a, 'b> Selector<'a, 'b> {
|
||||
tmp.push(v)
|
||||
}
|
||||
}
|
||||
Value::Array(vec) => for v in vec {
|
||||
_collect(v, tmp);
|
||||
Value::Array(vec) => {
|
||||
for v in vec {
|
||||
_collect(v, tmp);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -838,7 +916,7 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
|
||||
self.next_from_current_with_num(to_f64(&n));
|
||||
}
|
||||
ExprTerm::String(key) => {
|
||||
self.next_from_current_with_str(&key);
|
||||
self.next_from_current_with_str(&vec![key]);
|
||||
}
|
||||
ExprTerm::Json(_, v) => {
|
||||
if v.is_empty() {
|
||||
@ -856,19 +934,17 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
|
||||
|
||||
self.tokens.pop();
|
||||
}
|
||||
ParseToken::All => {
|
||||
match self.tokens.last() {
|
||||
Some(ParseToken::Leaves) => {
|
||||
self.tokens.pop();
|
||||
self.all_from_current();
|
||||
}
|
||||
Some(ParseToken::In) => {
|
||||
self.tokens.pop();
|
||||
self.next_all_from_current();
|
||||
}
|
||||
_ => {}
|
||||
ParseToken::All => match self.tokens.last() {
|
||||
Some(ParseToken::Leaves) => {
|
||||
self.tokens.pop();
|
||||
self.all_from_current();
|
||||
}
|
||||
}
|
||||
Some(ParseToken::In) => {
|
||||
self.tokens.pop();
|
||||
self.next_all_from_current();
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
ParseToken::Bool(b) => {
|
||||
self.terms.push(Some(ExprTerm::Bool(*b)));
|
||||
}
|
||||
@ -882,11 +958,9 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
|
||||
Some(t) => {
|
||||
if self.terms.is_empty() {
|
||||
match t {
|
||||
ParseToken::Leaves => {
|
||||
self.all_from_current_with_str(key.as_str())
|
||||
}
|
||||
ParseToken::Leaves => self.all_from_current_with_str(key.as_str()),
|
||||
ParseToken::In => {
|
||||
self.next_from_current_with_str(key.as_str())
|
||||
self.next_from_current_with_str(&vec![key.clone()])
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -905,8 +979,20 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
ParseToken::Keys(keys) => {
|
||||
if !self.terms.is_empty() {
|
||||
unimplemented!("keys in filter");
|
||||
}
|
||||
|
||||
if let Some(ParseToken::Array) = self.tokens.pop() {
|
||||
self.next_from_current_with_str(keys);
|
||||
} else {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
ParseToken::Number(v) => {
|
||||
self.terms.push(Some(ExprTerm::Number(Number::from_f64(*v).unwrap())));
|
||||
self.terms
|
||||
.push(Some(ExprTerm::Number(Number::from_f64(*v).unwrap())));
|
||||
}
|
||||
ParseToken::Filter(ref ft) => {
|
||||
if let Some(Some(ref right)) = self.terms.pop() {
|
||||
@ -933,7 +1019,7 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
ParseToken::Range(from, to) => {
|
||||
ParseToken::Range(from, to, step) => {
|
||||
if !self.terms.is_empty() {
|
||||
unimplemented!("range syntax in filter");
|
||||
}
|
||||
@ -955,7 +1041,10 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
|
||||
vec.len()
|
||||
};
|
||||
|
||||
for i in from..to {
|
||||
for i in (from..to).step_by(match step {
|
||||
Some(step) => *step,
|
||||
_ => 1,
|
||||
}) {
|
||||
if let Some(v) = vec.get(i) {
|
||||
tmp.push(v);
|
||||
}
|
||||
@ -1049,7 +1138,10 @@ fn replace_value<F: FnMut(&Value) -> Value>(tokens: Vec<String>, value: &mut Val
|
||||
|
||||
impl SelectorMut {
|
||||
pub fn new() -> Self {
|
||||
SelectorMut { path: None, value: None }
|
||||
SelectorMut {
|
||||
path: None,
|
||||
value: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn str_path(&mut self, path: &str) -> Result<&mut Self, JsonPathError> {
|
||||
@ -1067,7 +1159,12 @@ impl SelectorMut {
|
||||
}
|
||||
|
||||
fn compute_paths(&self, mut result: Vec<&Value>) -> Vec<Vec<String>> {
|
||||
fn _walk(origin: &Value, target: &mut Vec<&Value>, tokens: &mut Vec<String>, visited: &mut IndexMap<*const Value, Vec<String>>) -> bool {
|
||||
fn _walk(
|
||||
origin: &Value,
|
||||
target: &mut Vec<&Value>,
|
||||
tokens: &mut Vec<String>,
|
||||
visited: &mut IndexMap<*const Value, Vec<String>>,
|
||||
) -> bool {
|
||||
trace!("{:?}, {:?}", target, tokens);
|
||||
|
||||
if target.is_empty() {
|
||||
@ -1084,19 +1181,23 @@ impl SelectorMut {
|
||||
});
|
||||
|
||||
match origin {
|
||||
Value::Array(vec) => for (i, v) in vec.iter().enumerate() {
|
||||
tokens.push(i.to_string());
|
||||
if _walk(v, target, tokens, visited) {
|
||||
return true;
|
||||
Value::Array(vec) => {
|
||||
for (i, v) in vec.iter().enumerate() {
|
||||
tokens.push(i.to_string());
|
||||
if _walk(v, target, tokens, visited) {
|
||||
return true;
|
||||
}
|
||||
tokens.pop();
|
||||
}
|
||||
tokens.pop();
|
||||
},
|
||||
Value::Object(map) => for (k, v) in map {
|
||||
tokens.push(k.clone());
|
||||
if _walk(v, target, tokens, visited) {
|
||||
return true;
|
||||
}
|
||||
Value::Object(map) => {
|
||||
for (k, v) in map {
|
||||
tokens.push(k.clone());
|
||||
if _walk(v, target, tokens, visited) {
|
||||
return true;
|
||||
}
|
||||
tokens.pop();
|
||||
}
|
||||
tokens.pop();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -1133,7 +1234,10 @@ impl SelectorMut {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn replace_with<F: FnMut(&Value) -> Value>(&mut self, fun: &mut F) -> Result<&mut Self, JsonPathError> {
|
||||
pub fn replace_with<F: FnMut(&Value) -> Value>(
|
||||
&mut self,
|
||||
fun: &mut F,
|
||||
) -> Result<&mut Self, JsonPathError> {
|
||||
let paths = {
|
||||
let result = self.select()?;
|
||||
self.compute_paths(result)
|
||||
@ -1147,4 +1251,4 @@ impl SelectorMut {
|
||||
|
||||
Ok(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -31,11 +31,22 @@ pub fn read_contents(path: &str) -> String {

#[allow(dead_code)]
pub fn select_and_then_compare<'a>(path: &str, json: Value, target: Value) {
let mut s = Selector::new();
let _ = s.str_path(path);
let _ = s.value(&json);
let result = serde_json::to_value(s.select().unwrap()).unwrap();
assert_eq!(result, target, "{}", path);
let mut selector = Selector::new();
let result = selector
.str_path(path)
.unwrap()
.value(&json)
.select_as::<Value>()
.unwrap();
assert_eq!(
result,
match target {
Value::Array(vec) => vec.clone(),
_ => panic!("Give me the Array!"),
},
"{}",
path
);
}

#[allow(dead_code)]

771
tests/filter.rs

@ -11,142 +11,226 @@ mod common;
|
||||
fn array() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.school.friends[1, 2]", read_json("./benches/data_obj.json"), json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]));
|
||||
select_and_then_compare(
|
||||
"$.school.friends[1, 2]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare("$.school.friends[1: ]", read_json("./benches/data_obj.json"), json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]));
|
||||
select_and_then_compare(
|
||||
"$.school.friends[1: ]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare("$.school.friends[:-2]", read_json("./benches/data_obj.json"), json!([
|
||||
{"id": 0, "name": "Millicent Norman"}
|
||||
]));
|
||||
select_and_then_compare(
|
||||
"$.school.friends[:-2]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{"id": 0, "name": "Millicent Norman"}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare("$..friends[2].name", read_json("./benches/data_obj.json"), json!([
|
||||
"Gray Berry", "Gray Berry"
|
||||
]));
|
||||
select_and_then_compare(
|
||||
"$..friends[2].name",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!(["Gray Berry", "Gray Berry"]),
|
||||
);
|
||||
|
||||
select_and_then_compare("$..friends[*].name", read_json("./benches/data_obj.json"), json!([
|
||||
"Vincent Cannon","Gray Berry","Millicent Norman","Vincent Cannon","Gray Berry"
|
||||
]));
|
||||
select_and_then_compare(
|
||||
"$..friends[*].name",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
"Vincent Cannon",
|
||||
"Gray Berry",
|
||||
"Millicent Norman",
|
||||
"Vincent Cannon",
|
||||
"Gray Berry"
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare("$['school']['friends'][*].['name']", read_json("./benches/data_obj.json"), json!([
|
||||
"Millicent Norman","Vincent Cannon","Gray Berry"
|
||||
]));
|
||||
select_and_then_compare(
|
||||
"$['school']['friends'][*].['name']",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!(["Millicent Norman", "Vincent Cannon", "Gray Berry"]),
|
||||
);
|
||||
|
||||
select_and_then_compare("$['school']['friends'][0].['name']", read_json("./benches/data_obj.json"), json!([
|
||||
"Millicent Norman"
|
||||
]));
|
||||
select_and_then_compare(
|
||||
"$['school']['friends'][0].['name']",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!(["Millicent Norman"]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.["eyeColor", "name"]"#,
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!(["blue", "Leonor Herman"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.school", read_json("./benches/data_obj.json"), json!([{
|
||||
"friends": [
|
||||
select_and_then_compare(
|
||||
"$.school",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(@.friends[0])]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(@.friends[10])]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(1==1)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[?(1==1)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([[
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]));
|
||||
|
||||
select_and_then_compare("$.school[?(@.friends[0])]", read_json("./benches/data_obj.json"), json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]));
|
||||
|
||||
select_and_then_compare("$.school[?(@.friends[10])]", read_json("./benches/data_obj.json"), json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]));
|
||||
|
||||
select_and_then_compare("$.school[?(1==1)]", read_json("./benches/data_obj.json"), json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]));
|
||||
|
||||
select_and_then_compare("$.school.friends[?(1==1)]", read_json("./benches/data_obj.json"), json!([[
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]]));
|
||||
]]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_default() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.school[?(@.friends == @.friends)]", read_json("./benches/data_obj.json"), json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]));
|
||||
|
||||
select_and_then_compare("$.friends[?(@.name)]", read_json("./benches/data_obj.json"), json!([
|
||||
{ "id" : 1, "name" : "Vincent Cannon" },
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]));
|
||||
|
||||
select_and_then_compare("$.friends[?(@.id >= 2)]", read_json("./benches/data_obj.json"), json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]));
|
||||
|
||||
select_and_then_compare("$.friends[?(@.id >= 2 || @.id == 1)]", read_json("./benches/data_obj.json"), json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" },
|
||||
{ "id" : 1, "name" : "Vincent Cannon" }
|
||||
]));
|
||||
|
||||
select_and_then_compare("$.friends[?( (@.id >= 2 || @.id == 1) && @.id == 0)]", read_json("./benches/data_obj.json"), json!([
|
||||
Value::Null
|
||||
]));
|
||||
|
||||
select_and_then_compare("$..friends[?(@.id == $.index)].id", read_json("./benches/data_obj.json"), json!([
|
||||
0, 0
|
||||
]));
|
||||
|
||||
select_and_then_compare("$..book[?($.store.bicycle.price < @.price)].price", read_json("./benches/example.json"), json!([
|
||||
22.99
|
||||
]));
|
||||
|
||||
select_and_then_compare("$..book[?( (@.price == 12.99 || @.category == 'reference') && @.price > 10)].price", read_json("./benches/example.json"), json!([
|
||||
12.99
|
||||
]));
|
||||
|
||||
select_and_then_compare("$..[?(@.age > 40)]", json!([
|
||||
{ "name": "이름1", "age": 40, "phone": "+33 12341234" },
|
||||
{ "name": "이름2", "age": 42, "phone": "++44 12341234" }
|
||||
]), json!([
|
||||
{ "name" : "이름2", "age" : 42, "phone" : "++44 12341234" }
|
||||
]));
|
||||
|
||||
select_and_then_compare("$..[?(@.age >= 30)]", json!({
|
||||
"school": {
|
||||
select_and_then_compare(
|
||||
"$.school[?(@.friends == @.friends)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}), json!([
|
||||
{ "name" : "친구3", "age" : 30 }
|
||||
]));
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?(@.name)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{ "id" : 1, "name" : "Vincent Cannon" },
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?(@.id >= 2)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?(@.id >= 2 || @.id == 1)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" },
|
||||
{ "id" : 1, "name" : "Vincent Cannon" }
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?( (@.id >= 2 || @.id == 1) && @.id == 0)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..friends[?(@.id == $.index)].id",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([0, 0]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..book[?($.store.bicycle.price < @.price)].price",
|
||||
read_json("./benches/example.json"),
|
||||
json!([22.99]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..book[?( (@.price == 12.99 || @.category == 'reference') && @.price > 10)].price",
|
||||
read_json("./benches/example.json"),
|
||||
json!([12.99]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[?(@.age > 40)]",
|
||||
json!([
|
||||
{ "name": "이름1", "age": 40, "phone": "+33 12341234" },
|
||||
{ "name": "이름2", "age": 42, "phone": "++44 12341234" }
|
||||
]),
|
||||
json!([
|
||||
{ "name" : "이름2", "age" : 42, "phone" : "++44 12341234" }
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[?(@.age >= 30)]",
|
||||
json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}),
|
||||
json!([
|
||||
{ "name" : "친구3", "age" : 30 }
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
@ -165,188 +249,357 @@ fn op_number() {
fn op_string() {
setup();

select_and_then_compare(r#"$.[?(@.a == "b")]"#, json!({ "a": "b" }), json!([{ "a": "b" }]));
select_and_then_compare(r#"$.[?(@.a != "c")]"#, json!({ "a": "b" }), json!([{ "a": "b" }]));
select_and_then_compare(r#"$.[?(@.a < "b")]"#, json!({ "a": "b" }), json!([Value::Null]));
select_and_then_compare(r#"$.[?(@.a <= "b")]"#, json!({ "a": "b" }), json!([{ "a": "b" }]));
select_and_then_compare(r#"$.[?(@.a > "b")]"#, json!({ "a": "b" }), json!([Value::Null]));
select_and_then_compare(r#"$.[?(@.a >= "b")]"#, json!({ "a": "b" }), json!([{ "a": "b" }]));
select_and_then_compare(
|
||||
r#"$.[?(@.a == "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([{ "a": "b" }]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a != "c")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([{ "a": "b" }]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a < "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a <= "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([{ "a": "b" }]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a > "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a >= "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([{ "a": "b" }]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_object() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(r#"$.[?(@.a == @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([{"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}]));
|
||||
select_and_then_compare(r#"$.[?(@.a != @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]));
|
||||
select_and_then_compare(r#"$.[?(@.a < @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]));
|
||||
select_and_then_compare(r#"$.[?(@.a <= @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]));
|
||||
select_and_then_compare(r#"$.[?(@.a > @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]));
|
||||
select_and_then_compare(r#"$.[?(@.a >= @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]));
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a == @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([{"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a != @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a < @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a <= @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a > @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a >= @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_complex() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(r#"$.[?(1 == @.a)]"#, json!({ "a": { "b": 1 } }), json!([Value::Null]));
|
||||
select_and_then_compare(r#"$.[?("1" != @.a)]"#, json!({ "a": { "b": 1 } }), json!([Value::Null]));
|
||||
select_and_then_compare(r#"$.[?(@.a <= 1)]"#, json!({ "a": { "b": 1 } }), json!([Value::Null]));
|
||||
select_and_then_compare(r#"$.[?(@.a > "1")]"#, json!({ "a": { "b": 1 } }), json!([Value::Null]));
|
||||
select_and_then_compare(
|
||||
r#"$.[?(1 == @.a)]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?("1" != @.a)]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a <= 1)]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a > "1")]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn example() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(r#"$.store.book[*].author"#, read_json("./benches/example.json"), json!([
|
||||
"Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"
|
||||
]));
|
||||
select_and_then_compare(
|
||||
r#"$.store.book[*].author"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$..author"#, read_json("./benches/example.json"), json!([
|
||||
"Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"
|
||||
]));
|
||||
select_and_then_compare(
|
||||
r#"$..author"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$.store.*"#, read_json("./benches/example.json"), json!([
|
||||
[
|
||||
{"category" : "reference", "author" : "Nigel Rees","title" : "Sayings of the Century", "price" : 8.95},
|
||||
{"category" : "fiction", "author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99},
|
||||
{"category" : "fiction", "author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
|
||||
{"category" : "fiction", "author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
|
||||
],
|
||||
{"color" : "red","price" : 19.95},
|
||||
]));
|
||||
select_and_then_compare(
|
||||
r#"$.store.*"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
[
|
||||
{"category" : "reference", "author" : "Nigel Rees","title" : "Sayings of the Century", "price" : 8.95},
|
||||
{"category" : "fiction", "author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99},
|
||||
{"category" : "fiction", "author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
|
||||
{"category" : "fiction", "author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
|
||||
],
|
||||
{"color" : "red","price" : 19.95},
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$.store..price"#, read_json("./benches/example.json"), json!([
|
||||
8.95, 12.99, 8.99, 22.99, 19.95
|
||||
]));
|
||||
select_and_then_compare(
|
||||
r#"$.store..price"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([8.95, 12.99, 8.99, 22.99, 19.95]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$..book[2]"#, read_json("./benches/example.json"), json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]));
|
||||
|
||||
select_and_then_compare(r#"$..book[-2]"#, read_json("./benches/example.json"), json!([
|
||||
{
|
||||
select_and_then_compare(
|
||||
r#"$..book[2]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]));
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$..book[0, 1]"#, read_json("./benches/example.json"), json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]));
|
||||
select_and_then_compare(
|
||||
r#"$..book[-2]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$..book[:2]"#, read_json("./benches/example.json"), json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]));
|
||||
select_and_then_compare(
|
||||
r#"$..book[0, 1]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$..book[2:]"#, read_json("./benches/example.json"), json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]));
|
||||
select_and_then_compare(
|
||||
r#"$..book[:2]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$..book[?(@.isbn)]"#, read_json("./benches/example.json"), json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]));
|
||||
select_and_then_compare(
|
||||
r#"$..book[2:]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$.store.book[?(@.price < 10)]"#, read_json("./benches/example.json"), json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]));
|
||||
select_and_then_compare(
|
||||
r#"$..book[?(@.isbn)]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(r#"$..*"#, read_json("./benches/example.json"),
|
||||
read_json("./benches/giveme_every_thing_result.json"));
|
||||
select_and_then_compare(
|
||||
r#"$.store.book[?(@.price < 10)]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..*"#,
|
||||
read_json("./benches/example.json"),
|
||||
read_json("./benches/giveme_every_thing_result.json"),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filer_same_obj() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(r#"$..[?(@.a == 1)]"#, json!({
|
||||
"a": 1,
|
||||
"b" : {"a": 1},
|
||||
"c" : {"a": 1}
|
||||
}), json!([
|
||||
{"a": 1},
|
||||
{"a": 1}
|
||||
]));
|
||||
}
|
||||
select_and_then_compare(
|
||||
r#"$..[?(@.a == 1)]"#,
|
||||
json!({
|
||||
"a": 1,
|
||||
"b" : {"a": 1},
|
||||
"c" : {"a": 1}
|
||||
}),
|
||||
json!([
|
||||
{"a": 1},
|
||||
{"a": 1}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn range() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[ : ]",
|
||||
json!(["first", "second"]),
|
||||
json!(["first", "second"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[::]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[::2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "third", "fifth"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[1::]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second", "third", "forth", "fifth"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[1:2:]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[1::2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second", "forth"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[0:3:1]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "second", "third"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[0:3:2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn quote() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$['single\'quote']"#,
|
||||
json!({"single'quote":"value"}),
|
||||
json!(["value"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$["double\"quote"]"#,
|
||||
json!({"double\"quote":"value"}),
|
||||
json!(["value"]),
|
||||
);
|
||||
}
|
||||
|
81 tests/lib.rs
@@ -18,17 +18,17 @@ fn compile() {
|
||||
let json_obj = read_json("./benches/data_obj.json");
|
||||
let json = template(&json_obj).unwrap();
|
||||
let ret = json!([
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Gray Berry"}
|
||||
]);
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Gray Berry"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
|
||||
let json_obj = read_json("./benches/data_array.json");
|
||||
let json = template(&json_obj).unwrap();
|
||||
let ret = json!([
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Rosetta Erickson"}
|
||||
]);
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Rosetta Erickson"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
}
|
||||
|
||||
@@ -40,16 +40,16 @@ fn selector() {
|
||||
let mut reader = jsonpath::selector(&json_obj);
|
||||
let json = reader("$..friends[2]").unwrap();
|
||||
let ret = json!([
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Gray Berry"}
|
||||
]);
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Gray Berry"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
|
||||
let json = reader("$..friends[0]").unwrap();
|
||||
let ret = json!([
|
||||
{"id": 0},
|
||||
{"id": 0,"name": "Millicent Norman"}
|
||||
]);
|
||||
{"id": 0},
|
||||
{"id": 0,"name": "Millicent Norman"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
}
|
||||
|
||||
@@ -65,17 +65,26 @@ fn selector_as() {
|
||||
let mut selector = jsonpath::selector_as::<Friend>(&json_obj);
|
||||
let json = selector("$..friends[2]").unwrap();
|
||||
|
||||
let ret = vec!(
|
||||
Friend { id: 2, name: Some("Gray Berry".to_string()) },
|
||||
Friend { id: 2, name: Some("Gray Berry".to_string()) },
|
||||
);
|
||||
let ret = vec![
|
||||
Friend {
|
||||
id: 2,
|
||||
name: Some("Gray Berry".to_string()),
|
||||
},
|
||||
Friend {
|
||||
id: 2,
|
||||
name: Some("Gray Berry".to_string()),
|
||||
},
|
||||
];
|
||||
assert_eq!(json, ret);
|
||||
|
||||
let json = selector("$..friends[0]").unwrap();
|
||||
let ret = vec!(
|
||||
let ret = vec![
|
||||
Friend { id: 0, name: None },
|
||||
Friend { id: 0, name: Some("Millicent Norman".to_string()) },
|
||||
);
|
||||
Friend {
|
||||
id: 0,
|
||||
name: Some("Millicent Norman".to_string()),
|
||||
},
|
||||
];
|
||||
assert_eq!(json, ret);
|
||||
}
|
||||
|
||||
@@ -84,12 +93,12 @@ fn select() {
|
||||
let json_obj = read_json("./benches/example.json");
|
||||
let json = jsonpath::select(&json_obj, "$..book[2]").unwrap();
|
||||
let ret = json!([{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}]);
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}]);
|
||||
compare_result(json, ret);
|
||||
}
|
||||
|
||||
@@ -98,12 +107,12 @@ fn select_str() {
|
||||
let json_str = read_contents("./benches/example.json");
|
||||
let result_str = jsonpath::select_as_str(&json_str, "$..book[2]").unwrap();
|
||||
let ret = json!([{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}]);
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}]);
|
||||
let json: Value = serde_json::from_str(&result_str).unwrap();
|
||||
assert_eq!(json, ret);
|
||||
}
|
||||
@@ -117,7 +126,8 @@ fn test_to_struct() {
|
||||
phones: Vec<String>,
|
||||
}
|
||||
|
||||
let ret: Vec<Person> = jsonpath::select_as(r#"
|
||||
let ret: Vec<Person> = jsonpath::select_as(
|
||||
r#"
|
||||
{
|
||||
"person":
|
||||
{
|
||||
@@ -129,7 +139,10 @@ fn test_to_struct() {
|
||||
]
|
||||
}
|
||||
}
|
||||
"#, "$.person").unwrap();
|
||||
"#,
|
||||
"$.person",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let person = Person {
|
||||
name: "Doe John".to_string(),
|
||||
@@ -138,4 +151,4 @@ fn test_to_struct() {
|
||||
};
|
||||
|
||||
assert_eq!(vec![person], ret);
|
||||
}
|
||||
}
|
||||
|
@@ -3,7 +3,7 @@ extern crate jsonpath_lib as jsonpath;
|
||||
extern crate serde_json;
|
||||
|
||||
use common::{read_json, setup};
|
||||
use jsonpath::{SelectorMut, Selector};
|
||||
use jsonpath::{Selector, SelectorMut};
|
||||
use serde_json::Value;
|
||||
|
||||
mod common;
|
||||
@@ -16,7 +16,8 @@ fn selector_mut() {
|
||||
|
||||
let mut nums = Vec::new();
|
||||
let result = selector_mut
|
||||
.str_path(r#"$.store..price"#).unwrap()
|
||||
.str_path(r#"$.store..price"#)
|
||||
.unwrap()
|
||||
.value(read_json("./benches/example.json"))
|
||||
.replace_with(&mut |v| {
|
||||
match v {
|
||||
@@ -26,15 +27,32 @@ fn selector_mut() {
|
||||
_ => {}
|
||||
}
|
||||
Value::String("a".to_string())
|
||||
}).unwrap()
|
||||
.take().unwrap();
|
||||
})
|
||||
.unwrap()
|
||||
.take()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(nums, vec![8.95_f64, 12.99_f64, 8.99_f64, 22.99_f64, 19.95_f64]);
|
||||
assert_eq!(
|
||||
nums,
|
||||
vec![8.95_f64, 12.99_f64, 8.99_f64, 22.99_f64, 19.95_f64]
|
||||
);
|
||||
|
||||
let mut selector = Selector::new();
|
||||
let result = selector.str_path(r#"$.store..price"#).unwrap()
|
||||
let result = selector
|
||||
.str_path(r#"$.store..price"#)
|
||||
.unwrap()
|
||||
.value(&result)
|
||||
.select().unwrap();
|
||||
.select()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(vec![&json!("a"), &json!("a"), &json!("a"), &json!("a"), &json!("a")], result);
|
||||
}
|
||||
assert_eq!(
|
||||
vec![
|
||||
&json!("a"),
|
||||
&json!("a"),
|
||||
&json!("a"),
|
||||
&json!("a"),
|
||||
&json!("a")
|
||||
],
|
||||
result
|
||||
);
|
||||
}
|
||||
|
390 tests/readme.rs
@@ -52,83 +52,105 @@ fn readme() {
|
||||
|
||||
let mut selector = jsonpath::selector(&json_obj);
|
||||
|
||||
assert_eq!(selector("$.store.book[*].author").unwrap(),
|
||||
vec![
|
||||
"Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. Tolkien"
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$.store.book[*].author").unwrap(),
|
||||
vec![
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$..author").unwrap(),
|
||||
vec![
|
||||
"Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. Tolkien"
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$..author").unwrap(),
|
||||
vec![
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$.store.*").unwrap(),
|
||||
vec![
|
||||
&json!([
|
||||
{ "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95 },
|
||||
{ "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99 },
|
||||
{ "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99 },
|
||||
{ "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99 }
|
||||
]),
|
||||
&json!({ "color": "red", "price": 19.95 })
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$.store.*").unwrap(),
|
||||
vec![
|
||||
&json!([
|
||||
{ "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95 },
|
||||
{ "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99 },
|
||||
{ "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99 },
|
||||
{ "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99 }
|
||||
]),
|
||||
&json!({ "color": "red", "price": 19.95 })
|
||||
]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$.store..price").unwrap(),
|
||||
vec![
|
||||
8.95, 12.99, 8.99, 22.99, 19.95
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$.store..price").unwrap(),
|
||||
vec![8.95, 12.99, 8.99, 22.99, 19.95]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$..book[2]").unwrap(),
|
||||
vec![
|
||||
&json!({
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
})
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$..book[2]").unwrap(),
|
||||
vec![&json!({
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
})]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$..book[-2]").unwrap(),
|
||||
vec![
|
||||
&json!({
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
})
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$..book[-2]").unwrap(),
|
||||
vec![&json!({
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
})]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$..book[0,1]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
&json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$..book[0,1]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
&json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
|
||||
]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$..book[:2]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
&json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$..book[:2]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
&json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
|
||||
]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$..book[:2]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
&json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$..book[:2]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
&json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
|
||||
]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$..book[?(@.isbn)]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99}),
|
||||
&json!({"category" : "fiction","author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99})
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$..book[?(@.isbn)]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99}),
|
||||
&json!({"category" : "fiction","author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99})
|
||||
]
|
||||
);
|
||||
|
||||
assert_eq!(selector("$.store.book[?(@.price < 10)]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
&json!({"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99})
|
||||
]);
|
||||
assert_eq!(
|
||||
selector("$.store.book[?(@.price < 10)]").unwrap(),
|
||||
vec![
|
||||
&json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
&json!({"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99})
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -154,9 +176,11 @@ fn readme_selector() {
|
||||
let mut selector = Selector::new();
|
||||
|
||||
let result = selector
|
||||
.str_path("$..[?(@.age >= 30)]").unwrap()
|
||||
.str_path("$..[?(@.age >= 30)]")
|
||||
.unwrap()
|
||||
.value(&json_obj)
|
||||
.select().unwrap();
|
||||
.select()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(vec![&json!({"name": "친구3", "age": 30})], result);
|
||||
|
||||
@@ -164,7 +188,13 @@ fn readme_selector() {
|
||||
assert_eq!(r#"[{"name":"친구3","age":30}]"#, result);
|
||||
|
||||
let result = selector.select_as::<Friend>().unwrap();
|
||||
assert_eq!(vec![Friend { name: "친구3".to_string(), age: Some(30) }], result);
|
||||
assert_eq!(
|
||||
vec![Friend {
|
||||
name: "친구3".to_string(),
|
||||
age: Some(30)
|
||||
}],
|
||||
result
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -184,7 +214,8 @@ fn readme_selector_mut() {
|
||||
let mut selector_mut = SelectorMut::new();
|
||||
|
||||
let result = selector_mut
|
||||
.str_path("$..[?(@.age == 20)].age").unwrap()
|
||||
.str_path("$..[?(@.age == 20)].age")
|
||||
.unwrap()
|
||||
.value(json_obj)
|
||||
.replace_with(&mut |v| {
|
||||
let age = if let Value::Number(n) = v {
|
||||
@@ -194,20 +225,25 @@ fn readme_selector_mut() {
|
||||
};
|
||||
|
||||
json!(age)
|
||||
}).unwrap()
|
||||
.take().unwrap();
|
||||
})
|
||||
.unwrap()
|
||||
.take()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(result, json!({
|
||||
"school": {
|
||||
assert_eq!(
|
||||
result,
|
||||
json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 40},
|
||||
{"name": "친구2", "age": 40}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 40},
|
||||
{"name": "친구2", "age": 40}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}));
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -226,15 +262,19 @@ fn readme_select() {
|
||||
|
||||
let json = jsonpath::select(&json_obj, "$..friends[0]").unwrap();
|
||||
|
||||
assert_eq!(json, vec![
|
||||
&json!({"name": "친구3", "age": 30}),
|
||||
&json!({"name": "친구1", "age": 20})
|
||||
]);
|
||||
assert_eq!(
|
||||
json,
|
||||
vec![
|
||||
&json!({"name": "친구3", "age": 30}),
|
||||
&json!({"name": "친구1", "age": 20})
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn readme_select_as_str() {
|
||||
let ret = jsonpath::select_as_str(r#"
|
||||
let ret = jsonpath::select_as_str(
|
||||
r#"
|
||||
{
|
||||
"school": {
|
||||
"friends": [
|
||||
@@ -247,9 +287,15 @@ fn readme_select_as_str() {
|
||||
{"name": "친구4"}
|
||||
]
|
||||
}
|
||||
"#, "$..friends[0]").unwrap();
|
||||
"#,
|
||||
"$..friends[0]",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(ret, r#"[{"name":"친구3","age":30},{"name":"친구1","age":20}]"#);
|
||||
assert_eq!(
|
||||
ret,
|
||||
r#"[{"name":"친구3","age":30},{"name":"친구1","age":20}]"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -261,19 +307,21 @@ fn readme_select_as() {
|
||||
phones: Vec<String>,
|
||||
}
|
||||
|
||||
let ret: Vec<Person> = jsonpath::select_as(r#"
|
||||
{
|
||||
"person":
|
||||
{
|
||||
"name": "Doe John",
|
||||
"age": 44,
|
||||
"phones": [
|
||||
"+44 1234567",
|
||||
"+44 2345678"
|
||||
]
|
||||
}
|
||||
}
|
||||
"#, "$.person").unwrap();
|
||||
let ret: Vec<Person> = jsonpath::select_as(
|
||||
r#"{
|
||||
"person":
|
||||
{
|
||||
"name": "Doe John",
|
||||
"age": 44,
|
||||
"phones": [
|
||||
"+44 1234567",
|
||||
"+44 2345678"
|
||||
]
|
||||
}
|
||||
}"#,
|
||||
"$.person",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let person = Person {
|
||||
name: "Doe John".to_string(),
|
||||
@@ -302,10 +350,13 @@ fn readme_compile() {
|
||||
|
||||
let json = first_firend(&json_obj).unwrap();
|
||||
|
||||
assert_eq!(json, vec![
|
||||
&json!({"name": "친구3", "age": 30}),
|
||||
&json!({"name": "친구1", "age": 20})
|
||||
]);
|
||||
assert_eq!(
|
||||
json,
|
||||
vec![
|
||||
&json!({"name": "친구3", "age": 30}),
|
||||
&json!({"name": "친구1", "age": 20})
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -326,17 +377,23 @@ fn readme_selector_fn() {
|
||||
|
||||
let json = selector("$..friends[0]").unwrap();
|
||||
|
||||
assert_eq!(json, vec![
|
||||
&json!({"name": "친구3", "age": 30}),
|
||||
&json!({"name": "친구1", "age": 20})
|
||||
]);
|
||||
assert_eq!(
|
||||
json,
|
||||
vec![
|
||||
&json!({"name": "친구3", "age": 30}),
|
||||
&json!({"name": "친구1", "age": 20})
|
||||
]
|
||||
);
|
||||
|
||||
let json = selector("$..friends[1]").unwrap();
|
||||
|
||||
assert_eq!(json, vec![
|
||||
&json!({"name": "친구4"}),
|
||||
&json!({"name": "친구2", "age": 20})
|
||||
]);
|
||||
assert_eq!(
|
||||
json,
|
||||
vec![
|
||||
&json!({"name": "친구4"}),
|
||||
&json!({"name": "친구2", "age": 20})
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -363,23 +420,34 @@ fn readme_selector_as() {
|
||||
|
||||
let json = selector("$..friends[0]").unwrap();
|
||||
|
||||
let ret = vec!(
|
||||
Friend { name: "친구3".to_string(), age: Some(30) },
|
||||
Friend { name: "친구1".to_string(), age: Some(20) }
|
||||
);
|
||||
let ret = vec![
|
||||
Friend {
|
||||
name: "친구3".to_string(),
|
||||
age: Some(30),
|
||||
},
|
||||
Friend {
|
||||
name: "친구1".to_string(),
|
||||
age: Some(20),
|
||||
},
|
||||
];
|
||||
assert_eq!(json, ret);
|
||||
|
||||
let json = selector("$..friends[1]").unwrap();
|
||||
|
||||
let ret = vec!(
|
||||
Friend { name: "친구4".to_string(), age: None },
|
||||
Friend { name: "친구2".to_string(), age: Some(20) }
|
||||
);
|
||||
let ret = vec![
|
||||
Friend {
|
||||
name: "친구4".to_string(),
|
||||
age: None,
|
||||
},
|
||||
Friend {
|
||||
name: "친구2".to_string(),
|
||||
age: Some(20),
|
||||
},
|
||||
];
|
||||
|
||||
assert_eq!(json, ret);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn readme_delete() {
|
||||
let json_obj = json!({
|
||||
@@ -396,17 +464,20 @@ fn readme_delete() {
|
||||
|
||||
let ret = jsonpath::delete(json_obj, "$..[?(20 == @.age)]").unwrap();
|
||||
|
||||
assert_eq!(ret, json!({
|
||||
"school": {
|
||||
assert_eq!(
|
||||
ret,
|
||||
json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
null,
|
||||
null
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
null,
|
||||
null
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}));
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -417,16 +488,19 @@ fn readme_delete2() {
|
||||
|
||||
println!("{:?}", ret);
|
||||
|
||||
assert_eq!(ret, json!({
|
||||
"store": {
|
||||
"book": null,
|
||||
"bicycle": {
|
||||
"color": "red",
|
||||
"price": 19.95
|
||||
}
|
||||
},
|
||||
"expensive": 10
|
||||
}));
|
||||
assert_eq!(
|
||||
ret,
|
||||
json!({
|
||||
"store": {
|
||||
"book": null,
|
||||
"bicycle": {
|
||||
"color": "red",
|
||||
"price": 19.95
|
||||
}
|
||||
},
|
||||
"expensive": 10
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -451,17 +525,21 @@ fn readme_replace_with() {
|
||||
};
|
||||
|
||||
json!(age)
|
||||
}).unwrap();
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(result, json!({
|
||||
"school": {
|
||||
assert_eq!(
|
||||
result,
|
||||
json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 40},
|
||||
{"name": "친구2", "age": 40}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 40},
|
||||
{"name": "친구2", "age": 40}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}));
|
||||
}
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]})
|
||||
);
|
||||
}
|
||||
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "jsonpath-wasm"
|
||||
version = "0.2.1"
|
||||
version = "0.2.2"
|
||||
authors = ["Changseok Han <freestrings@gmail.com>"]
|
||||
description = "It is Webassembly version of jsonpath_lib that is JsonPath engine written in Rust - Demo: https://freestrings.github.io/jsonpath"
|
||||
keywords = ["jsonpath", "json", "webassembly", "parsing", "rust"]
|
||||
|
143 wasm/src/lib.rs
@@ -5,9 +5,9 @@ extern crate serde_json;
|
||||
extern crate wasm_bindgen;
|
||||
|
||||
use cfg_if::cfg_if;
|
||||
use jsonpath::{JsonPathError, Parser};
|
||||
use jsonpath::Selector as _Selector;
|
||||
use jsonpath::SelectorMut as _SelectorMut;
|
||||
use jsonpath::{JsonPathError, Parser};
|
||||
use serde_json::Value;
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
@@ -40,40 +40,37 @@ macro_rules! console_error {
|
||||
}
|
||||
|
||||
fn into_serde_json<D>(js_value: &JsValue) -> Result<D, String>
|
||||
where D: for<'a> serde::de::Deserialize<'a>
|
||||
where
|
||||
D: for<'a> serde::de::Deserialize<'a>,
|
||||
{
|
||||
if js_value.is_string() {
|
||||
match serde_json::from_str(js_value.as_string().unwrap().as_str()) {
|
||||
Ok(json) => Ok(json),
|
||||
Err(e) => Err(e.to_string())
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
} else {
|
||||
match js_value.into_serde() {
|
||||
Ok(json) => Ok(json),
|
||||
Err(e) => Err(e.to_string())
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn replace_fun(v: &Value, fun: &js_sys::Function) -> Value {
|
||||
match JsValue::from_serde(v) {
|
||||
Ok(js_v) => {
|
||||
match fun.call1(&JsValue::NULL, &js_v) {
|
||||
Ok(result) => {
|
||||
match into_serde_json(&result) {
|
||||
Ok(json) => json,
|
||||
Err(e) => {
|
||||
console_error!("replace_with - closure returned a invalid JSON: {:?}", e);
|
||||
Value::Null
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(js_v) => match fun.call1(&JsValue::NULL, &js_v) {
|
||||
Ok(result) => match into_serde_json(&result) {
|
||||
Ok(json) => json,
|
||||
Err(e) => {
|
||||
console_error!("replace_with - fail to call closure: {:?}", e);
|
||||
console_error!("replace_with - closure returned a invalid JSON: {:?}", e);
|
||||
Value::Null
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
console_error!("replace_with - fail to call closure: {:?}", e);
|
||||
Value::Null
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
console_error!("replace_with - invalid JSON object: {:?}", e);
|
||||
Value::Null
|
||||
@@ -88,22 +85,22 @@ pub fn compile(path: &str) -> JsValue {
|
||||
let cb = Closure::wrap(Box::new(move |js_value: JsValue| {
|
||||
let json = match into_serde_json(&js_value) {
|
||||
Ok(json) => json,
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e)))
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e))),
|
||||
};
|
||||
|
||||
let mut selector = _Selector::new();
|
||||
|
||||
match &node {
|
||||
Ok(node) => selector.compiled_path(node),
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Path(e.clone())))
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Path(e.clone()))),
|
||||
};
|
||||
|
||||
match selector.value(&json).select() {
|
||||
Ok(ret) => match JsValue::from_serde(&ret) {
|
||||
Ok(ret) => ret,
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
|
||||
},
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e))
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e)),
|
||||
}
|
||||
}) as Box<Fn(JsValue) -> JsValue>);
|
||||
|
||||
@@ -116,25 +113,27 @@ pub fn compile(path: &str) -> JsValue {
|
||||
pub fn selector(js_value: JsValue) -> JsValue {
|
||||
let json: Value = match JsValue::into_serde(&js_value) {
|
||||
Ok(json) => json,
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
|
||||
};
|
||||
|
||||
let cb = Closure::wrap(Box::new(move |path: String| {
|
||||
match Parser::compile(path.as_str()) {
|
||||
let cb = Closure::wrap(
|
||||
Box::new(move |path: String| match Parser::compile(path.as_str()) {
|
||||
Ok(node) => {
|
||||
let mut selector = _Selector::new();
|
||||
let _ = selector.compiled_path(&node);
|
||||
match selector.value(&json).select() {
|
||||
Ok(ret) => match JsValue::from_serde(&ret) {
|
||||
Ok(ret) => ret,
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
|
||||
Err(e) => {
|
||||
JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
|
||||
}
|
||||
},
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e))
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e)),
|
||||
}
|
||||
}
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Path(e)))
|
||||
}
|
||||
}) as Box<Fn(String) -> JsValue>);
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Path(e))),
|
||||
}) as Box<Fn(String) -> JsValue>,
|
||||
);
|
||||
|
||||
let ret = cb.as_ref().clone();
|
||||
cb.forget();
|
||||
@@ -145,15 +144,15 @@ pub fn selector(js_value: JsValue) -> JsValue {
|
||||
pub fn select(js_value: JsValue, path: &str) -> JsValue {
|
||||
let json = match into_serde_json(&js_value) {
|
||||
Ok(json) => json,
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e)))
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e))),
|
||||
};
|
||||
|
||||
match jsonpath::select(&json, path) {
|
||||
Ok(ret) => match JsValue::from_serde(&ret) {
|
||||
Ok(ret) => ret,
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
|
||||
},
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e))
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -161,17 +160,15 @@ pub fn select(js_value: JsValue, path: &str) -> JsValue {
|
||||
pub fn delete(js_value: JsValue, path: &str) -> JsValue {
|
||||
let json = match into_serde_json(&js_value) {
|
||||
Ok(json) => json,
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e)))
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e))),
|
||||
};
|
||||
|
||||
match jsonpath::delete(json, path) {
|
||||
Ok(ret) => {
|
||||
match JsValue::from_serde(&ret) {
|
||||
Ok(ret) => ret,
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
|
||||
}
|
||||
}
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e))
|
||||
Ok(ret) => match JsValue::from_serde(&ret) {
|
||||
Ok(ret) => ret,
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
|
||||
},
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -179,15 +176,15 @@ pub fn delete(js_value: JsValue, path: &str) -> JsValue {
|
||||
pub fn replace_with(js_value: JsValue, path: &str, fun: js_sys::Function) -> JsValue {
|
||||
let json = match into_serde_json(&js_value) {
|
||||
Ok(json) => json,
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e)))
|
||||
Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e))),
|
||||
};
|
||||
|
||||
match jsonpath::replace_with(json, path, &mut |v| replace_fun(v, &fun)) {
|
||||
Ok(ret) => match JsValue::from_serde(&ret) {
|
||||
Ok(ret) => ret,
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
|
||||
},
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e))
|
||||
Err(e) => JsValue::from_str(&format!("{:?}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -205,7 +202,10 @@ pub struct Selector {
|
||||
impl Selector {
|
||||
#[wasm_bindgen(constructor)]
|
||||
pub fn new() -> Self {
|
||||
Selector { path: None, value: None }
|
||||
Selector {
|
||||
path: None,
|
||||
value: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[wasm_bindgen(catch)]
|
||||
@@ -227,23 +227,34 @@ impl Selector {
|
||||
let mut selector = _Selector::new();
|
||||
|
||||
if let Some(path) = &self.path {
|
||||
let _ = selector.str_path(&path).map_err(|e| JsValue::from_str(&format!("{:?}", e)))?;
|
||||
let _ = selector
|
||||
.str_path(&path)
|
||||
.map_err(|e| JsValue::from_str(&format!("{:?}", e)))?;
|
||||
} else {
|
||||
return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyPath)));
|
||||
return Err(JsValue::from_str(&format!(
|
||||
"{:?}",
|
||||
JsonPathError::EmptyPath
|
||||
)));
|
||||
}
|
||||
|
||||
if let Some(value) = &self.value {
|
||||
let _ = selector.value(value);
|
||||
} else {
|
||||
return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyValue)));
|
||||
return Err(JsValue::from_str(&format!(
|
||||
"{:?}",
|
||||
JsonPathError::EmptyValue
|
||||
)));
|
||||
}
|
||||
|
||||
match selector.select() {
|
||||
Ok(ret) => match JsValue::from_serde(&ret) {
|
||||
Ok(ret) => Ok(ret),
|
||||
Err(e) => Err(JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))))
|
||||
Err(e) => Err(JsValue::from_str(&format!(
|
||||
"{:?}",
|
||||
JsonPathError::Serde(e.to_string())
|
||||
))),
|
||||
},
|
||||
Err(e) => Err(JsValue::from_str(&format!("{:?}", e)))
|
||||
Err(e) => Err(JsValue::from_str(&format!("{:?}", e))),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -261,7 +272,10 @@ pub struct SelectorMut {
|
||||
impl SelectorMut {
|
||||
#[wasm_bindgen(constructor)]
|
||||
pub fn new() -> Self {
|
||||
SelectorMut { path: None, value: None }
|
||||
SelectorMut {
|
||||
path: None,
|
||||
value: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[wasm_bindgen(catch)]
|
||||
@@ -285,13 +299,19 @@ impl SelectorMut {
|
||||
if let Some(path) = &self.path {
|
||||
let _ = selector.str_path(path);
|
||||
} else {
|
||||
return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyPath)));
|
||||
return Err(JsValue::from_str(&format!(
|
||||
"{:?}",
|
||||
JsonPathError::EmptyPath
|
||||
)));
|
||||
};
|
||||
|
||||
if let Some(value) = self.value.take() {
|
||||
selector.value(value);
|
||||
} else {
|
||||
return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyValue)));
|
||||
return Err(JsValue::from_str(&format!(
|
||||
"{:?}",
|
||||
JsonPathError::EmptyValue
|
||||
)));
|
||||
};
|
||||
|
||||
match selector.delete() {
|
||||
@@ -310,13 +330,19 @@ impl SelectorMut {
|
||||
if let Some(path) = &self.path {
|
||||
let _ = selector.str_path(path);
|
||||
} else {
|
||||
return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyPath)));
|
||||
return Err(JsValue::from_str(&format!(
|
||||
"{:?}",
|
||||
JsonPathError::EmptyPath
|
||||
)));
|
||||
};
|
||||
|
||||
if let Some(value) = self.value.take() {
|
||||
selector.value(value);
|
||||
} else {
|
||||
return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyValue)));
|
||||
return Err(JsValue::from_str(&format!(
|
||||
"{:?}",
|
||||
JsonPathError::EmptyValue
|
||||
)));
|
||||
};
|
||||
|
||||
match selector.replace_with(&mut |v| replace_fun(v, &fun)) {
|
||||
@@ -333,9 +359,12 @@ impl SelectorMut {
|
||||
match self.value.take() {
|
||||
Some(ret) => match JsValue::from_serde(&ret) {
|
||||
Ok(ret) => Ok(ret),
|
||||
Err(e) => Err(JsValue::from_str(&format!("{:?}", e)))
|
||||
Err(e) => Err(JsValue::from_str(&format!("{:?}", e))),
|
||||
},
|
||||
None => Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyValue)))
|
||||
None => Err(JsValue::from_str(&format!(
|
||||
"{:?}",
|
||||
JsonPathError::EmptyValue
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -412,6 +412,79 @@ describe('filter test', () => {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('escaped single quote notation', (done) => {
|
||||
let result = jsonpath.select({"single'quote":"value"}, "$['single\\'quote']");
|
||||
if (JSON.stringify(result) === JSON.stringify(["value"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('escaped double quote notation', (done) => {
|
||||
let result = jsonpath.select({"single\"quote":"value"}, "$['single\"quote']");
|
||||
if (JSON.stringify(result) === JSON.stringify(["value"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[::]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "second", "third", "forth", "fifth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[::2]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::2]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "third", "fifth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[1: :]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1: :]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["second", "third", "forth", "fifth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[1:2:]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1:2:]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["second"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[1::2]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1::2]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["second", "forth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[0:3:1]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:1]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "second", "third"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[0:3:2]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:2]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "third"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array keys', (done) => {
|
||||
let result = jsonpath.select({
|
||||
"key1": "value1",
|
||||
"key2": 2
|
||||
}, "$['key1', 'key2']");
|
||||
if (JSON.stringify(result) === JSON.stringify(["value1", 2])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('SelectorMut test', () => {
|
||||
@@ -821,4 +894,13 @@ describe('README test', () => {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('ISSUE test', () => {
|
||||
it('Results do not match other implementations #6', (done) => {
|
||||
let result = jsonpath.select(["first", "second"], "$[:]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "second"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|