mirror of
https://github.com/fluencelabs/jsonpath
synced 2025-07-11 21:51:38 +00:00
Compare commits
16 Commits
488e0b400f
fff0e869cb
6a270c9456
ebd49c2205
2537469f03
2e0f78f017
5d36a0cf15
a72a13117e
964e0c00f5
de97e2f95a
74666d264e
51deec66d0
909c851dcc
b41b9f3aa6
1a5e8cc025
5abbfba254
@ -1,7 +1,7 @@
 <component name="ProjectRunConfigurationManager">
-<configuration default="false" name="selector" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+<configuration default="false" name="mutable" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
 <option name="channel" value="DEFAULT" />
-<option name="command" value="test --package jsonpath_lib --test selector """ />
+<option name="command" value="test --package jsonpath_lib --test mutable """ />
 <option name="allFeatures" value="false" />
 <option name="nocapture" value="false" />
 <option name="backtrace" value="SHORT" />

.idea/runConfigurations/parser.xml (generated, 2 lines changed)
@ -1,7 +1,7 @@
 <component name="ProjectRunConfigurationManager">
 <configuration default="false" name="parser" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
 <option name="channel" value="DEFAULT" />
-<option name="command" value="test --package jsonpath_lib --test parser """ />
+<option name="command" value="test --package jsonpath_lib --lib parser::parser_tests" />
 <option name="allFeatures" value="false" />
 <option name="nocapture" value="false" />
 <option name="backtrace" value="SHORT" />
@ -1,7 +1,7 @@
 <component name="ProjectRunConfigurationManager">
-<configuration default="false" name="serde" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+<configuration default="false" name="readme" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
 <option name="channel" value="DEFAULT" />
-<option name="command" value="test --package jsonpath_lib --test serde """ />
+<option name="command" value="test --package jsonpath_lib --test readme """ />
 <option name="allFeatures" value="false" />
 <option name="nocapture" value="false" />
 <option name="backtrace" value="SHORT" />

.idea/runConfigurations/tokenizer.xml (generated, 2 lines changed)
@ -1,7 +1,7 @@
 <component name="ProjectRunConfigurationManager">
 <configuration default="false" name="tokenizer" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
 <option name="channel" value="DEFAULT" />
-<option name="command" value="test --package jsonpath_lib --test tokenizer """ />
+<option name="command" value="test --package jsonpath_lib --lib parser::tokenizer_tests" />
 <option name="allFeatures" value="false" />
 <option name="nocapture" value="false" />
 <option name="backtrace" value="SHORT" />
@ -1,6 +1,6 @@
 [package]
 name = "jsonpath_lib"
-version = "0.2.0"
+version = "0.2.2"
 authors = ["Changseok Han <freestrings@gmail.com>"]

 description = "It is JsonPath engine written in Rust. it provide a similar API interface in Webassembly and Javascript also. - Webassembly Demo: https://freestrings.github.io/jsonpath"
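For orientation, a minimal sketch of how the crate described above is used from Rust. The exact `select` signature is an assumption here; it is not part of this comparison's hunks, though the Node.js binding below calls `jsonpath::select(&json, path)` in exactly this way.

```rust
#[macro_use]
extern crate serde_json;
extern crate jsonpath_lib as jsonpath;

fn main() {
    // Assumed: jsonpath::select(&Value, path) -> Result<Vec<&Value>, JsonPathError>,
    // matching how the nodejs binding in this comparison invokes it.
    let json = json!({"store": {"book": [{"title": "A"}, {"title": "B"}]}});
    let titles = jsonpath::select(&json, "$.store.book[*].title").unwrap();
    assert_eq!(titles, vec![&json!("A"), &json!("B")]);
}
```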
File diff suppressed because one or more lines are too long

Binary file not shown.

docs/bench/bootstrap.js (vendored, 38 lines changed)
@ -58,23 +58,26 @@
 /******/ "__wbindgen_cb_forget": function(p0i32) {
 /******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
 /******/ },
-/******/ "__wbg_error_af8a3e3880eae1c8": function(p0i32,p1i32) {
-/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_error_af8a3e3880eae1c8"](p0i32,p1i32);
-/******/ },
-/******/ "__wbindgen_object_drop_ref": function(p0i32) {
-/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
-/******/ },
-/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
-/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
-/******/ },
 /******/ "__wbindgen_json_parse": function(p0i32,p1i32) {
 /******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
 /******/ },
 /******/ "__wbindgen_json_serialize": function(p0i32,p1i32) {
 /******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
 /******/ },
-/******/ "__wbg_call_88d2a6153573084e": function(p0i32,p1i32,p2i32,p3i32) {
-/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_call_88d2a6153573084e"](p0i32,p1i32,p2i32,p3i32);
+/******/ "__wbg_error_8015049cb5adfca2": function(p0i32,p1i32) {
+/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_error_8015049cb5adfca2"](p0i32,p1i32);
+/******/ },
+/******/ "__wbindgen_object_drop_ref": function(p0i32) {
+/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
+/******/ },
+/******/ "__wbindgen_object_clone_ref": function(p0i32) {
+/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
+/******/ },
+/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
+/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
+/******/ },
+/******/ "__wbg_call_972de3aa550c37b2": function(p0i32,p1i32,p2i32,p3i32) {
+/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_call_972de3aa550c37b2"](p0i32,p1i32,p2i32,p3i32);
 /******/ },
 /******/ "__wbindgen_is_string": function(p0i32) {
 /******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);

@ -82,9 +85,6 @@
 /******/ "__wbindgen_string_get": function(p0i32,p1i32) {
 /******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
 /******/ },
-/******/ "__wbindgen_object_clone_ref": function(p0i32) {
-/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
-/******/ },
 /******/ "__wbindgen_debug_string": function(p0i32,p1i32) {
 /******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_debug_string"](p0i32,p1i32);
 /******/ },

@ -94,11 +94,11 @@
 /******/ "__wbindgen_rethrow": function(p0i32) {
 /******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_rethrow"](p0i32);
 /******/ },
-/******/ "__wbindgen_closure_wrapper22": function(p0i32,p1i32,p2i32) {
-/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper22"](p0i32,p1i32,p2i32);
+/******/ "__wbindgen_closure_wrapper18": function(p0i32,p1i32,p2i32) {
+/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper18"](p0i32,p1i32,p2i32);
 /******/ },
-/******/ "__wbindgen_closure_wrapper24": function(p0i32,p1i32,p2i32) {
-/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper24"](p0i32,p1i32,p2i32);
+/******/ "__wbindgen_closure_wrapper20": function(p0i32,p1i32,p2i32) {
+/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper20"](p0i32,p1i32,p2i32);
 /******/ }
 /******/ }
 /******/ };

@ -198,7 +198,7 @@
 /******/ promises.push(installedWasmModuleData);
 /******/ else {
 /******/ var importObject = wasmImportObjects[wasmModuleId]();
-/******/ var req = fetch(__webpack_require__.p + "" + {"../all_pkg/jsonpath_wasm_bg.wasm":"7a2fe8020c3403dd4ce6"}[wasmModuleId] + ".module.wasm");
+/******/ var req = fetch(__webpack_require__.p + "" + {"../all_pkg/jsonpath_wasm_bg.wasm":"d60993d3a441db221b47"}[wasmModuleId] + ".module.wasm");
 /******/ var promise;
 /******/ if(importObject instanceof Promise && typeof WebAssembly.compileStreaming === 'function') {
 /******/ promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {
docs/bench/d60993d3a441db221b47.module.wasm (binary, new file)
Binary file not shown.

docs/bootstrap.js (vendored, 38 lines changed)
The four hunks in docs/bootstrap.js are identical to those shown above for docs/bench/bootstrap.js: the same wbindgen import shims are removed and replaced (__wbg_call_88d2a6153573084e becomes __wbg_error_8015049cb5adfca2, with new __wbindgen_object_drop_ref / __wbindgen_object_clone_ref / __wbindgen_string_new / __wbg_call_972de3aa550c37b2 entries), the closure wrappers are renumbered 22/24 to 18/20, and the wasm hash 7a2fe8020c3403dd4ce6 is replaced by d60993d3a441db221b47.
docs/d60993d3a441db221b47.module.wasm (binary, new file)
Binary file not shown.
@ -1,6 +1,6 @@
 [package]
 name = "jsonpath4nodejs"
-version = "0.2.0"
+version = "0.2.2"
 authors = ["Changseok Han <freestrings@gmail.com>"]
 description = "jsonpath_lib bindings for nodejs"
 keywords = ["library", "jsonpath", "json", "nodejs"]

@ -14,7 +14,7 @@ exclude = ["artifacts.json", "index.node"]
 neon-build = "0.2.0"

 [dependencies]
-jsonpath_lib = "0.2.0"
+jsonpath_lib = "0.2.2"
 #jsonpath_lib = { path = "../../" }
 neon = "0.2.0"
 neon-serde = "0.1.1"
@ -18,7 +18,7 @@ fn select(mut ctx: FunctionContext) -> JsResult<JsValue> {

 match jsonpath::select(&json, path.as_str()) {
 Ok(value) => Ok(neon_serde::to_value(&mut ctx, &value)?),
-Err(e) => panic!("{:?}", e)
+Err(e) => panic!("{:?}", e),
 }
 }

@ -27,7 +27,7 @@ fn select_str(mut ctx: FunctionContext) -> JsResult<JsValue> {
 let path = ctx.argument::<JsString>(1)?.value();
 match jsonpath::select_as_str(&json_val, path.as_str()) {
 Ok(value) => Ok(JsString::new(&mut ctx, &value).upcast()),
-Err(e) => panic!("{:?}", e)
+Err(e) => panic!("{:?}", e),
 }
 }

@ -35,15 +35,19 @@ fn delete(mut ctx: FunctionContext) -> JsResult<JsValue> {
 let json_val = ctx.argument::<JsString>(0)?.value();
 let json: Value = match serde_json::from_str(&json_val) {
 Ok(value) => value,
-Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
+Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
 };
 let path = ctx.argument::<JsString>(1)?.value();
 match jsonpath::delete(json, &path) {
-Ok(value) => Ok(JsString::new(&mut ctx, match serde_json::to_string(&value) {
+Ok(value) => Ok(JsString::new(
+&mut ctx,
+match serde_json::to_string(&value) {
 Ok(value) => value,
-Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
-}).upcast()),
-Err(e) => panic!("{:?}", e)
+Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
+},
+)
+.upcast()),
+Err(e) => panic!("{:?}", e),
 }
 }

@ -51,36 +55,43 @@ fn replace_with(mut ctx: FunctionContext) -> JsResult<JsValue> {
 let json_val = ctx.argument::<JsString>(0)?.value();
 let json: Value = match serde_json::from_str(&json_val) {
 Ok(value) => value,
-Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
+Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
 };
 let path = ctx.argument::<JsString>(1)?.value();
 let fun = ctx.argument::<JsFunction>(2)?;
 match jsonpath::replace_with(json, &path, &mut |v| {
-let json_str = JsString::new(&mut ctx, match serde_json::to_string(v) {
+let json_str = JsString::new(
+&mut ctx,
+match serde_json::to_string(v) {
 Ok(value) => value,
-Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
-});
+Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
+},
+);

 let null = ctx.null();
 let args = vec![ctx.string(json_str.value())];
 let result = match fun.call(&mut ctx, null, args) {
 Ok(result) => result,
-Err(e) => panic!("{:?}", e)
+Err(e) => panic!("{:?}", e),
 };
 let json_str = match result.downcast::<JsString>() {
 Ok(v) => v.value(),
-Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
+Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
 };
 match serde_json::from_str(&json_str) {
 Ok(v) => v,
-Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
+Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
 }
 }) {
-Ok(value) => Ok(JsString::new(&mut ctx, match serde_json::to_string(&value) {
+Ok(value) => Ok(JsString::new(
+&mut ctx,
+match serde_json::to_string(&value) {
 Ok(value) => value,
-Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
-}).upcast()),
-Err(e) => panic!("{:?}", e)
+Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
+},
+)
+.upcast()),
+Err(e) => panic!("{:?}", e),
 }
 }

@ -93,7 +104,7 @@ impl SelectorCls {
 fn path(&mut self, path: &str) {
 let node = match Parser::compile(path) {
 Ok(node) => node,
-Err(e) => panic!("{:?}", e)
+Err(e) => panic!("{:?}", e),
 };

 self.node = Some(node);

@ -102,7 +113,7 @@ impl SelectorCls {
 fn value(&mut self, json_str: &str) {
 let value: Value = match serde_json::from_str(&json_str) {
 Ok(value) => value,
-Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
+Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
 };

 self.value = Some(value);

@ -111,12 +122,12 @@ impl SelectorCls {
 fn select(&self) -> String {
 let node = match &self.node {
 Some(node) => node,
-None => panic!("{:?}", JsonPathError::EmptyPath)
+None => panic!("{:?}", JsonPathError::EmptyPath),
 };

 let value = match &self.value {
 Some(value) => value,
-None => panic!("{:?}", JsonPathError::EmptyValue)
+None => panic!("{:?}", JsonPathError::EmptyValue),
 };

 let mut selector = Selector::new();

@ -124,7 +135,7 @@ impl SelectorCls {
 selector.value(&value);
 match selector.select_as_str() {
 Ok(ret) => ret,
-Err(e) => panic!("{:?}", e)
+Err(e) => panic!("{:?}", e),
 }
 }
 }

@ -257,10 +268,14 @@ declare_types! {
 }
 }
 register_module!(mut m, {
-m.export_class::<JsCompileFn>("CompileFn").expect("CompileFn class error");
-m.export_class::<JsSelectorFn>("SelectorFn").expect("SelectorFn class error");
-m.export_class::<JsSelector>("Selector").expect("Selector class error");
-m.export_class::<JsSelectorMut>("SelectorMut").expect("SelectorMut class error");
+m.export_class::<JsCompileFn>("CompileFn")
+.expect("CompileFn class error");
+m.export_class::<JsSelectorFn>("SelectorFn")
+.expect("SelectorFn class error");
+m.export_class::<JsSelector>("Selector")
+.expect("Selector class error");
+m.export_class::<JsSelectorMut>("SelectorMut")
+.expect("SelectorMut class error");
 m.export_function("select", select)?;
 m.export_function("deleteValue", delete)?;
 m.export_function("replaceWith", replace_with)?;
nodejs/package-lock.json (generated, 2 lines changed)
@ -1,6 +1,6 @@
 {
 "name": "jsonpath-rs",
-"version": "0.2.0",
+"version": "0.2.2",
 "lockfileVersion": 1,
 "requires": true,
 "dependencies": {
@ -1,6 +1,6 @@
 {
 "name": "jsonpath-rs",
-"version": "0.2.0",
+"version": "0.2.2",
 "description": "It is JsonPath implementation. The core implementation is written in Rust",
 "author": "Changseok Han <freestrings@gmail.com>",
 "license": "MIT",
@ -398,6 +398,93 @@ describe('filter test', () => {
 run (done, i, list[i]);
 })
 }

+it('object equal', (done) => {
+let selector = new jsonpath.Selector();
+selector.path('$..[?(@.a == 1)]');
+selector.value({
+'a': 1,
+'b': {'a': 1},
+'c': {'a': 1},
+});
+let result = selector.select();
+if (JSON.stringify(result) === JSON.stringify([{'a': 1}, {'a': 1}])) {
+done();
+}
+});
+
+it('escaped single quote notation', (done) => {
+let result = jsonpath.select({"single'quote":"value"}, "$['single\\'quote']");
+if (JSON.stringify(result) === JSON.stringify(["value"])) {
+done();
+}
+});
+
+it('escaped double quote notation', (done) => {
+let result = jsonpath.select({"single\"quote":"value"}, "$['single\"quote']");
+if (JSON.stringify(result) === JSON.stringify(["value"])) {
+done();
+}
+});
+
+it('array range with step - $[::]', (done) => {
+let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::]");
+if (JSON.stringify(result) === JSON.stringify(["first", "second", "third", "forth", "fifth"])) {
+done();
+}
+});
+
+it('array range with step - $[::2]', (done) => {
+let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::2]");
+if (JSON.stringify(result) === JSON.stringify(["first", "third", "fifth"])) {
+done();
+}
+});
+
+it('array range with step - $[1: :]', (done) => {
+let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1: :]");
+if (JSON.stringify(result) === JSON.stringify(["second", "third", "forth", "fifth"])) {
+done();
+}
+});
+
+it('array range with step - $[1:2:]', (done) => {
+let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1:2:]");
+if (JSON.stringify(result) === JSON.stringify(["second"])) {
+done();
+}
+});
+
+it('array range with step - $[1::2]', (done) => {
+let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1::2]");
+if (JSON.stringify(result) === JSON.stringify(["second", "forth"])) {
+done();
+}
+});
+
+it('array range with step - $[0:3:1]', (done) => {
+let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:1]");
+if (JSON.stringify(result) === JSON.stringify(["first", "second", "third"])) {
+done();
+}
+});
+
+it('array range with step - $[0:3:2]', (done) => {
+let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:2]");
+if (JSON.stringify(result) === JSON.stringify(["first", "third"])) {
+done();
+}
+});
+
+it('array keys', (done) => {
+let result = jsonpath.select({
+"key1": "value1",
+"key2": 2
+}, "$['key1', 'key2']");
+if (JSON.stringify(result) === JSON.stringify(["value1", 2])) {
+done();
+}
+});
 });

 describe('SelectorMut test', () => {

@ -798,3 +885,12 @@ describe('README test', () => {
 }
 });
 });
+
+describe('ISSUE test', () => {
+it('Results do not match other implementations #6', (done) => {
+let result = jsonpath.select(["first", "second"], "$[:]");
+if (JSON.stringify(result) === JSON.stringify(["first", "second"])) {
+done();
+}
+});
+});
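The new tests above exercise JSONPath slice steps (`$[start:end:step]`), bracketed key lists, and escaped quotes. A small Rust sketch of the same slice behavior through the library's `select` function; the array and paths are taken from the tests, and the expected results simply mirror the JS assertions:

```rust
#[macro_use]
extern crate serde_json;
extern crate jsonpath_lib as jsonpath;

fn main() {
    let arr = json!(["first", "second", "third", "forth", "fifth"]);

    // "$[::2]" - every second element, as asserted in the Node.js test above.
    let every_second = jsonpath::select(&arr, "$[::2]").unwrap();
    assert_eq!(every_second, vec![&json!("first"), &json!("third"), &json!("fifth")]);

    // "$[0:3:2]" - elements 0 and 2 of the 0..3 slice.
    let stepped = jsonpath::select(&arr, "$[0:3:2]").unwrap();
    assert_eq!(stepped, vec![&json!("first"), &json!("third")]);
}
```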
src/lib.rs (42 lines changed)
@ -134,8 +134,8 @@ extern crate serde_json;
 use serde_json::Value;

 pub use parser::parser::{Node, Parser};
-pub use select::{Selector, SelectorMut};
 pub use select::JsonPathError;
+pub use select::{Selector, SelectorMut};

 #[doc(hidden)]
 mod parser;
@ -171,14 +171,12 @@ mod select;
 /// ```
 pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPathError> {
 let node = Parser::compile(path);
-move |json| {
-match &node {
+move |json| match &node {
 Ok(node) => {
 let mut selector = Selector::new();
 selector.compiled_path(node).value(json).select()
 }
-Err(e) => Err(JsonPathError::Path(e.to_string()))
-}
+Err(e) => Err(JsonPathError::Path(e.to_string())),
 }
 }
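As the hunk above shows, `compile` parses the path once and returns a closure that can be applied to many JSON values. A short illustrative sketch; the sample documents and path are invented for this example:

```rust
#[macro_use]
extern crate serde_json;
extern crate jsonpath_lib as jsonpath;

fn main() {
    // The path is compiled once; the returned FnMut closure is reusable.
    let mut first_friend = jsonpath::compile("$.friends[0].name");

    let a = json!({"friends": [{"name": "Alice"}, {"name": "Bob"}]});
    let b = json!({"friends": [{"name": "Carol"}]});

    assert_eq!(first_friend(&a).unwrap(), vec![&json!("Alice")]);
    assert_eq!(first_friend(&b).unwrap(), vec![&json!("Carol")]);
}
```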
@ -219,9 +217,7 @@ pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPath
 pub fn selector<'a>(json: &'a Value) -> impl FnMut(&'a str) -> Result<Vec<&Value>, JsonPathError> {
 let mut selector = Selector::new();
 let _ = selector.value(json);
-move |path: &str| {
-selector.str_path(path)?.reset_value().select()
-}
+move |path: &str| selector.str_path(path)?.reset_value().select()
 }

 /// It is the same to `selector` function. but it deserialize the result as given type `T`.
@ -270,12 +266,12 @@ pub fn selector<'a>(json: &'a Value) -> impl FnMut(&'a str) -> Result<Vec<&Value
 ///
 /// assert_eq!(json, ret);
 /// ```
-pub fn selector_as<T: serde::de::DeserializeOwned>(json: &Value) -> impl FnMut(&str) -> Result<Vec<T>, JsonPathError> + '_ {
+pub fn selector_as<T: serde::de::DeserializeOwned>(
+json: &Value,
+) -> impl FnMut(&str) -> Result<Vec<T>, JsonPathError> + '_ {
 let mut selector = Selector::new();
 let _ = selector.value(json);
-move |path: &str| {
-selector.str_path(path)?.reset_value().select_as()
-}
+move |path: &str| selector.str_path(path)?.reset_value().select_as()
 }

 /// It is a simple select function. but it compile the jsonpath argument every time.
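A sketch of how the `selector_as` closure above might be used to deserialize matches directly into a typed value. The `Friend` struct, the sample JSON, and the serde_derive setup are assumptions for illustration; only the `selector_as` signature comes from the hunk above:

```rust
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate jsonpath_lib as jsonpath;

#[derive(Deserialize, PartialEq, Debug)]
struct Friend {
    name: String,
}

fn main() {
    let json = json!({"friends": [{"name": "Alice"}, {"name": "Bob"}]});

    // The closure borrows `json`; each call parses a fresh path string.
    let mut select_as = jsonpath::selector_as::<Friend>(&json);
    let friends = select_as("$.friends[*]").unwrap();

    assert_eq!(
        friends,
        vec![
            Friend { name: "Alice".to_string() },
            Friend { name: "Bob".to_string() }
        ]
    );
}
```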
@ -374,7 +370,10 @@ pub fn select_as_str(json_str: &str, path: &str) -> Result<String, JsonPathError
 ///
 /// assert_eq!(ret[0], person);
 /// ```
-pub fn select_as<T: serde::de::DeserializeOwned>(json_str: &str, path: &str) -> Result<Vec<T>, JsonPathError> {
+pub fn select_as<T: serde::de::DeserializeOwned>(
+json_str: &str,
+path: &str,
+) -> Result<Vec<T>, JsonPathError> {
 let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
 Selector::new().str_path(path)?.value(&json).select_as()
 }
@ -413,7 +412,12 @@ pub fn select_as<T: serde::de::DeserializeOwned>(json_str: &str, path: &str) ->
 /// ```
 pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
 let mut selector = SelectorMut::new();
-let ret = selector.str_path(path)?.value(value).delete()?.take().unwrap_or(Value::Null);
+let ret = selector
+.str_path(path)?
+.value(value)
+.delete()?
+.take()
+.unwrap_or(Value::Null);
 Ok(ret)
 }
@ -460,9 +464,15 @@ pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
 /// ]}));
 /// ```
 pub fn replace_with<F>(value: Value, path: &str, fun: &mut F) -> Result<Value, JsonPathError>
-where F: FnMut(&Value) -> Value
+where
+F: FnMut(&Value) -> Value,
 {
 let mut selector = SelectorMut::new();
-let ret = selector.str_path(path)?.value(value).replace_with(fun)?.take().unwrap_or(Value::Null);
+let ret = selector
+.str_path(path)?
+.value(value)
+.replace_with(fun)?
+.take()
+.unwrap_or(Value::Null);
 Ok(ret)
 }
@ -1,10 +1,10 @@
+pub mod parser;
 mod path_reader;
 pub(crate) mod tokenizer;
-pub mod parser;

 #[cfg(test)]
 mod parser_tests {
-use parser::parser::{ParseToken, Parser, NodeVisitor, FilterToken};
+use parser::parser::{FilterToken, NodeVisitor, ParseToken, Parser};

 struct NodeVisitorTestImpl<'a> {
 input: &'a str,
@ -13,7 +13,10 @@ mod parser_tests {

 impl<'a> NodeVisitorTestImpl<'a> {
 fn new(input: &'a str) -> Self {
-NodeVisitorTestImpl { input, stack: Vec::new() }
+NodeVisitorTestImpl {
+input,
+stack: Vec::new(),
+}
 }

 fn start(&mut self) -> Result<Vec<ParseToken>, String> {
@ -42,21 +45,29 @@ mod parser_tests {
 fn parse_path() {
 setup();

-assert_eq!(run("$.aa"), Ok(vec![
+assert_eq!(
+run("$.aa"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::In,
 ParseToken::Key("aa".to_owned())
-]));
+])
+);

-assert_eq!(run("$.00.a"), Ok(vec![
+assert_eq!(
+run("$.00.a"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::In,
 ParseToken::Key("00".to_owned()),
 ParseToken::In,
 ParseToken::Key("a".to_owned())
-]));
+])
+);

-assert_eq!(run("$.00.韓창.seok"), Ok(vec![
+assert_eq!(
+run("$.00.韓창.seok"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::In,
 ParseToken::Key("00".to_owned()),
@ -64,27 +75,33 @@ mod parser_tests {
 ParseToken::Key("韓창".to_owned()),
 ParseToken::In,
 ParseToken::Key("seok".to_owned())
-]));
+])
+);

-assert_eq!(run("$.*"), Ok(vec![
-ParseToken::Absolute,
-ParseToken::In,
-ParseToken::All
-]));
+assert_eq!(
+run("$.*"),
+Ok(vec![ParseToken::Absolute, ParseToken::In, ParseToken::All])
+);

-assert_eq!(run("$..*"), Ok(vec![
+assert_eq!(
+run("$..*"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::Leaves,
 ParseToken::All
-]));
+])
+);

-assert_eq!(run("$..[0]"), Ok(vec![
+assert_eq!(
+run("$..[0]"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::Leaves,
 ParseToken::Array,
 ParseToken::Number(0.0),
 ParseToken::ArrayEof
-]));
+])
+);

 match run("$.") {
 Ok(_) => panic!(),
@ -106,7 +123,9 @@ mod parser_tests {
 fn parse_array_sytax() {
 setup();

-assert_eq!(run("$.book[?(@.isbn)]"), Ok(vec![
+assert_eq!(
+run("$.book[?(@.isbn)]"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::In,
 ParseToken::Key("book".to_string()),
@ -115,119 +134,264 @@ mod parser_tests {
 ParseToken::In,
 ParseToken::Key("isbn".to_string()),
 ParseToken::ArrayEof
-]));
+])
+);

 //
 // Array도 컨텍스트 In으로 간주 할거라서 중첩되면 하나만
 //
-assert_eq!(run("$.[*]"), Ok(vec![
+assert_eq!(
+run("$.[*]"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::Array,
 ParseToken::All,
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$.a[*]"), Ok(vec![
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
+assert_eq!(
+run("$.a[*]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
 ParseToken::Array,
 ParseToken::All,
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$.a[*].가"), Ok(vec![
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
+assert_eq!(
+run("$.a[*].가"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
 ParseToken::Array,
 ParseToken::All,
 ParseToken::ArrayEof,
-ParseToken::In, ParseToken::Key("가".to_owned())
-]));
+ParseToken::In,
+ParseToken::Key("가".to_owned())
+])
+);

-assert_eq!(run("$.a[0][1]"), Ok(vec![
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
+assert_eq!(
+run("$.a[0][1]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
 ParseToken::Array,
 ParseToken::Number(0_f64),
 ParseToken::ArrayEof,
 ParseToken::Array,
 ParseToken::Number(1_f64),
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$.a[1,2]"), Ok(vec![
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
+assert_eq!(
+run("$.a[1,2]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
 ParseToken::Array,
 ParseToken::Union(vec![1, 2]),
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$.a[10:]"), Ok(vec![
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
+assert_eq!(
+run("$.a[10:]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
 ParseToken::Array,
-ParseToken::Range(Some(10), None),
+ParseToken::Range(Some(10), None, None),
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$.a[:11]"), Ok(vec![
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
+assert_eq!(
+run("$.a[:11]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
 ParseToken::Array,
-ParseToken::Range(None, Some(11)),
+ParseToken::Range(None, Some(11), None),
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$.a[-12:13]"), Ok(vec![
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
+assert_eq!(
+run("$.a[-12:13]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
 ParseToken::Array,
-ParseToken::Range(Some(-12), Some(13)),
+ParseToken::Range(Some(-12), Some(13), None),
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$.a[?(1>2)]"), Ok(vec![
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
-ParseToken::Array,
-ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Greater),
-ParseToken::ArrayEof
-]));
+assert_eq!(
+run(r#"$[0:3:2]"#),
+Ok(vec![

-assert_eq!(run("$.a[?($.b>3)]"), Ok(vec![
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
-ParseToken::Array,
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("b".to_owned()), ParseToken::Number(3_f64), ParseToken::Filter(FilterToken::Greater),
-ParseToken::ArrayEof
-]));

-assert_eq!(run("$[?($.c>@.d && 1==2)]"), Ok(vec![
 ParseToken::Absolute,
 ParseToken::Array,
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
-ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
+ParseToken::Range(Some(0), Some(3), Some(2)),
+ParseToken::ArrayEof
+])
+);

+assert_eq!(
+run(r#"$[:3:2]"#),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::Array,
+ParseToken::Range(None, Some(3), Some(2)),
+ParseToken::ArrayEof
+])
+);
+
+assert_eq!(
+run(r#"$[:]"#),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::Array,
+ParseToken::Range(None, None, None),
+ParseToken::ArrayEof
+])
+);
+
+assert_eq!(
+run(r#"$[::]"#),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::Array,
+ParseToken::Range(None, None, None),
+ParseToken::ArrayEof
+])
+);
+
+assert_eq!(
+run(r#"$[::2]"#),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::Array,
+ParseToken::Range(None, None, Some(2)),
+ParseToken::ArrayEof
+])
+);
+
+assert_eq!(
+run(r#"$["a", 'b']"#),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::Array,
+ParseToken::Keys(vec!["a".to_string(), "b".to_string()]),
+ParseToken::ArrayEof
+])
+);
+
+assert_eq!(
+run("$.a[?(1>2)]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
+ParseToken::Array,
+ParseToken::Number(1_f64),
+ParseToken::Number(2_f64),
 ParseToken::Filter(FilterToken::Greater),
-ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
+ParseToken::ArrayEof
+])
+);
+
+assert_eq!(
+run("$.a[?($.b>3)]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
+ParseToken::Array,
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("b".to_owned()),
+ParseToken::Number(3_f64),
+ParseToken::Filter(FilterToken::Greater),
+ParseToken::ArrayEof
+])
+);
+
+assert_eq!(
+run("$[?($.c>@.d && 1==2)]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::Array,
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("c".to_owned()),
+ParseToken::Relative,
+ParseToken::In,
+ParseToken::Key("d".to_owned()),
+ParseToken::Filter(FilterToken::Greater),
+ParseToken::Number(1_f64),
+ParseToken::Number(2_f64),
+ParseToken::Filter(FilterToken::Equal),
 ParseToken::Filter(FilterToken::And),
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$[?($.c>@.d&&(1==2||3>=4))]"), Ok(vec![
+assert_eq!(
+run("$[?($.c>@.d&&(1==2||3>=4))]"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::Array,
-ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
-ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
+ParseToken::Absolute,
+ParseToken::In,
+ParseToken::Key("c".to_owned()),
+ParseToken::Relative,
+ParseToken::In,
+ParseToken::Key("d".to_owned()),
 ParseToken::Filter(FilterToken::Greater),
-ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
-ParseToken::Number(3_f64), ParseToken::Number(4_f64), ParseToken::Filter(FilterToken::GreaterOrEqual),
+ParseToken::Number(1_f64),
+ParseToken::Number(2_f64),
+ParseToken::Filter(FilterToken::Equal),
+ParseToken::Number(3_f64),
+ParseToken::Number(4_f64),
+ParseToken::Filter(FilterToken::GreaterOrEqual),
 ParseToken::Filter(FilterToken::Or),
 ParseToken::Filter(FilterToken::And),
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$[?(@.a<@.b)]"), Ok(vec![
+assert_eq!(
+run("$[?(@.a<@.b)]"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::Array,
-ParseToken::Relative, ParseToken::In, ParseToken::Key("a".to_owned()),
-ParseToken::Relative, ParseToken::In, ParseToken::Key("b".to_owned()),
+ParseToken::Relative,
+ParseToken::In,
+ParseToken::Key("a".to_owned()),
+ParseToken::Relative,
+ParseToken::In,
+ParseToken::Key("b".to_owned()),
 ParseToken::Filter(FilterToken::Little),
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$[*][*][*]"), Ok(vec![
+assert_eq!(
+run("$[*][*][*]"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::Array,
 ParseToken::All,
@ -238,9 +402,12 @@ mod parser_tests {
 ParseToken::Array,
 ParseToken::All,
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$['a']['bb']"), Ok(vec![
+assert_eq!(
+run("$['a']['bb']"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::Array,
 ParseToken::Key("a".to_string()),
@ -248,9 +415,12 @@ mod parser_tests {
 ParseToken::Array,
 ParseToken::Key("bb".to_string()),
 ParseToken::ArrayEof
-]));
+])
+);

-assert_eq!(run("$.a[?(@.e==true)]"), Ok(vec![
+assert_eq!(
+run("$.a[?(@.e==true)]"),
+Ok(vec![
 ParseToken::Absolute,
 ParseToken::In,
 ParseToken::Key("a".to_string()),
@ -261,7 +431,38 @@ mod parser_tests {
 ParseToken::Bool(true),
 ParseToken::Filter(FilterToken::Equal),
 ParseToken::ArrayEof
-]));
+])
+);
+
+assert_eq!(
+run("$[:]"),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::Array,
+ParseToken::Range(None, None, None),
+ParseToken::ArrayEof
+])
+);
+
+assert_eq!(
+run(r#"$['single\'quote']"#),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::Array,
+ParseToken::Key("single'quote".to_string()),
+ParseToken::ArrayEof
+])
+);
+
+assert_eq!(
+run(r#"$["single\"quote"]"#),
+Ok(vec![
+ParseToken::Absolute,
+ParseToken::Array,
+ParseToken::Key(r#"single"quote"#.to_string()),
+ParseToken::ArrayEof
+])
+);

 match run("$[") {
 Ok(_) => panic!(),
@ -298,12 +499,17 @@ mod parser_tests {
|
|||||||
fn parse_array_float() {
|
fn parse_array_float() {
|
||||||
setup();
|
setup();
|
||||||
|
|
||||||
assert_eq!(run("$[?(1.1<2.1)]"), Ok(vec![
|
assert_eq!(
|
||||||
|
run("$[?(1.1<2.1)]"),
|
||||||
|
Ok(vec![
|
||||||
ParseToken::Absolute,
|
ParseToken::Absolute,
|
||||||
ParseToken::Array,
|
ParseToken::Array,
|
||||||
ParseToken::Number(1.1), ParseToken::Number(2.1), ParseToken::Filter(FilterToken::Little),
|
ParseToken::Number(1.1),
|
||||||
|
ParseToken::Number(2.1),
|
||||||
|
ParseToken::Filter(FilterToken::Little),
|
||||||
ParseToken::ArrayEof
|
ParseToken::ArrayEof
|
||||||
]));
|
])
|
||||||
|
);
|
||||||
|
|
||||||
match run("$[1.1]") {
|
match run("$[1.1]") {
|
||||||
Ok(_) => panic!(),
|
Ok(_) => panic!(),
|
||||||
@ -329,7 +535,11 @@ mod parser_tests {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tokenizer_tests {
|
mod tokenizer_tests {
|
||||||
use parser::tokenizer::{Token, TokenError, Tokenizer, TokenReader};
|
use parser::tokenizer::{Token, TokenError, TokenReader, Tokenizer};
|
||||||
|
|
||||||
|
fn setup() {
|
||||||
|
let _ = env_logger::try_init();
|
||||||
|
}
|
||||||
|
|
||||||
fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
|
fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
|
||||||
let mut tokenizer = Tokenizer::new(input);
|
let mut tokenizer = Tokenizer::new(input);
|
||||||
@ -352,73 +562,80 @@ mod tokenizer_tests {
|
|||||||
let mut tokenizer = TokenReader::new("$.a");
|
let mut tokenizer = TokenReader::new("$.a");
|
||||||
match tokenizer.next_token() {
|
match tokenizer.next_token() {
|
||||||
Ok(t) => assert_eq!(Token::Absolute(0), t),
|
Ok(t) => assert_eq!(Token::Absolute(0), t),
|
||||||
_ => panic!()
|
_ => panic!(),
|
||||||
}
|
}
|
||||||
|
|
||||||
match tokenizer.peek_token() {
|
match tokenizer.peek_token() {
|
||||||
Ok(t) => assert_eq!(&Token::Dot(1), t),
|
Ok(t) => assert_eq!(&Token::Dot(1), t),
|
||||||
_ => panic!()
|
_ => panic!(),
|
||||||
}
|
}
|
||||||
|
|
||||||
match tokenizer.peek_token() {
|
match tokenizer.peek_token() {
|
||||||
Ok(t) => assert_eq!(&Token::Dot(1), t),
|
Ok(t) => assert_eq!(&Token::Dot(1), t),
|
||||||
_ => panic!()
|
_ => panic!(),
|
||||||
}
|
}
|
||||||
|
|
||||||
match tokenizer.next_token() {
|
match tokenizer.next_token() {
|
||||||
Ok(t) => assert_eq!(Token::Dot(1), t),
|
Ok(t) => assert_eq!(Token::Dot(1), t),
|
||||||
_ => panic!()
|
_ => panic!(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn token() {
|
fn token() {
|
||||||
run("$.01.a",
|
setup();
|
||||||
|
|
||||||
|
run(
|
||||||
|
"$.01.a",
|
||||||
(
|
(
|
||||||
vec![
|
vec![
|
||||||
Token::Absolute(0),
|
Token::Absolute(0),
|
||||||
Token::Dot(1),
|
Token::Dot(1),
|
||||||
Token::Key(2, "01".to_string()),
|
Token::Key(2, "01".to_string()),
|
||||||
Token::Dot(4),
|
Token::Dot(4),
|
||||||
Token::Key(5, "a".to_string())
|
Token::Key(5, "a".to_string()),
|
||||||
]
|
],
|
||||||
, Some(TokenError::Eof)
|
Some(TokenError::Eof),
|
||||||
));
|
),
|
||||||
|
);
|
||||||
|
|
||||||
run("$. []",
|
run(
|
||||||
|
"$. []",
|
||||||
(
|
(
|
||||||
vec![
|
vec![
|
||||||
Token::Absolute(0),
|
Token::Absolute(0),
|
||||||
Token::Dot(1),
|
Token::Dot(1),
|
||||||
Token::Whitespace(2, 2),
|
Token::Whitespace(2, 2),
|
||||||
Token::OpenArray(5),
|
Token::OpenArray(5),
|
||||||
Token::CloseArray(6)
|
Token::CloseArray(6),
|
||||||
]
|
],
|
||||||
, Some(TokenError::Eof)
|
Some(TokenError::Eof),
|
||||||
));
|
),
|
||||||
|
);
|
||||||
|
|
||||||
run("$..",
|
run(
|
||||||
|
"$..",
|
||||||
|
(
|
||||||
|
vec![Token::Absolute(0), Token::Dot(1), Token::Dot(2)],
|
||||||
|
Some(TokenError::Eof),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
run(
|
||||||
|
"$..ab",
|
||||||
(
|
(
|
||||||
vec![
|
vec![
|
||||||
Token::Absolute(0),
|
Token::Absolute(0),
|
||||||
Token::Dot(1),
|
Token::Dot(1),
|
||||||
Token::Dot(2),
|
Token::Dot(2),
|
||||||
]
|
Token::Key(3, "ab".to_string()),
|
||||||
, Some(TokenError::Eof)
|
],
|
||||||
));
|
Some(TokenError::Eof),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
run("$..ab",
|
run(
|
||||||
(
|
"$..가 [",
|
||||||
vec![
|
|
||||||
Token::Absolute(0),
|
|
||||||
Token::Dot(1),
|
|
||||||
Token::Dot(2),
|
|
||||||
Token::Key(3, "ab".to_string())
|
|
||||||
]
|
|
||||||
, Some(TokenError::Eof)
|
|
||||||
));
|
|
||||||
|
|
||||||
run("$..가 [",
|
|
||||||
(
|
(
|
||||||
vec![
|
vec![
|
||||||
Token::Absolute(0),
|
Token::Absolute(0),
|
||||||
@ -427,11 +644,13 @@ mod tokenizer_tests {
|
|||||||
Token::Key(3, "가".to_string()),
|
Token::Key(3, "가".to_string()),
|
||||||
Token::Whitespace(6, 0),
|
Token::Whitespace(6, 0),
|
||||||
Token::OpenArray(7),
|
Token::OpenArray(7),
|
||||||
]
|
],
|
||||||
, Some(TokenError::Eof)
|
Some(TokenError::Eof),
|
||||||
));
|
),
|
||||||
|
);
|
||||||
|
|
||||||
run("[-1, 2 ]",
|
run(
|
||||||
|
"[-1, 2 ]",
|
||||||
(
|
(
|
||||||
vec![
|
vec![
|
||||||
Token::OpenArray(0),
|
Token::OpenArray(0),
|
||||||
@ -441,11 +660,13 @@ mod tokenizer_tests {
|
|||||||
Token::Key(5, "2".to_string()),
|
Token::Key(5, "2".to_string()),
|
||||||
Token::Whitespace(6, 0),
|
Token::Whitespace(6, 0),
|
||||||
Token::CloseArray(7),
|
Token::CloseArray(7),
|
||||||
]
|
],
|
||||||
, Some(TokenError::Eof)
|
Some(TokenError::Eof),
|
||||||
));
|
),
|
||||||
|
);
|
||||||
|
|
||||||
run("[ 1 2 , 3 \"abc\" : -10 ]",
|
run(
|
||||||
|
"[ 1 2 , 3 \"abc\" : -10 ]",
|
||||||
(
|
(
|
||||||
vec![
|
vec![
|
||||||
Token::OpenArray(0),
|
Token::OpenArray(0),
|
||||||
@ -465,11 +686,13 @@ mod tokenizer_tests {
|
|||||||
Token::Key(18, "-10".to_string()),
|
Token::Key(18, "-10".to_string()),
|
||||||
Token::Whitespace(21, 0),
|
Token::Whitespace(21, 0),
|
||||||
Token::CloseArray(22),
|
Token::CloseArray(22),
|
||||||
]
|
],
|
||||||
, Some(TokenError::Eof)
|
Some(TokenError::Eof),
|
||||||
));
|
),
|
||||||
|
);
|
||||||
|
|
||||||
run("?(@.a가 <41.01)",
|
run(
|
||||||
|
"?(@.a가 <41.01)",
|
||||||
(
|
(
|
||||||
vec![
|
vec![
|
||||||
Token::Question(0),
|
Token::Question(0),
|
||||||
@ -483,11 +706,13 @@ mod tokenizer_tests {
|
|||||||
Token::Dot(12),
|
Token::Dot(12),
|
||||||
Token::Key(13, "01".to_string()),
|
Token::Key(13, "01".to_string()),
|
||||||
Token::CloseParenthesis(15),
|
Token::CloseParenthesis(15),
|
||||||
]
|
],
|
||||||
, Some(TokenError::Eof)
|
Some(TokenError::Eof),
|
||||||
));
|
),
|
||||||
|
);
|
||||||
|
|
||||||
run("?(@.a <4a.01)",
|
run(
|
||||||
|
"?(@.a <4a.01)",
|
||||||
(
|
(
|
||||||
vec![
|
vec![
|
||||||
Token::Question(0),
|
Token::Question(0),
|
||||||
@ -501,11 +726,14 @@ mod tokenizer_tests {
|
|||||||
Token::Dot(9),
|
Token::Dot(9),
|
||||||
Token::Key(10, "01".to_string()),
|
Token::Key(10, "01".to_string()),
|
||||||
Token::CloseParenthesis(12),
|
Token::CloseParenthesis(12),
|
||||||
]
|
],
|
||||||
, Some(TokenError::Eof)
|
Some(TokenError::Eof),
|
||||||
));
|
),
|
||||||
|
);
|
||||||
|
|
||||||
run("?($.c>@.d)", (
|
run(
|
||||||
|
"?($.c>@.d)",
|
||||||
|
(
|
||||||
vec![
|
vec![
|
||||||
Token::Question(0),
|
Token::Question(0),
|
||||||
Token::OpenParenthesis(1),
|
Token::OpenParenthesis(1),
|
||||||
@ -516,9 +744,49 @@ mod tokenizer_tests {
|
|||||||
Token::At(6),
|
Token::At(6),
|
||||||
Token::Dot(7),
|
Token::Dot(7),
|
||||||
Token::Key(8, "d".to_string()),
|
Token::Key(8, "d".to_string()),
|
||||||
Token::CloseParenthesis(9)
|
Token::CloseParenthesis(9),
|
||||||
]
|
],
|
||||||
, Some(TokenError::Eof)
|
Some(TokenError::Eof),
|
||||||
));
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
run(
|
||||||
|
"$[:]",
|
||||||
|
(
|
||||||
|
vec![
|
||||||
|
Token::Absolute(0),
|
||||||
|
Token::OpenArray(1),
|
||||||
|
Token::Split(2),
|
||||||
|
Token::CloseArray(3),
|
||||||
|
],
|
||||||
|
Some(TokenError::Eof),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
run(
|
||||||
|
r#"$['single\'quote']"#,
|
||||||
|
(
|
||||||
|
vec![
|
||||||
|
Token::Absolute(0),
|
||||||
|
Token::OpenArray(1),
|
||||||
|
Token::SingleQuoted(2, "single\'quote".to_string()),
|
||||||
|
Token::CloseArray(17),
|
||||||
|
],
|
||||||
|
Some(TokenError::Eof),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
run(
|
||||||
|
r#"$["double\"quote"]"#,
|
||||||
|
(
|
||||||
|
vec![
|
||||||
|
Token::Absolute(0),
|
||||||
|
Token::OpenArray(1),
|
||||||
|
Token::DoubleQuoted(2, "double\"quote".to_string()),
|
||||||
|
Token::CloseArray(17),
|
||||||
|
],
|
||||||
|
Some(TokenError::Eof),
|
||||||
|
),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
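The following is an illustrative sketch and not part of the diff: written in the style of the parser_tests above, it shows the token streams implied by two parser additions made later in this changeset, the third "step" slot on ParseToken::Range and the new ParseToken::Keys variant. It assumes the same `run` and `setup` helpers used in the tests above and that these inputs are accepted by the new parser.

    #[test]
    fn parse_array_step_and_keys_sketch() {
        setup();

        // "$[0:3:2]": a slice with an explicit step in the third slot
        assert_eq!(
            run("$[0:3:2]"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Range(Some(0), Some(3), Some(2)),
                ParseToken::ArrayEof
            ])
        );

        // "$['a', 'b']": a comma-separated list of quoted keys
        assert_eq!(
            run("$['a', 'b']"),
            Ok(vec![
                ParseToken::Absolute,
                ParseToken::Array,
                ParseToken::Keys(vec!["a".to_string(), "b".to_string()]),
                ParseToken::ArrayEof
            ])
        );
    }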
@@ -1,3 +1,5 @@
use std::str::FromStr;

use super::tokenizer::*;

const DUMMY: usize = 0;

@@ -5,19 +7,15 @@ const DUMMY: usize = 0;
type ParseResult<T> = Result<T, String>;

mod utils {
    use std::str::FromStr;

    pub fn string_to_num<F, S: FromStr>(string: &String, msg_handler: F) -> Result<S, String>
    where
        F: Fn() -> String,
    {
        match string.as_str().parse() {
            Ok(n) => Ok(n),
            _ => Err(msg_handler()),
        }
    }
}
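Not part of the diff, just an illustration: the generic helper above replaces the old string_to_isize / string_to_f64 pair, so the target type is picked by the caller through FromStr. Inside parser.rs a call site would look roughly like this (the variable names and inputs are made up):

    fn number_examples() -> Result<(), String> {
        // isize for array indices, f64 for filter numbers; the same helper serves both.
        let index: isize = utils::string_to_num(&"42".to_string(), || "not an index".to_string())?;
        let number: f64 = utils::string_to_num(&"1.25".to_string(), || "not a number".to_string())?;
        assert_eq!(index, 42);
        assert_eq!(number, 1.25);
        Ok(())
    }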
@@ -36,6 +34,7 @@ pub enum ParseToken {
    All,

    Key(String),
    Keys(Vec<String>),
    // []
    Array,
    // 메타토큰

@@ -43,7 +42,7 @@ pub enum ParseToken {
    // ?( filter )
    Filter(FilterToken),
    // 1 : 2
    Range(Option<isize>, Option<isize>, Option<usize>),
    // 1, 2, 3
    Union(Vec<isize>),

@@ -88,9 +87,7 @@ impl Parser {
                let node = Self::node(ParseToken::Absolute);
                Self::paths(node, tokenizer)
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

@@ -107,9 +104,7 @@ impl Parser {
                let node = Self::array(prev, tokenizer)?;
                Self::paths(node, tokenizer)
            }
            _ => Ok(prev),
        }
    }

@@ -124,34 +119,22 @@ impl Parser {
            | Ok(Token::Greater(_))
            | Ok(Token::GreaterOrEqual(_))
            | Ok(Token::And(_))
            | Ok(Token::Or(_)) => Ok(node),
            _ => Self::paths(node, tokenizer),
        }
    }

    fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#path");
        match tokenizer.peek_token() {
            Ok(Token::Dot(_)) => Self::path_leaves(prev, tokenizer),
            Ok(Token::Asterisk(_)) => Self::path_in_all(prev, tokenizer),
            Ok(Token::Key(_, _)) => Self::path_in_key(prev, tokenizer),
            Ok(Token::OpenArray(_)) => {
                Self::eat_token(tokenizer);
                Self::array(prev, tokenizer)
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

@@ -159,17 +142,13 @@ impl Parser {
        debug!("#path_leaves");
        Self::eat_token(tokenizer);
        match tokenizer.peek_token() {
            Ok(Token::Asterisk(_)) => Self::path_leaves_all(prev, tokenizer),
            Ok(Token::OpenArray(_)) => {
                let mut leaves_node = Self::node(ParseToken::Leaves);
                leaves_node.left = Some(Box::new(prev));
                Ok(Self::paths(leaves_node, tokenizer)?)
            }
            _ => Self::path_leaves_key(prev, tokenizer),
        }
    }

@@ -214,12 +193,8 @@ impl Parser {
    fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#key");
        match tokenizer.next_token() {
            Ok(Token::Key(_, v)) => Ok(Self::node(ParseToken::Key(v))),
            _ => Err(tokenizer.err_msg()),
        }
    }

@@ -229,25 +204,45 @@ impl Parser {
            Ok(Token::Key(_, v)) => {
                Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    fn array_keys(tokenizer: &mut TokenReader, first_key: String) -> ParseResult<Node> {
        let mut keys = vec![first_key];
        while tokenizer.peek_is(COMMA) {
            Self::eat_token(tokenizer);
            Self::eat_whitespace(tokenizer);

            if !(tokenizer.peek_is(SINGLE_QUOTE) || tokenizer.peek_is(DOUBLE_QUOTE)) {
                return Err(tokenizer.err_msg());
            }

            match tokenizer.next_token() {
                Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
                    keys.push(val);
                }
                _ => {}
            }

            Self::eat_whitespace(tokenizer);
        }

        Ok(Self::node(ParseToken::Keys(keys)))
    }

    fn array_quote_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#array_quote_value");
        match tokenizer.next_token() {
            Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
                if !tokenizer.peek_is(COMMA) {
                    Ok(Self::node(ParseToken::Key(val)))
                } else {
                    Self::array_keys(tokenizer, val)
                }
            }
            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
            _ => Err(tokenizer.err_msg()),
        }
    }

@@ -270,13 +265,11 @@ impl Parser {
                    right: Some(Box::new(Self::node(ParseToken::All))),
                })
            }
            _ => Ok(Node {
                token: ParseToken::Array,
                left: Some(Box::new(prev)),
                right: Some(Box::new(Self::array_value(tokenizer)?)),
            }),
        }
    }

@@ -291,45 +284,31 @@ impl Parser {
        debug!("#array_value_key");
        match tokenizer.next_token() {
            Ok(Token::Key(pos, ref val)) => {
                let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
                Self::eat_whitespace(tokenizer);

                match tokenizer.peek_token() {
                    Ok(Token::Comma(_)) => Self::union(digit, tokenizer),
                    Ok(Token::Split(_)) => Self::range_from(digit, tokenizer),
                    _ => Ok(Self::node(ParseToken::Number(digit as f64))),
                }
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#array_value");
        match tokenizer.peek_token() {
            Ok(Token::Key(_, _)) => Self::array_value_key(tokenizer),
            Ok(Token::Split(_)) => {
                Self::eat_token(tokenizer);
                Self::range_to(tokenizer)
            }
            Ok(Token::DoubleQuoted(_, _)) | Ok(Token::SingleQuoted(_, _)) => {
                Self::array_quote_value(tokenizer)
            }
            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
            _ => {
                Self::eat_token(tokenizer);
                Err(tokenizer.err_msg())

@@ -342,13 +321,13 @@ impl Parser {
        let mut values = vec![num];
        while match tokenizer.peek_token() {
            Ok(Token::Comma(_)) => true,
            _ => false,
        } {
            Self::eat_token(tokenizer);
            Self::eat_whitespace(tokenizer);
            match tokenizer.next_token() {
                Ok(Token::Key(pos, ref val)) => {
                    let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
                    values.push(digit);
                }
                _ => {

@@ -359,43 +338,80 @@ impl Parser {
        Ok(Self::node(ParseToken::Union(values)))
    }

    fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
        Self::eat_whitespace(tokenizer);

        if tokenizer.peek_is(SPLIT) {
            Self::eat_token(tokenizer);
            Self::eat_whitespace(tokenizer);

            if tokenizer.peek_is(KEY) {
                match tokenizer.next_token() {
                    Ok(Token::Key(pos, str_step)) => {
                        match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
                            Ok(step) => Ok(Some(step)),
                            Err(e) => Err(e),
                        }
                    }
                    _ => Ok(None),
                }
            } else {
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }

    fn range_from(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#range_from");
        Self::eat_token(tokenizer);
        Self::eat_whitespace(tokenizer);

        match tokenizer.peek_token() {
            Ok(Token::Key(_, _)) => Self::range(from, tokenizer),
            Ok(Token::Split(_)) => match Self::range_value(tokenizer)? {
                Some(step) => Ok(Self::node(ParseToken::Range(Some(from), None, Some(step)))),
                _ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
            },
            _ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
        }
    }

    fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#range_to");

        match Self::range_value(tokenizer)? {
            Some(step) => return Ok(Self::node(ParseToken::Range(None, None, Some(step)))),
            _ => {}
        }

        match tokenizer.peek_token() {
            Ok(Token::CloseArray(_)) => {
                return Ok(Self::node(ParseToken::Range(None, None, None)));
            }
            _ => {}
        }

        match tokenizer.next_token() {
            Ok(Token::Key(pos, ref to_str)) => {
                let to = utils::string_to_num(to_str, || tokenizer.err_msg_with_pos(pos))?;
                let step = Self::range_value(tokenizer)?;
                Ok(Self::node(ParseToken::Range(None, Some(to), step)))
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

    fn range(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#range");
        match tokenizer.next_token() {
            Ok(Token::Key(pos, ref str_to)) => {
                let to = utils::string_to_num(str_to, || tokenizer.err_msg_with_pos(pos))?;
                let step = Self::range_value(tokenizer)?;
                Ok(Self::node(ParseToken::Range(Some(from), Some(to), step)))
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

@@ -407,12 +423,8 @@ impl Parser {
                Self::eat_whitespace(tokenizer);
                Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
            }
            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
            _ => Err(tokenizer.err_msg()),
        }
    }

@@ -455,9 +467,7 @@ impl Parser {
                    right: Some(Box::new(Self::exprs(tokenizer)?)),
                })
            }
            _ => Ok(prev),
        }
    }

@@ -466,7 +476,7 @@ impl Parser {
        let has_prop_candidate = match tokenizer.peek_token() {
            Ok(Token::At(_)) => true,
            _ => false,
        };

        let node = Self::term(tokenizer)?;

@@ -479,7 +489,7 @@ impl Parser {
            | Ok(Token::LittleOrEqual(_))
            | Ok(Token::Greater(_))
            | Ok(Token::GreaterOrEqual(_)) => true,
            _ => false,
        } {
            Self::op(node, tokenizer)
        } else if has_prop_candidate {

@@ -492,23 +502,15 @@ impl Parser {
    fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#term_num");
        match tokenizer.next_token() {
            Ok(Token::Key(pos, val)) => match tokenizer.peek_token() {
                Ok(Token::Dot(_)) => Self::term_num_float(val.as_str(), tokenizer),
                _ => {
                    let number = utils::string_to_num(&val, || tokenizer.err_msg_with_pos(pos))?;
                    Ok(Self::node(ParseToken::Number(number)))
                }
            },
            Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
            _ => Err(tokenizer.err_msg()),
        }
    }

@@ -521,12 +523,10 @@ impl Parser {
                f.push_str(&mut num);
                f.push('.');
                f.push_str(frac.as_str());
                let number = utils::string_to_num(&f, || tokenizer.err_msg_with_pos(pos))?;
                Ok(Self::node(ParseToken::Number(number)))
            }
            _ => Err(tokenizer.err_msg()),
        }
    }

@@ -550,9 +550,7 @@ impl Parser {
                Self::eat_whitespace(tokenizer);
                Ok(node)
            }
            _ => Self::paths(node, tokenizer),
        };
    }

@@ -560,8 +558,8 @@ impl Parser {
            return Self::json_path(tokenizer);
        }

        if tokenizer.peek_is(DOUBLE_QUOTE) || tokenizer.peek_is(SINGLE_QUOTE) {
            return Self::array_quote_value(tokenizer);
        }

        if tokenizer.peek_is(KEY) {

@@ -569,11 +567,11 @@ impl Parser {
                Some(key) => match key.chars().next() {
                    Some(ch) => match ch {
                        '-' | '0'...'9' => Self::term_num(tokenizer),
                        _ => Self::boolean(tokenizer),
                    },
                    _ => Err(tokenizer.err_msg()),
                },
                _ => Err(tokenizer.err_msg()),
            };
        }

@@ -583,27 +581,13 @@ impl Parser {
    fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#op");
        let token = match tokenizer.next_token() {
            Ok(Token::Equal(_)) => ParseToken::Filter(FilterToken::Equal),
            Ok(Token::NotEqual(_)) => ParseToken::Filter(FilterToken::NotEqual),
            Ok(Token::Little(_)) => ParseToken::Filter(FilterToken::Little),
            Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
            Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
            Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
            Err(TokenError::Eof) => ParseToken::Eof,
            _ => {
                return Err(tokenizer.err_msg());
            }

@@ -629,18 +613,18 @@ impl Parser {
    }

    fn node(token: ParseToken) -> Node {
        Node {
            left: None,
            right: None,
            token,
        }
    }

    fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
        debug!("#close_token");
        match tokenizer.next_token() {
            Ok(ref t) if t.partial_eq(token) => Ok(ret),
            _ => Err(tokenizer.err_msg()),
        }
    }
}

@@ -652,14 +636,14 @@ pub trait NodeVisitor {
            | ParseToken::Relative
            | ParseToken::All
            | ParseToken::Key(_)
            | ParseToken::Keys(_)
            | ParseToken::Range(_, _, _)
            | ParseToken::Union(_)
            | ParseToken::Number(_)
            | ParseToken::Bool(_) => {
                self.visit_token(&node.token);
            }
            ParseToken::In | ParseToken::Leaves => {
                match &node.left {
                    Some(n) => self.visit(&*n),
                    _ => {}

@@ -686,8 +670,7 @@ pub trait NodeVisitor {
                }
                self.visit_token(&ParseToken::ArrayEof);
            }
            ParseToken::Filter(FilterToken::And) | ParseToken::Filter(FilterToken::Or) => {
                match &node.left {
                    Some(n) => self.visit(&*n),
                    _ => {}

@@ -12,10 +12,7 @@ pub struct PathReader<'a> {

impl<'a> PathReader<'a> {
    pub fn new(input: &'a str) -> Self {
        PathReader { input, pos: 0 }
    }

    pub fn peek_char(&self) -> Result<(usize, char), ReaderError> {

@@ -25,7 +22,7 @@ impl<'a> PathReader<'a> {
    pub fn take_while<F>(&mut self, fun: F) -> Result<(usize, String), ReaderError>
    where
        F: Fn(&char) -> bool,
    {
        let mut char_len: usize = 0;
        let mut ret = String::new();

@@ -1,4 +1,3 @@
use std::result::Result;

use super::path_reader::{PathReader, ReaderError};

@@ -15,8 +14,8 @@ pub const SPLIT: &'static str = ":";
pub const OPEN_PARENTHESIS: &'static str = "(";
pub const CLOSE_PARENTHESIS: &'static str = ")";
pub const KEY: &'static str = "Key";
pub const DOUBLE_QUOTE: &'static str = "\"";
pub const SINGLE_QUOTE: &'static str = "'";
pub const EQUAL: &'static str = "==";
pub const GREATER_OR_EQUAL: &'static str = ">=";
pub const GREATER: &'static str = ">";

@@ -44,8 +43,8 @@ const CH_PIPE: char = '|';
const CH_LITTLE: char = '<';
const CH_GREATER: char = '>';
const CH_EXCLAMATION: char = '!';
const CH_SINGLE_QUOTE: char = '\'';
const CH_DOUBLE_QUOTE: char = '"';

#[derive(Debug, Clone, PartialEq)]
pub enum TokenError {

@@ -55,7 +54,7 @@ pub enum TokenError {
fn to_token_error(read_err: ReaderError) -> TokenError {
    match read_err {
        ReaderError::Eof => TokenError::Eof,
    }
}

@@ -109,8 +108,8 @@ impl Token {
            Token::OpenParenthesis(_) => OPEN_PARENTHESIS,
            Token::CloseParenthesis(_) => CLOSE_PARENTHESIS,
            Token::Key(_, _) => KEY,
            Token::DoubleQuoted(_, _) => DOUBLE_QUOTE,
            Token::SingleQuoted(_, _) => SINGLE_QUOTE,
            Token::Equal(_) => EQUAL,
            Token::GreaterOrEqual(_) => GREATER_OR_EQUAL,
            Token::Greater(_) => GREATER,

@@ -119,7 +118,7 @@ impl Token {
            Token::NotEqual(_) => NOT_EQUAL,
            Token::And(_) => AND,
            Token::Or(_) => OR,
            Token::Whitespace(_, _) => WHITESPACE,
        }
    }
}

@@ -137,7 +136,7 @@ fn simple_matched_token(ch: char, pos: usize) -> Option<Token> {
        CH_QUESTION => Some(Token::Question(pos)),
        CH_COMMA => Some(Token::Comma(pos)),
        CH_SEMICOLON => Some(Token::Split(pos)),
        _ => None,
    }
}

@@ -147,20 +146,42 @@ pub struct Tokenizer<'a> {
impl<'a> Tokenizer<'a> {
    pub fn new(input: &'a str) -> Self {
        trace!("input: {}", input);
        Tokenizer {
            input: PathReader::new(input),
        }
    }

    fn quote(&mut self, ch: char) -> Result<String, TokenError> {
        let (_, mut val) = self
            .input
            .take_while(|c| *c != ch)
            .map_err(to_token_error)?;

        if let Some('\\') = val.chars().last() {
            self.input.next_char().map_err(to_token_error)?;
            let _ = val.pop();
            let (_, mut val_remain) = self
                .input
                .take_while(|c| *c != ch)
                .map_err(to_token_error)?;
            self.input.next_char().map_err(to_token_error)?;
            val.push(ch);
            val.push_str(val_remain.as_str());
        } else {
            self.input.next_char().map_err(to_token_error)?;
        }

        Ok(val)
    }

    fn single_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
        let val = self.quote(ch)?;
        Ok(Token::SingleQuoted(pos, val))
    }

    fn double_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
        let val = self.quote(ch)?;
        Ok(Token::DoubleQuoted(pos, val))
    }

@@ -171,7 +192,7 @@ impl<'a> Tokenizer<'a> {
                self.input.next_char().map_err(to_token_error)?;
                Ok(Token::Equal(pos))
            }
            _ => Err(TokenError::Position(pos)),
        }
    }

@@ -182,7 +203,7 @@ impl<'a> Tokenizer<'a> {
                self.input.next_char().map_err(to_token_error)?;
                Ok(Token::NotEqual(pos))
            }
            _ => Err(TokenError::Position(pos)),
        }
    }

@@ -215,7 +236,7 @@ impl<'a> Tokenizer<'a> {
                let _ = self.input.next_char().map_err(to_token_error);
                Ok(Token::And(pos))
            }
            _ => Err(TokenError::Position(pos)),
        }
    }

@@ -226,27 +247,31 @@ impl<'a> Tokenizer<'a> {
                self.input.next_char().map_err(to_token_error)?;
                Ok(Token::Or(pos))
            }
            _ => Err(TokenError::Position(pos)),
        }
    }

    fn whitespace(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
        let (_, vec) = self
            .input
            .take_while(|c| c.is_whitespace())
            .map_err(to_token_error)?;
        Ok(Token::Whitespace(pos, vec.len()))
    }

    fn other(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
        let fun = |c: &char| match simple_matched_token(*c, pos) {
            Some(_) => false,
            _ if c == &CH_LITTLE
                || c == &CH_GREATER
                || c == &CH_EQUAL
                || c == &CH_AMPERSAND
                || c == &CH_PIPE
                || c == &CH_EXCLAMATION =>
            {
                false
            }
            _ => !c.is_whitespace(),
        };
        let (_, mut vec) = self.input.take_while(fun).map_err(to_token_error)?;
        vec.insert(0, ch);

@@ -257,10 +282,9 @@ impl<'a> Tokenizer<'a> {
        let (pos, ch) = self.input.next_char().map_err(to_token_error)?;
        match simple_matched_token(ch, pos) {
            Some(t) => Ok(t),
            None => match ch {
                CH_SINGLE_QUOTE => self.single_quote(pos, ch),
                CH_DOUBLE_QUOTE => self.double_quote(pos, ch),
                CH_EQUAL => self.equal(pos, ch),
                CH_GREATER => self.greater(pos, ch),
                CH_LITTLE => self.little(pos, ch),

@@ -269,8 +293,7 @@ impl<'a> Tokenizer<'a> {
                CH_EXCLAMATION => self.not_equal(pos, ch),
                _ if ch.is_whitespace() => self.whitespace(pos, ch),
                _ => self.other(pos, ch),
            },
        }
    }

@@ -312,7 +335,7 @@ impl<'a> TokenReader<'a> {
    pub fn peek_is(&self, simple_token: &str) -> bool {
        match self.peek_token() {
            Ok(t) => t.simple_eq(simple_token),
            _ => false,
        }
    }

@@ -344,23 +367,13 @@ impl<'a> TokenReader<'a> {
    }

    pub fn err_msg_with_pos(&self, pos: usize) -> String {
        format!("{}\n{}", self.origin_input, "^".repeat(pos))
    }

    pub fn err_msg(&self) -> String {
        match self.curr_pos {
            Some(pos) => self.err_msg_with_pos(pos),
            _ => self.err_msg_with_pos(self.err_pos),
        }
    }
}
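Not part of the diff, just an illustration of the simplified error formatting: err_msg_with_pos above now builds the message with a single format! call, the original input followed by a caret line whose length is the reported position. A minimal sketch of the resulting string shape (the input and position are made up):

    #[test]
    fn err_msg_shape_sketch() {
        let origin_input = "$.a[";
        let pos = 4;
        // Same expression as in err_msg_with_pos above.
        let msg = format!("{}\n{}", origin_input, "^".repeat(pos));
        assert_eq!(msg, "$.a[\n^^^^");
    }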
@@ -1,4 +1,5 @@
use std::collections::HashSet;
use std::fmt;

use array_tool::vec::{Intersect, Union};
use indexmap::IndexMap;

@@ -25,7 +26,9 @@ trait Cmp {
    fn cmp_json<'a>(&self, v1: &Vec<&'a Value>, v2: &Vec<&'a Value>) -> Vec<&'a Value>;

    fn default(&self) -> bool {
        false
    }
}

struct CmpEq;

@@ -200,59 +203,63 @@ impl<'a> ExprTerm<'a> {
    fn is_string(&self) -> bool {
        match &self {
            ExprTerm::String(_) => true,
            _ => false,
        }
    }

    fn is_number(&self) -> bool {
        match &self {
            ExprTerm::Number(_) => true,
            _ => false,
        }
    }

    fn is_bool(&self) -> bool {
        match &self {
            ExprTerm::Bool(_) => true,
            _ => false,
        }
    }

    fn is_json(&self) -> bool {
        match &self {
            ExprTerm::Json(_, _) => true,
            _ => false,
        }
    }

    fn cmp<C1: Cmp, C2: Cmp>(
        &self,
        other: &Self,
        cmp_fn: &C1,
        reverse_cmp_fn: &C2,
    ) -> ExprTerm<'a> {
        match &self {
            ExprTerm::String(s1) => match &other {
                ExprTerm::String(s2) => ExprTerm::Bool(cmp_fn.cmp_string(s1, s2)),
                ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
                _ => ExprTerm::Bool(cmp_fn.default()),
            },
            ExprTerm::Number(n1) => match &other {
                ExprTerm::Number(n2) => ExprTerm::Bool(cmp_fn.cmp_f64(&to_f64(n1), &to_f64(n2))),
                ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
                _ => ExprTerm::Bool(cmp_fn.default()),
            },
            ExprTerm::Bool(b1) => match &other {
                ExprTerm::Bool(b2) => ExprTerm::Bool(cmp_fn.cmp_bool(b1, b2)),
                ExprTerm::Json(_, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
                _ => ExprTerm::Bool(cmp_fn.default()),
            },
            ExprTerm::Json(fk1, vec1) if other.is_string() => {
                let s2 = if let ExprTerm::String(s2) = &other {
                    s2
                } else {
                    unreachable!()
                };

                let ret: Vec<&Value> = vec1
                    .iter()
                    .filter(|v1| match v1 {
                        Value::String(s1) => cmp_fn.cmp_string(s1, s2),
                        Value::Object(map1) => {
                            if let Some(FilterKey::String(k)) = fk1 {

@@ -262,16 +269,26 @@ impl<'a> ExprTerm<'a> {
                            }
                            cmp_fn.default()
                        }
                        _ => cmp_fn.default(),
                    })
                    .map(|v| *v)
                    .collect();

                if ret.is_empty() {
                    ExprTerm::Bool(cmp_fn.default())
                } else {
                    ExprTerm::Json(None, ret)
                }
            }
            ExprTerm::Json(fk1, vec1) if other.is_number() => {
                let n2 = if let ExprTerm::Number(n2) = &other {
                    n2
                } else {
                    unreachable!()
                };
                let ret: Vec<&Value> = vec1
                    .iter()
                    .filter(|v1| match v1 {
                        Value::Number(n1) => cmp_fn.cmp_f64(&to_f64(n1), &to_f64(n2)),
                        Value::Object(map1) => {
                            if let Some(FilterKey::String(k)) = fk1 {

@@ -281,16 +298,26 @@ impl<'a> ExprTerm<'a> {
                            }
                            cmp_fn.default()
                        }
                        _ => cmp_fn.default(),
                    })
                    .map(|v| *v)
                    .collect();

                if ret.is_empty() {
                    ExprTerm::Bool(cmp_fn.default())
                } else {
                    ExprTerm::Json(None, ret)
                }
            }
            ExprTerm::Json(fk1, vec1) if other.is_bool() => {
                let b2 = if let ExprTerm::Bool(b2) = &other {
                    b2
                } else {
                    unreachable!()
                };
                let ret: Vec<&Value> = vec1
                    .iter()
                    .filter(|v1| match v1 {
                        Value::Bool(b1) => cmp_fn.cmp_bool(b1, b2),
                        Value::Object(map1) => {
                            if let Some(FilterKey::String(k)) = fk1 {

@@ -300,22 +327,29 @@ impl<'a> ExprTerm<'a> {
                            }
                            cmp_fn.default()
                        }
                        _ => cmp_fn.default(),
                    })
                    .map(|v| *v)
                    .collect();

                if ret.is_empty() {
                    ExprTerm::Bool(cmp_fn.default())
                } else {
                    ExprTerm::Json(None, ret)
                }
            }
            ExprTerm::Json(_, vec1) if other.is_json() => match &other {
                ExprTerm::Json(_, vec2) => {
                    let vec = cmp_fn.cmp_json(vec1, vec2);
                    if vec.is_empty() {
                        ExprTerm::Bool(cmp_fn.default())
                    } else {
                        ExprTerm::Json(None, vec)
                    }
                }
                _ => unreachable!(),
            },
            _ => unreachable!(),
        }
    }

@@ -399,30 +433,31 @@ impl<'a> Into<ExprTerm<'a>> for &Vec<&'a Value> {
    }
}

fn walk_all_with_str<'a>(
    vec: &Vec<&'a Value>,
    tmp: &mut Vec<&'a Value>,
    key: &str,
    is_filter: bool,
) {
    if is_filter {
        walk(vec, tmp, &|v| match v {
            Value::Object(map) if map.contains_key(key) => Some(vec![v]),
            _ => None,
        });
    } else {
        walk(vec, tmp, &|v| match v {
            Value::Object(map) => match map.get(key) {
                Some(v) => Some(vec![v]),
                _ => None,
            },
            _ => None,
        });
    }
}

fn walk_all<'a>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>) {
    walk(vec, tmp, &|v| match v {
        Value::Array(vec) => Some(vec.iter().collect()),
        Value::Object(map) => {
            let mut tmp = Vec::new();
            for (_, v) in map {

@@ -430,15 +465,17 @@ fn walk_all<'a>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>) {
            }
            Some(tmp)
        }
        _ => None,
    });
}

fn walk<'a, F>(vec: &Vec<&'a Value>, tmp: &mut Vec<&'a Value>, fun: &F)
where
    F: Fn(&Value) -> Option<Vec<&Value>>,
{
    fn _walk<'a, F>(v: &'a Value, tmp: &mut Vec<&'a Value>, fun: &F)
    where
        F: Fn(&Value) -> Option<Vec<&Value>>,
    {
        if let Some(mut ret) = fun(v) {
            tmp.append(&mut ret);

@@ -478,7 +515,6 @@ enum FilterKey {
    All,
}

pub enum JsonPathError {
    EmptyPath,
    EmptyValue,

@@ -486,6 +522,23 @@ pub enum JsonPathError {
    Serde(String),
}

impl fmt::Debug for JsonPathError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self)
    }
}

impl fmt::Display for JsonPathError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            JsonPathError::EmptyPath => f.write_str("path not set"),
            JsonPathError::EmptyValue => f.write_str("json value not set"),
            JsonPathError::Path(msg) => f.write_str(&format!("path error: \n{}\n", msg)),
            JsonPathError::Serde(msg) => f.write_str(&format!("serde error: \n{}\n", msg)),
        }
    }
}
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Selector<'a, 'b> {
|
pub struct Selector<'a, 'b> {
|
||||||
node: Option<Node>,
|
node: Option<Node>,
|
||||||
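With Display in place a JsonPathError renders as a readable message rather than the derived enum dump. A small illustrative check (the path here is a deliberately broken one; the exact message text comes from the impl above):

use jsonpath_lib as jsonpath;
use serde_json::json;

fn main() {
    // "$[" is not a valid path, so select() is expected to return JsonPathError::Path(..).
    let err = jsonpath::select(&json!({}), "$[").unwrap_err();
    // Prints something like: path error: ...
    println!("{}", err);
}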
@@ -575,12 +628,12 @@ impl<'a, 'b> Selector<'a, 'b> {
@@ -589,10 +642,9 @@ impl<'a, 'b> Selector<'a, 'b> {
@@ -601,7 +653,7 @@ impl<'a, 'b> Selector<'a, 'b> {
@@ -622,7 +674,7 @@ impl<'a, 'b> Selector<'a, 'b> {
rustfmt only in the Selector accessors and the filter plumbing: trailing commas are added to the match arms (Err(e) => return Err(JsonPathError::Serde(e.to_string())), _ => Err(JsonPathError::EmptyValue), _ => unreachable!(),), and select_as_str() collapses to a single expression:

    Ok(serde_json::to_string(r).map_err(|e| JsonPathError::Serde(e.to_string()))?)

select() still answers with Ok(r.to_vec()) for the current result set, and the filter branch still pushes ExprTerm::Json(Some(filter_key), tmp) onto self.terms.
@@ -649,18 +701,27 @@ impl<'a, 'b> Selector<'a, 'b> {
rustfmt reflow of next_in_filter_with_str; the pointer-based de-duplication logic is unchanged:

fn next_in_filter_with_str(&mut self, key: &str) {
    fn _collect<'a>(
        v: &'a Value,
        tmp: &mut Vec<&'a Value>,
        key: &str,
        visited: &mut HashSet<*const Value>,
    ) {
        match v {
            Value::Object(map) => {
                if map.contains_key(key) {
                    let ptr = v as *const Value;
                    if !visited.contains(&ptr) {
                        visited.insert(ptr);
                        tmp.push(v)
                    }
                }
            }
            Value::Array(vec) => {
                for v in vec {
                    _collect(v, tmp, key, visited);
                }
            }
            _ => {}
        }
    }
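The visited set in _collect keys on the node's address (*const Value), so a node reachable through several branches of a filter is pushed only once. The same trick in isolation (a sketch, not library code):

use serde_json::{json, Value};
use std::collections::HashSet;

fn main() {
    let v = json!({"a": 1});
    let mut visited: HashSet<*const Value> = HashSet::new();
    let mut tmp: Vec<&Value> = Vec::new();

    // Try to record the same node twice; pointer identity filters the duplicate.
    for candidate in [&v, &v] {
        let ptr = candidate as *const Value;
        if !visited.contains(&ptr) {
            visited.insert(ptr);
            tmp.push(candidate);
        }
    }
    assert_eq!(tmp.len(), 1);
}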
@@ -690,13 +751,22 @@ impl<'a, 'b> Selector<'a, 'b> {
The debug! call in next_from_current_with_num is split across lines, and next_from_current_with_str now takes a list of keys instead of a single key, so one bracket expression can select several object members in a single pass; the pointer-based de-duplication stays as it was:

fn next_from_current_with_str(&mut self, keys: &Vec<String>) {
    fn _collect<'a>(
        v: &'a Value,
        tmp: &mut Vec<&'a Value>,
        keys: &Vec<String>,
        visited: &mut HashSet<*const Value>,
    ) {
        match v {
            Value::Object(map) => {
                for key in keys {
                    if let Some(v) = map.get(key) {
                        let ptr = v as *const Value;
                        if !visited.contains(&ptr) {

@@ -705,8 +775,11 @@ impl<'a, 'b> Selector<'a, 'b> {
                        }
                    }
                }
            }
            Value::Array(vec) => {
                for v in vec {
                    _collect(v, tmp, keys, visited);
                }
            }
            _ => {}
        }
    }

@@ -716,12 +789,15 @@ impl<'a, 'b> Selector<'a, 'b> {
    let mut tmp = Vec::new();
    let mut visited = HashSet::new();
    for c in current {
        _collect(c, &mut tmp, keys, &mut visited);
    }
    self.current = Some(tmp);

    debug!(
        "next_from_current_with_str : {:?}, {:?}",
        keys, self.current
    );
}

@@ -732,9 +808,11 @@ impl<'a, 'b> Selector<'a, 'b> {
next_all_from_current gets the same rustfmt treatment: its Value::Array arm becomes an explicit block around the for loop that recurses with _collect(v, tmp); behavior is unchanged.
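A quick usage-level view of what the key list enables, using the comma-separated bracket form exercised by the new tests/filter.rs assertion further down (the document shape is made up for the example):

use jsonpath_lib as jsonpath;
use serde_json::json;

fn main() {
    let doc = json!({"eyeColor": "blue", "name": "Leonor Herman", "age": 40});
    // One bracket expression, several members, selected in the order given.
    let picked = jsonpath::select(&doc, r#"$.["eyeColor", "name"]"#).unwrap();
    assert_eq!(picked, vec![&json!("blue"), &json!("Leonor Herman")]);
}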
@@ -838,7 +916,7 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
The visitor is adapted to the new key-list signature and reflowed. ExprTerm::String now wraps its key:

ExprTerm::String(key) => {
    self.next_from_current_with_str(&vec![key]);
}

@@ -856,8 +934,7 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
ParseToken::All drops its extra block and matches on self.tokens.last() directly:

ParseToken::All => match self.tokens.last() {
    Some(ParseToken::Leaves) => {
        self.tokens.pop();
        self.all_from_current();

@@ -867,8 +944,7 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
        self.next_all_from_current();
    }
    _ => {}
},

@@ -882,11 +958,9 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
In the key handling that follows, the Leaves case collapses to a single call and the In case switches to the key-list form:

ParseToken::Leaves => self.all_from_current_with_str(key.as_str()),
ParseToken::In => {
    self.next_from_current_with_str(&vec![key.clone()])
}
_ => {}
@@ -905,8 +979,20 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
A new ParseToken::Keys(keys) arm handles the comma-separated key-union selector, and the Number arm is reflowed:

ParseToken::Keys(keys) => {
    if !self.terms.is_empty() {
        unimplemented!("keys in filter");
    }

    if let Some(ParseToken::Array) = self.tokens.pop() {
        self.next_from_current_with_str(keys);
    } else {
        unreachable!();
    }
}
ParseToken::Number(v) => {
    self.terms
        .push(Some(ExprTerm::Number(Number::from_f64(*v).unwrap())));
}

@@ -933,7 +1019,7 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
ParseToken::Range gains a step component (the arm becomes ParseToken::Range(from, to, step)) and the index loop honors it, defaulting to a step of 1:

@@ -955,7 +1041,10 @@ impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
for i in (from..to).step_by(match step {
    Some(step) => *step,
    _ => 1,
}) {
    if let Some(v) = vec.get(i) {
        tmp.push(v);
    }
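That loop is plain Range::step_by over the resolved bounds. A standalone sketch of the resulting slice semantics, checked against the expectations the new range() test asserts further down (for example, $[::2] picks every other element):

fn take_slice<T: Clone>(items: &[T], from: usize, to: usize, step: Option<usize>) -> Vec<T> {
    // Mirror of the selector's loop: iterate from..to with the optional step,
    // silently skipping indices that fall outside the array.
    (from..to)
        .step_by(step.unwrap_or(1))
        .filter_map(|i| items.get(i).cloned())
        .collect()
}

fn main() {
    let v = ["first", "second", "third", "forth", "fifth"];
    assert_eq!(take_slice(&v, 0, v.len(), Some(2)), vec!["first", "third", "fifth"]);
    assert_eq!(take_slice(&v, 1, 2, None), vec!["second"]);
}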
@@ -1049,7 +1138,10 @@ fn replace_value<F: FnMut(&Value) -> Value>(tokens: Vec<String>, value: &mut Val
@@ -1067,7 +1159,12 @@ impl SelectorMut {
@@ -1084,20 +1181,24 @@ impl SelectorMut {
@@ -1133,7 +1234,10 @@ impl SelectorMut {
rustfmt only in impl SelectorMut: the struct literal in new() (SelectorMut { path: None, value: None }), the _walk helper inside compute_paths (origin: &Value, target: &mut Vec<&Value>, tokens: &mut Vec<String>, visited: &mut IndexMap<*const Value, Vec<String>>), its Value::Array and Value::Object arms (tokens.push, recurse, tokens.pop), and the replace_with<F: FnMut(&Value) -> Value> signature are reflowed across lines. Behavior is unchanged: compute_paths still records, for every value returned by select(), the token path from the root, matching nodes by pointer identity.
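The idea behind compute_paths can be shown in isolation: walk the document, keep a stack of tokens for the current position, and remember the stack whenever the current node's address is one of the selected nodes. A sketch under that reading (not the library's exact code; it also skips the visited-map bookkeeping):

use serde_json::{json, Value};

// Record the key/index path to every node whose address appears in `targets`.
fn paths_to(origin: &Value, targets: &[*const Value], tokens: &mut Vec<String>, out: &mut Vec<Vec<String>>) {
    if targets.contains(&(origin as *const Value)) {
        out.push(tokens.clone());
    }
    match origin {
        Value::Array(vec) => {
            for (i, v) in vec.iter().enumerate() {
                tokens.push(i.to_string());
                paths_to(v, targets, tokens, out);
                tokens.pop();
            }
        }
        Value::Object(map) => {
            for (k, v) in map {
                tokens.push(k.clone());
                paths_to(v, targets, tokens, out);
                tokens.pop();
            }
        }
        _ => {}
    }
}

fn main() {
    let doc = json!({"a": {"b": [10, 20]}});
    let hit = &doc["a"]["b"][1]; // pretend this reference came from select()
    let mut out = Vec::new();
    paths_to(&doc, &[hit as *const Value], &mut Vec::new(), &mut out);
    assert_eq!(out, vec![vec!["a".to_string(), "b".to_string(), "1".to_string()]]);
}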
@@ -31,11 +31,22 @@ pub fn read_contents(path: &str) -> String {
The test helper in tests/common now drives the builder end to end and insists the expected value is a JSON array:

#[allow(dead_code)]
pub fn select_and_then_compare<'a>(path: &str, json: Value, target: Value) {
    let mut selector = Selector::new();
    let result = selector
        .str_path(path)
        .unwrap()
        .value(&json)
        .select_as::<Value>()
        .unwrap();
    assert_eq!(
        result,
        match target {
            Value::Array(vec) => vec.clone(),
            _ => panic!("Give me the Array!"),
        },
        "{}",
        path
    );
}

465
tests/filter.rs

@@ -11,130 +11,212 @@ mod common;
fn array() keeps its existing assertions (only reflowed by rustfmt) and gains one new assertion for the comma-separated key selector:

select_and_then_compare("$.school.friends[1, 2]", read_json("./benches/data_obj.json"),
    json!([{"id": 1, "name": "Vincent Cannon" }, {"id": 2, "name": "Gray Berry"}]));
select_and_then_compare("$.school.friends[1: ]", read_json("./benches/data_obj.json"),
    json!([{"id": 1, "name": "Vincent Cannon" }, {"id": 2, "name": "Gray Berry"}]));
select_and_then_compare("$.school.friends[:-2]", read_json("./benches/data_obj.json"),
    json!([{"id": 0, "name": "Millicent Norman"}]));
select_and_then_compare("$..friends[2].name", read_json("./benches/data_obj.json"),
    json!(["Gray Berry", "Gray Berry"]));
select_and_then_compare("$..friends[*].name", read_json("./benches/data_obj.json"),
    json!(["Vincent Cannon", "Gray Berry", "Millicent Norman", "Vincent Cannon", "Gray Berry"]));
select_and_then_compare("$['school']['friends'][*].['name']", read_json("./benches/data_obj.json"),
    json!(["Millicent Norman", "Vincent Cannon", "Gray Berry"]));
select_and_then_compare("$['school']['friends'][0].['name']", read_json("./benches/data_obj.json"),
    json!(["Millicent Norman"]));
select_and_then_compare(r#"$.["eyeColor", "name"]"#, read_json("./benches/data_obj.json"),
    json!(["blue", "Leonor Herman"]));

fn return_type() and fn op_default() are only reflowed; every call keeps its selector and expected JSON. return_type checks "$.school", "$.school[?(@.friends[0])]", "$.school[?(@.friends[10])]" and "$.school[?(1==1)]" (each returning the school object with its three friends) plus "$.school.friends[?(1==1)]" (the same friends wrapped in one array). op_default covers "$.school[?(@.friends == @.friends)]", "$.friends[?(@.name)]", "$.friends[?(@.id >= 2)]", "$.friends[?(@.id >= 2 || @.id == 1)]", "$.friends[?( (@.id >= 2 || @.id == 1) && @.id == 0)]" (expecting [null]), "$..friends[?(@.id == $.index)].id" (expecting [0, 0]), "$..book[?($.store.bicycle.price < @.price)].price" (expecting [22.99]), "$..book[?( (@.price == 12.99 || @.category == 'reference') && @.price > 10)].price" (expecting [12.99]), and the two inline-document age filters "$..[?(@.age > 40)]" and "$..[?(@.age >= 30)]".
@@ -165,61 +249,130 @@ fn op_number() {
fn op_string(), fn op_object() and fn op_complex() are only reflowed by rustfmt; each keeps its filters and expected results. op_string compares @.a against "b" and "c" with ==, !=, <, <=, >, >= on json!({ "a": "b" }) (the < and > cases expect [null]). op_object runs the same six comparisons between @.a and @.c on json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}) (only == matches). op_complex mixes literals and paths (1 == @.a, "1" != @.a, @.a <= 1, @.a > "1" on json!({ "a": { "b": 1 } })), all expecting [null].

@@ -227,13 +380,19 @@ fn example() {
@@ -241,9 +400,13 @@ fn example() {
@@ -251,9 +414,13 @@ fn example() {
@@ -266,9 +433,13 @@ fn example() {
@@ -281,9 +452,13 @@ fn example() {
@@ -298,9 +473,13 @@ fn example() {
@@ -315,9 +494,13 @@ fn example() {
fn example() is likewise only reflowed. Against ./benches/example.json it still checks $.store.book[*].author, $..author (the four authors), $.store.* (the book array plus the red bicycle), $.store..price (8.95, 12.99, 8.99, 22.99, 19.95), $..book[2] and $..book[-2] (both Moby Dick), $..book[0, 1] and $..book[:2] (Sayings of the Century and Sword of Honour), $..book[2:] (Moby Dick and The Lord of the Rings), and $..book[?(@.isbn)] (the two books that carry an isbn).
@@ -331,22 +514,92 @@ fn example() {
The tail of fn example() is reflowed ($.store.book[?(@.price < 10)] still returns the two books under 10, and $..* is still compared against ./benches/giveme_every_thing_result.json), fn filer_same_obj() keeps its $..[?(@.a == 1)] check, and two tests are added for the new slice-step and quoted-key support:

#[test]
fn range() {
    setup();

    select_and_then_compare(
        "$[ : ]",
        json!(["first", "second"]),
        json!(["first", "second"]),
    );
    select_and_then_compare(
        "$[::]",
        json!(["first", "second", "third", "forth", "fifth"]),
        json!(["first", "second", "third", "forth", "fifth"]),
    );
    select_and_then_compare(
        "$[::2]",
        json!(["first", "second", "third", "forth", "fifth"]),
        json!(["first", "third", "fifth"]),
    );
    select_and_then_compare(
        "$[1::]",
        json!(["first", "second", "third", "forth", "fifth"]),
        json!(["second", "third", "forth", "fifth"]),
    );
    select_and_then_compare(
        "$[1:2:]",
        json!(["first", "second", "third", "forth", "fifth"]),
        json!(["second"]),
    );
    select_and_then_compare(
        "$[1::2]",
        json!(["first", "second", "third", "forth", "fifth"]),
        json!(["second", "forth"]),
    );
    select_and_then_compare(
        "$[0:3:1]",
        json!(["first", "second", "third", "forth", "fifth"]),
        json!(["first", "second", "third"]),
    );
    select_and_then_compare(
        "$[0:3:2]",
        json!(["first", "second", "third", "forth", "fifth"]),
        json!(["first", "third"]),
    );
}

#[test]
fn quote() {
    setup();

    select_and_then_compare(
        r#"$['single\'quote']"#,
        json!({"single'quote":"value"}),
        json!(["value"]),
    );
    select_and_then_compare(
        r#"$["double\"quote"]"#,
        json!({"double\"quote":"value"}),
        json!(["value"]),
    );
}
31
tests/lib.rs

@@ -65,17 +65,26 @@ fn selector_as() {
rustfmt only: the expected vectors in fn selector_as() move from vec!(..) one-liners to vec![..] with the Friend struct literals spread over lines; the assertions are unchanged ($..friends[2] still yields two Friend { id: 2, name: Some("Gray Berry") } values, and $..friends[0] still yields Friend { id: 0, name: None } and Friend { id: 0, name: Some("Millicent Norman") }).

@@ -117,7 +126,8 @@ fn test_to_struct() {
In fn test_to_struct() the jsonpath::select_as call is split so the embedded JSON document and the "$.person" path sit on their own lines; the deserialized Person is checked as before.
@@ -3,7 +3,7 @@ extern crate jsonpath_lib as jsonpath;
extern crate serde_json;

use common::{read_json, setup};
use jsonpath::{Selector, SelectorMut};
use serde_json::Value;

mod common;

@@ -16,7 +16,8 @@ fn selector_mut() {
@@ -26,15 +27,32 @@ fn selector_mut() {
fn selector_mut() is only reflowed: the SelectorMut chain (.str_path(r#"$.store..price"#).unwrap().value(read_json("./benches/example.json")).replace_with(..).unwrap().take().unwrap()) and the Selector chain that re-reads the result are split one call per line, and the two assertions keep their expectations (the collected prices 8.95, 12.99, 8.99, 22.99, 19.95 and five json!("a") replacements).
220
tests/readme.rs

@@ -52,17 +52,28 @@ fn readme() {
@@ -71,64 +82,75 @@ fn readme() {
fn readme() is only reflowed by rustfmt: every assert_eq! against the selector closure keeps its path and expected value ($.store.book[*].author and $..author list the four authors, $.store.* returns the book array plus the red bicycle, $.store..price returns 8.95, 12.99, 8.99, 22.99, 19.95, $..book[2] and $..book[-2] return Moby Dick, $..book[0,1] and $..book[:2] return the first two books, $..book[?(@.isbn)] returns the two books with an isbn, and $.store.book[?(@.price < 10)] returns the two books under 10).
@@ -154,9 +176,11 @@ fn readme_selector() {
@@ -164,7 +188,13 @@ fn readme_selector() {
@@ -184,7 +214,8 @@ fn readme_selector_mut() {
@@ -194,10 +225,14 @@ fn readme_selector_mut() {
@@ -207,7 +242,8 @@ fn readme_selector_mut() {
@@ -226,15 +262,19 @@ fn readme_select() {
@@ -247,9 +287,15 @@ fn readme_select_as_str() {
@@ -261,8 +307,8 @@ fn readme_select_as() {
@@ -272,8 +318,10 @@ fn readme_select_as() {
@@ -302,10 +350,13 @@ fn readme_compile() {
@@ -326,17 +377,23 @@ fn readme_selector_fn() {
@@ -363,23 +420,34 @@ fn readme_selector_as() {
@@ -396,7 +464,9 @@ fn readme_delete() {
@@ -406,7 +476,8 @@ fn readme_delete() {
@@ -417,7 +488,9 @@ fn readme_delete2() {
@@ -426,7 +499,8 @@ fn readme_delete2() {
@@ -451,9 +525,12 @@ fn readme_replace_with() {
@@ -463,5 +540,6 @@ fn readme_replace_with() {
The remaining readme tests (readme_selector, readme_selector_mut, readme_select, readme_select_as_str, readme_select_as, readme_compile, readme_selector_fn, readme_selector_as, readme_delete, readme_delete2, readme_replace_with) change only in formatting: builder chains such as .str_path("$..[?(@.age >= 30)]").unwrap().value(&json_obj).select().unwrap() are split one call per line, multi-line assert_eq!, vec! and json! literals are reflowed, and the embedded JSON documents, paths and expected values (the "친구1" through "친구4" friends, the example.json store, the Friend and Person structs) are all unchanged.
wasm/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "jsonpath-wasm"
-version = "0.2.1"
+version = "0.2.2"
 authors = ["Changseok Han <freestrings@gmail.com>"]
 description = "It is Webassembly version of jsonpath_lib that is JsonPath engine written in Rust - Demo: https://freestrings.github.io/jsonpath"
 keywords = ["jsonpath", "json", "webassembly", "parsing", "rust"]

129 wasm/src/lib.rs
@@ -5,9 +5,9 @@ extern crate serde_json;
 extern crate wasm_bindgen;

 use cfg_if::cfg_if;
-use jsonpath::{JsonPathError, Parser};
 use jsonpath::Selector as _Selector;
 use jsonpath::SelectorMut as _SelectorMut;
+use jsonpath::{JsonPathError, Parser};
 use serde_json::Value;
 use wasm_bindgen::prelude::*;

@@ -40,40 +40,37 @@ macro_rules! console_error {
 }

 fn into_serde_json<D>(js_value: &JsValue) -> Result<D, String>
-    where D: for<'a> serde::de::Deserialize<'a>
+where
+    D: for<'a> serde::de::Deserialize<'a>,
 {
     if js_value.is_string() {
         match serde_json::from_str(js_value.as_string().unwrap().as_str()) {
             Ok(json) => Ok(json),
-            Err(e) => Err(e.to_string())
+            Err(e) => Err(e.to_string()),
         }
     } else {
         match js_value.into_serde() {
             Ok(json) => Ok(json),
-            Err(e) => Err(e.to_string())
+            Err(e) => Err(e.to_string()),
         }
     }
 }

 fn replace_fun(v: &Value, fun: &js_sys::Function) -> Value {
     match JsValue::from_serde(v) {
-        Ok(js_v) => {
-            match fun.call1(&JsValue::NULL, &js_v) {
-                Ok(result) => {
-                    match into_serde_json(&result) {
+        Ok(js_v) => match fun.call1(&JsValue::NULL, &js_v) {
+            Ok(result) => match into_serde_json(&result) {
                 Ok(json) => json,
                 Err(e) => {
                     console_error!("replace_with - closure returned a invalid JSON: {:?}", e);
                     Value::Null
                 }
-                    }
-                }
+            },
             Err(e) => {
                 console_error!("replace_with - fail to call closure: {:?}", e);
                 Value::Null
             }
-            }
-        }
+        },
         Err(e) => {
             console_error!("replace_with - invalid JSON object: {:?}", e);
             Value::Null
@@ -88,22 +85,22 @@ pub fn compile(path: &str) -> JsValue {
     let cb = Closure::wrap(Box::new(move |js_value: JsValue| {
         let json = match into_serde_json(&js_value) {
             Ok(json) => json,
-            Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e)))
+            Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e))),
         };

         let mut selector = _Selector::new();

         match &node {
             Ok(node) => selector.compiled_path(node),
-            Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Path(e.clone())))
+            Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Path(e.clone()))),
         };

         match selector.value(&json).select() {
             Ok(ret) => match JsValue::from_serde(&ret) {
                 Ok(ret) => ret,
-                Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
+                Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
             },
-            Err(e) => JsValue::from_str(&format!("{:?}", e))
+            Err(e) => JsValue::from_str(&format!("{:?}", e)),
         }
     }) as Box<Fn(JsValue) -> JsValue>);

@@ -116,25 +113,27 @@ pub fn compile(path: &str) -> JsValue {
 pub fn selector(js_value: JsValue) -> JsValue {
     let json: Value = match JsValue::into_serde(&js_value) {
         Ok(json) => json,
-        Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
+        Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
     };

-    let cb = Closure::wrap(Box::new(move |path: String| {
-        match Parser::compile(path.as_str()) {
+    let cb = Closure::wrap(
+        Box::new(move |path: String| match Parser::compile(path.as_str()) {
             Ok(node) => {
                 let mut selector = _Selector::new();
                 let _ = selector.compiled_path(&node);
                 match selector.value(&json).select() {
                     Ok(ret) => match JsValue::from_serde(&ret) {
                         Ok(ret) => ret,
-                        Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
+                        Err(e) => {
+                            JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
+                        }
                     },
-                    Err(e) => JsValue::from_str(&format!("{:?}", e))
+                    Err(e) => JsValue::from_str(&format!("{:?}", e)),
                 }
             }
-            Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Path(e)))
-        }
-    }) as Box<Fn(String) -> JsValue>);
+            Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Path(e))),
+        }) as Box<Fn(String) -> JsValue>,
+    );

     let ret = cb.as_ref().clone();
     cb.forget();

@@ -145,15 +144,15 @@ pub fn selector(js_value: JsValue) -> JsValue {
 pub fn select(js_value: JsValue, path: &str) -> JsValue {
     let json = match into_serde_json(&js_value) {
         Ok(json) => json,
-        Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e)))
+        Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e))),
     };

     match jsonpath::select(&json, path) {
         Ok(ret) => match JsValue::from_serde(&ret) {
             Ok(ret) => ret,
-            Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
+            Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
         },
-        Err(e) => JsValue::from_str(&format!("{:?}", e))
+        Err(e) => JsValue::from_str(&format!("{:?}", e)),
     }
 }

@@ -161,17 +160,15 @@ pub fn select(js_value: JsValue, path: &str) -> JsValue {
 pub fn delete(js_value: JsValue, path: &str) -> JsValue {
     let json = match into_serde_json(&js_value) {
         Ok(json) => json,
-        Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e)))
+        Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e))),
     };

     match jsonpath::delete(json, path) {
-        Ok(ret) => {
-            match JsValue::from_serde(&ret) {
+        Ok(ret) => match JsValue::from_serde(&ret) {
             Ok(ret) => ret,
-                Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
-            }
-        }
-        Err(e) => JsValue::from_str(&format!("{:?}", e))
+            Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
+        },
+        Err(e) => JsValue::from_str(&format!("{:?}", e)),
     }
 }

@@ -179,15 +176,15 @@ pub fn delete(js_value: JsValue, path: &str) -> JsValue {
 pub fn replace_with(js_value: JsValue, path: &str, fun: js_sys::Function) -> JsValue {
     let json = match into_serde_json(&js_value) {
         Ok(json) => json,
-        Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e)))
+        Err(e) => return JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e))),
     };

     match jsonpath::replace_with(json, path, &mut |v| replace_fun(v, &fun)) {
         Ok(ret) => match JsValue::from_serde(&ret) {
             Ok(ret) => ret,
-            Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string())))
+            Err(e) => JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))),
         },
-        Err(e) => JsValue::from_str(&format!("{:?}", e))
+        Err(e) => JsValue::from_str(&format!("{:?}", e)),
     }
 }

@@ -205,7 +202,10 @@ pub struct Selector {
 impl Selector {
     #[wasm_bindgen(constructor)]
     pub fn new() -> Self {
-        Selector { path: None, value: None }
+        Selector {
+            path: None,
+            value: None,
+        }
     }

     #[wasm_bindgen(catch)]
@@ -227,23 +227,34 @@ impl Selector {
         let mut selector = _Selector::new();

         if let Some(path) = &self.path {
-            let _ = selector.str_path(&path).map_err(|e| JsValue::from_str(&format!("{:?}", e)))?;
+            let _ = selector
+                .str_path(&path)
+                .map_err(|e| JsValue::from_str(&format!("{:?}", e)))?;
         } else {
-            return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyPath)));
+            return Err(JsValue::from_str(&format!(
+                "{:?}",
+                JsonPathError::EmptyPath
+            )));
         }

         if let Some(value) = &self.value {
             let _ = selector.value(value);
         } else {
-            return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyValue)));
+            return Err(JsValue::from_str(&format!(
+                "{:?}",
+                JsonPathError::EmptyValue
+            )));
         }

         match selector.select() {
             Ok(ret) => match JsValue::from_serde(&ret) {
                 Ok(ret) => Ok(ret),
-                Err(e) => Err(JsValue::from_str(&format!("{:?}", JsonPathError::Serde(e.to_string()))))
+                Err(e) => Err(JsValue::from_str(&format!(
+                    "{:?}",
+                    JsonPathError::Serde(e.to_string())
+                ))),
             },
-            Err(e) => Err(JsValue::from_str(&format!("{:?}", e)))
+            Err(e) => Err(JsValue::from_str(&format!("{:?}", e))),
         }
     }
 }
@@ -261,7 +272,10 @@ pub struct SelectorMut {
 impl SelectorMut {
     #[wasm_bindgen(constructor)]
     pub fn new() -> Self {
-        SelectorMut { path: None, value: None }
+        SelectorMut {
+            path: None,
+            value: None,
+        }
     }

     #[wasm_bindgen(catch)]
@@ -285,13 +299,19 @@ impl SelectorMut {
         if let Some(path) = &self.path {
             let _ = selector.str_path(path);
         } else {
-            return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyPath)));
+            return Err(JsValue::from_str(&format!(
+                "{:?}",
+                JsonPathError::EmptyPath
+            )));
         };

         if let Some(value) = self.value.take() {
             selector.value(value);
         } else {
-            return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyValue)));
+            return Err(JsValue::from_str(&format!(
+                "{:?}",
+                JsonPathError::EmptyValue
+            )));
         };

         match selector.delete() {
@@ -310,13 +330,19 @@ impl SelectorMut {
         if let Some(path) = &self.path {
             let _ = selector.str_path(path);
         } else {
-            return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyPath)));
+            return Err(JsValue::from_str(&format!(
+                "{:?}",
+                JsonPathError::EmptyPath
+            )));
         };

         if let Some(value) = self.value.take() {
             selector.value(value);
         } else {
-            return Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyValue)));
+            return Err(JsValue::from_str(&format!(
+                "{:?}",
+                JsonPathError::EmptyValue
+            )));
         };

         match selector.replace_with(&mut |v| replace_fun(v, &fun)) {
@@ -333,9 +359,12 @@ impl SelectorMut {
         match self.value.take() {
             Some(ret) => match JsValue::from_serde(&ret) {
                 Ok(ret) => Ok(ret),
-                Err(e) => Err(JsValue::from_str(&format!("{:?}", e)))
+                Err(e) => Err(JsValue::from_str(&format!("{:?}", e))),
             },
-            None => Err(JsValue::from_str(&format!("{:?}", JsonPathError::EmptyValue)))
+            None => Err(JsValue::from_str(&format!(
+                "{:?}",
+                JsonPathError::EmptyValue
+            ))),
         }
     }
 }
@@ -412,6 +412,79 @@ describe('filter test', () => {
             done();
         }
     });
+
+    it('escaped single quote notation', (done) => {
+        let result = jsonpath.select({"single'quote":"value"}, "$['single\\'quote']");
+        if (JSON.stringify(result) === JSON.stringify(["value"])) {
+            done();
+        }
+    });
+
+    it('escaped double quote notation', (done) => {
+        let result = jsonpath.select({"single\"quote":"value"}, "$['single\"quote']");
+        if (JSON.stringify(result) === JSON.stringify(["value"])) {
+            done();
+        }
+    });
+
+    it('array range with step - $[::]', (done) => {
+        let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::]");
+        if (JSON.stringify(result) === JSON.stringify(["first", "second", "third", "forth", "fifth"])) {
+            done();
+        }
+    });
+
+    it('array range with step - $[::2]', (done) => {
+        let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::2]");
+        if (JSON.stringify(result) === JSON.stringify(["first", "third", "fifth"])) {
+            done();
+        }
+    });
+
+    it('array range with step - $[1: :]', (done) => {
+        let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1: :]");
+        if (JSON.stringify(result) === JSON.stringify(["second", "third", "forth", "fifth"])) {
+            done();
+        }
+    });
+
+    it('array range with step - $[1:2:]', (done) => {
+        let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1:2:]");
+        if (JSON.stringify(result) === JSON.stringify(["second"])) {
+            done();
+        }
+    });
+
+    it('array range with step - $[1::2]', (done) => {
+        let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1::2]");
+        if (JSON.stringify(result) === JSON.stringify(["second", "forth"])) {
+            done();
+        }
+    });
+
+    it('array range with step - $[0:3:1]', (done) => {
+        let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:1]");
+        if (JSON.stringify(result) === JSON.stringify(["first", "second", "third"])) {
+            done();
+        }
+    });
+
+    it('array range with step - $[0:3:2]', (done) => {
+        let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:2]");
+        if (JSON.stringify(result) === JSON.stringify(["first", "third"])) {
+            done();
+        }
+    });
+
+    it('array keys', (done) => {
+        let result = jsonpath.select({
+            "key1": "value1",
+            "key2": 2
+        }, "$['key1', 'key2']");
+        if (JSON.stringify(result) === JSON.stringify(["value1", 2])) {
+            done();
+        }
+    });
 });

 describe('SelectorMut test', () => {
@@ -822,3 +895,12 @@ describe('README test', () => {
         }
     });
 });
+
+describe('ISSUE test', () => {
+    it('Results do not match other implementations #6', (done) => {
+        let result = jsonpath.select(["first", "second"], "$[:]");
+        if (JSON.stringify(result) === JSON.stringify(["first", "second"])) {
+            done();
+        }
+    });
+});
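The new wasm test cases above double as a quick reference for the selector syntax they cover. Below is a minimal illustrative sketch of the same calls outside the test harness; the require() path is an assumption (the test file's own import statement is not part of this diff), and the commented results simply restate the expectations asserted in the tests.

// Sketch only: mirrors the expectations asserted in the new tests above.
// Assumption: the package is loaded as 'jsonpath-wasm'; adjust to however the suite imports it.
const jsonpath = require('jsonpath-wasm');

jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::2]");   // ["first", "third", "fifth"]
jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1::2]");  // ["second", "forth"]
jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:1]"); // ["first", "second", "third"]
jsonpath.select({"key1": "value1", "key2": 2}, "$['key1', 'key2']");         // ["value1", 2]
jsonpath.select({"single'quote": "value"}, "$['single\\'quote']");           // ["value"]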