Mirror of https://github.com/fluencelabs/jsonpath (synced 2025-07-13 14:41:39 +00:00)

Compare commits

190 Commits
SHA1 | Author | Date | |
---|---|---|---|
49179464f6 | |||
81bbf47545 | |||
55e8088111 | |||
c1d4c78da9 | |||
75c28fc73d | |||
1512e9b72f | |||
a168c46c04 | |||
83a170eb08 | |||
710f7ad544 | |||
74e79e6330 | |||
ec92c95a69 | |||
da7cf723de | |||
c99879cfbb | |||
7991bba51d | |||
b8c82a9126 | |||
ba31b48c73 | |||
e9f69cd2b9 | |||
241a7f482d | |||
67cc6447a8 | |||
9bc4b81f33 | |||
0d4d82fd49 | |||
de46a66148 | |||
ef52c63bc8 | |||
d618e60e13 | |||
a6f742e3ad | |||
6f20013076 | |||
218321a4bd | |||
d855c17899 | |||
4919a03c6b | |||
7a07bc7744 | |||
32eef3f7c7 | |||
b736c962d2 | |||
accd08139f | |||
25cffed2d7 | |||
bab2ff38f7 | |||
ffd87cfbe4 | |||
7597325f59 | |||
2ba3930a8c | |||
844a96b2d1 | |||
3b4d2b4ffc | |||
c2a6f3b319 | |||
8e10128826 | |||
ad39c9e668 | |||
636618e4ac | |||
9fa0f74ce9 | |||
cc5ce6f65f | |||
1152af6c68 | |||
1a54a36cd3 | |||
5b878d7ba7 | |||
d84d0d845c | |||
c3ac7e40e8 | |||
c8ab8ad107 | |||
8c24411c3f | |||
422a23ee57 | |||
b5c5d6b88e | |||
ea7599c012 | |||
9276c0aa02 | |||
2b875c8a7e | |||
6a89553c33 | |||
fe376c4483 | |||
5a52ded4ca | |||
773ea3a3b3 | |||
17a8608392 | |||
e0db04aed9 | |||
d5364ad74a | |||
67991df1f2 | |||
15e6c6065b | |||
fab07adc5a | |||
4b62fcd376 | |||
2daef2c938 | |||
43d092cb35 | |||
19f0878b5a | |||
b2e27b423a | |||
88e6320cf7 | |||
8b85ec9d61 | |||
f5e46882da | |||
53c4711d1a | |||
31612315b8 | |||
2efb019155 | |||
d9b9e9d8bd | |||
c0a5296451 | |||
6c983ced99 | |||
d0f93bde49 | |||
d7423e35da | |||
a23ab7a519 | |||
d4140c8ed5 | |||
595a2d8528 | |||
e8e9dbfe1c | |||
b644c5b0a6 | |||
5c29b54e3a | |||
dde0d5dc2e | |||
a553b4b06b | |||
3d33e8bd08 | |||
8f01598e05 | |||
cab5177811 | |||
9a28faf058 | |||
319186b1d9 | |||
2f0d29d644 | |||
b910ed35f9 | |||
f5717d6d26 | |||
f1fda2af13 | |||
6ad4432737 | |||
13816df970 | |||
030dccc8cc | |||
28ad9c903f | |||
be29571670 | |||
59dad2ea02 | |||
967df1b787 | |||
6d38c67e90 | |||
053be432f2 | |||
d384079842 | |||
5f832e8fe7 | |||
4390feb807 | |||
f536391b71 | |||
c19c75dac5 | |||
ad47444b7a | |||
488e0b400f | |||
fff0e869cb | |||
6a270c9456 | |||
ebd49c2205 | |||
2537469f03 | |||
2e0f78f017 | |||
5d36a0cf15 | |||
a72a13117e | |||
964e0c00f5 | |||
de97e2f95a | |||
74666d264e | |||
51deec66d0 | |||
909c851dcc | |||
b41b9f3aa6 | |||
1a5e8cc025 | |||
5abbfba254 | |||
ffefb7b2e6 | |||
950966d57e | |||
635b5b8d43 | |||
ff52821323 | |||
4af31947f5 | |||
766be8cab2 | |||
2e9e0ac6fc | |||
fbb2b49ba0 | |||
e096e62dbf | |||
ec5d76d2d6 | |||
24d18efb6f | |||
dd9315bc90 | |||
9a08df7843 | |||
ac3224892b | |||
498f2ce4f4 | |||
56a22674bf | |||
893af2afc0 | |||
3f89b9b183 | |||
d2a5d9092e | |||
9a35357ddb | |||
bc2db273bf | |||
416636bc48 | |||
fe8a2f70c0 | |||
846ad26e2c | |||
9d94d1cd41 | |||
802640a6da | |||
5b653ab8a0 | |||
9d8ab7ae23 | |||
135d3c319b | |||
e2a6b13c9a | |||
765f04ce5d | |||
503ee9ae13 | |||
4e4d7c4c22 | |||
5cff83ebbb | |||
b49d95d5db | |||
b9e4049a5e | |||
671ca83eed | |||
3792e0014d | |||
e4a50bd689 | |||
132f63b7f9 | |||
8a580e3b2f | |||
d0e572ff56 | |||
4a044ba250 | |||
35ef9f8c5e | |||
753a822341 | |||
3276e7e18a | |||
3b45f1c4a5 | |||
1c3656460e | |||
d263e30c91 | |||
ceadbcec84 | |||
a15abe38fb | |||
7a106539d1 | |||
4ad783e40c | |||
0729a2a47f | |||
30aa38379a | |||
d955a1632c | |||
8ec694090b | |||
d75b93612d |
.gitignore (vendored): 3 changes
@@ -2,4 +2,5 @@
.vscode
!.idea/runConfigurations/
/target/
Cargo.lock
Cargo.lock
callgrind.out.*
.idea/runConfigurations/all.xml (generated): 9 changes
@ -3,10 +3,13 @@
|
||||
<option name="channel" value="DEFAULT" />
|
||||
<option name="command" value="test --package jsonpath_lib" />
|
||||
<option name="allFeatures" value="false" />
|
||||
<option name="nocapture" value="false" />
|
||||
<option name="backtrace" value="SHORT" />
|
||||
<option name="nocapture" value="true" />
|
||||
<option name="emulateTerminal" value="false" />
|
||||
<option name="backtrace" value="NO" />
|
||||
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
|
||||
<envs />
|
||||
<method v="2" />
|
||||
<method v="2">
|
||||
<option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
|
||||
</method>
|
||||
</configuration>
|
||||
</component>
|
.idea/runConfigurations/filter.xml (generated): 12 changes
@ -1,12 +0,0 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="filter" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
|
||||
<option name="channel" value="DEFAULT" />
|
||||
<option name="command" value="test --package jsonpath_lib --test filter """ />
|
||||
<option name="allFeatures" value="false" />
|
||||
<option name="nocapture" value="false" />
|
||||
<option name="backtrace" value="SHORT" />
|
||||
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
|
||||
<envs />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
.idea/runConfigurations/lib.xml (generated): 12 changes
@ -1,12 +0,0 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="lib" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
|
||||
<option name="channel" value="DEFAULT" />
|
||||
<option name="command" value="test --package jsonpath_lib --test lib """ />
|
||||
<option name="allFeatures" value="false" />
|
||||
<option name="nocapture" value="false" />
|
||||
<option name="backtrace" value="SHORT" />
|
||||
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
|
||||
<envs />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
.idea/runConfigurations/parser.xml (generated): 2 changes
@@ -1,7 +1,7 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="parser" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="channel" value="DEFAULT" />
<option name="command" value="test --package jsonpath_lib --test parser """ />
<option name="command" value="test --package jsonpath_lib --lib parser::parser_tests" />
<option name="allFeatures" value="false" />
<option name="nocapture" value="false" />
<option name="backtrace" value="SHORT" />
.idea/runConfigurations/serde.xml (generated): 12 changes
@ -1,12 +0,0 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="serde" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
|
||||
<option name="channel" value="DEFAULT" />
|
||||
<option name="command" value="test --package jsonpath_lib --test serde """ />
|
||||
<option name="allFeatures" value="false" />
|
||||
<option name="nocapture" value="true" />
|
||||
<option name="backtrace" value="SHORT" />
|
||||
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
|
||||
<envs />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
.idea/runConfigurations/tokenizer.xml (generated): 2 changes
@@ -1,7 +1,7 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="tokenizer" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="channel" value="DEFAULT" />
<option name="command" value="test --package jsonpath_lib --test tokenizer """ />
<option name="command" value="test --package jsonpath_lib --lib parser::tokenizer_tests" />
<option name="allFeatures" value="false" />
<option name="nocapture" value="false" />
<option name="backtrace" value="SHORT" />
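The parser.xml and tokenizer.xml run configurations above switch from separate integration-test targets (`--test parser`, `--test tokenizer`) to in-crate unit-test modules (`--lib parser::parser_tests`, `--lib parser::tokenizer_tests`). A minimal sketch of the layout such a command expects is shown below; only the module path comes from the run configuration, while the `tokenize` function and the test body are purely illustrative.

```rust
// Hypothetical sketch of src/parser/mod.rs: `cargo test --package jsonpath_lib
// --lib parser::tokenizer_tests` runs the #[test] functions inside this module.
pub fn tokenize(input: &str) -> Vec<String> {
    // Placeholder tokenizer used only for this sketch.
    input.split_whitespace().map(str::to_owned).collect()
}

#[cfg(test)]
mod tokenizer_tests {
    use super::tokenize;

    #[test]
    fn splits_on_whitespace() {
        assert_eq!(tokenize("$ . store"), vec!["$", ".", "store"]);
    }
}
```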
.travis.yml: 49 changes
@ -1,5 +1,9 @@
|
||||
language: rust
|
||||
sudo: false
|
||||
sudo: required
|
||||
addons:
|
||||
apt:
|
||||
packages:
|
||||
- libssl-dev
|
||||
|
||||
cache: cargo
|
||||
|
||||
@ -13,33 +17,27 @@ matrix:
|
||||
- rust: stable
|
||||
os: linux
|
||||
env: RUST_BACKTRACE=1
|
||||
before_cache: |
|
||||
if [[ "$TRAVIS_RUST_VERSION" == stable ]]; then
|
||||
cargo install cargo-tarpaulin -f
|
||||
fi
|
||||
before_script:
|
||||
- (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
|
||||
- (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
|
||||
- cargo install-update -a
|
||||
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
|
||||
- rustup component add clippy
|
||||
script:
|
||||
- cargo build --verbose --all
|
||||
- cargo test --verbose --all
|
||||
- cd wasm
|
||||
- wasm-pack build
|
||||
- ./clippy.sh
|
||||
after_success: |
|
||||
cargo tarpaulin --exclude-files wasm parser/mod.rs --out Xml
|
||||
bash <(curl -s https://codecov.io/bash)
|
||||
- rust: stable
|
||||
os: osx
|
||||
env: RUST_BACKTRACE=1
|
||||
before_script:
|
||||
- (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
|
||||
- (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
|
||||
- cargo install-update -a
|
||||
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
|
||||
script:
|
||||
- cargo clean
|
||||
- cargo build --verbose --all
|
||||
- cargo test --verbose --all
|
||||
- cd wasm
|
||||
- wasm-pack build
|
||||
- language: node_js
|
||||
os: linux
|
||||
node_js:
|
||||
- 'node'
|
||||
- '11'
|
||||
- '10'
|
||||
- '9'
|
||||
@ -49,16 +47,13 @@ matrix:
|
||||
- sh /tmp/rustup.sh -y
|
||||
- export PATH="$HOME/.cargo/bin:$PATH"
|
||||
- source "$HOME/.cargo/env"
|
||||
- cd nodejs
|
||||
- node -v
|
||||
- npm -v
|
||||
- npm install
|
||||
before_script:
|
||||
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
|
||||
script:
|
||||
- npm test
|
||||
- bash ./build-wasm.sh
|
||||
- language: node_js
|
||||
os: osx
|
||||
node_js:
|
||||
- 'node'
|
||||
- '11'
|
||||
- '10'
|
||||
- '9'
|
||||
@ -68,9 +63,7 @@ matrix:
|
||||
- sh /tmp/rustup.sh -y
|
||||
- export PATH="$HOME/.cargo/bin:$PATH"
|
||||
- source "$HOME/.cargo/env"
|
||||
- cd nodejs
|
||||
- node -v
|
||||
- npm -v
|
||||
- npm install
|
||||
before_script:
|
||||
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
|
||||
script:
|
||||
- npm test
|
||||
- bash ./build-wasm.sh
|
Cargo.toml: 21 changes
@@ -1,15 +1,16 @@
[package]
name = "jsonpath_lib"
version = "0.1.8"
version = "0.2.5"
authors = ["Changseok Han <freestrings@gmail.com>"]

description = "JsonPath in Rust and Webassembly - Webassembly Demo: https://freestrings.github.io/jsonpath"
description = "It is JsonPath engine written in Rust. it provide a similar API interface in Webassembly and Javascript too. - Webassembly Demo: https://freestrings.github.io/jsonpath"
readme = "README.md"

keywords = ["library", "jsonpath", "json", "webassembly"]
keywords = ["jsonpath", "json", "webassembly", "lua", "query"]
categories = ['wasm', "parser-implementations", "api-bindings"]

repository = "https://github.com/freestrings/jsonpath"
documentation = "https://docs.rs/jsonpath_lib/0.1.0/jsonpath_lib"
documentation = "https://docs.rs/jsonpath_lib/0.2.5/jsonpath_lib"
license = "MIT"

[badges]
@@ -17,18 +18,16 @@ travis-ci = { repository = "freestrings/jsonpath", branch = "master" }

[dependencies]
log = "0.4"
env_logger = "0.6.0"
env_logger = "0.7"
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["preserve_order"] }
indexmap = "1.0.2"

[dev-dependencies]
bencher = "0.1.5"
array_tool = "1.0.3"

[lib]
name = "jsonpath_lib"
path = "src/lib.rs"
crate-type = ["cdylib", "rlib"]

[profile.release]
#[profile.release]
#debug = true
#lto = false
#lto = false
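The `crate-type = ["cdylib", "rlib"]` line above means the crate is built both as an ordinary Rust library and as a C-style dynamic library for the Webassembly bindings. On the plain-Rust side, a minimal usage sketch looks like the following; the `select` call and the `json!` setup mirror the README examples further down, and error handling is elided.

```rust
// Minimal consumer sketch for jsonpath_lib used as an rlib dependency.
extern crate jsonpath_lib as jsonpath;
#[macro_use]
extern crate serde_json;

fn main() {
    let json_obj = json!({
        "friends": [
            {"name": "friend1", "age": 20},
            {"name": "friend2"}
        ]
    });

    // Returns references to every first element of a `friends` array.
    let found = jsonpath::select(&json_obj, "$..friends[0]").unwrap();
    assert_eq!(found, vec![&json!({"name": "friend1", "age": 20})]);
}
```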
README.md: 725 changes
@ -2,54 +2,383 @@
|
||||
|
||||
[](https://travis-ci.org/freestrings/jsonpath)
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
`Rust` 버전 [JsonPath](https://goessner.net/articles/JsonPath/) 구현이다. `Webassembly`와 `Javascript`에서도 역시 동일한 API 인터페이스를 제공 한다.
|
||||
`Rust` 버전 [JsonPath](https://goessner.net/articles/JsonPath/) 구현으로 `Webassembly`와 `Javascript`에서도 유사한 API 인터페이스를 제공 한다.
|
||||
|
||||
It is an implementation for [JsonPath](https://goessner.net/articles/JsonPath/) written in `Rust`. it provide the same API interface in `Webassembly` and` Javascript` also.
|
||||
It is JsonPath [JsonPath](https://goessner.net/articles/JsonPath/) engine written in `Rust`. it provide a similar API interface in `Webassembly` and` Javascript` too.
|
||||
|
||||
- [Webassembly Demo](https://freestrings.github.io/jsonpath/)
|
||||
- [Rust documentation](https://docs.rs/jsonpath_lib/0.1.6/jsonpath_lib)
|
||||
- [NPM jsonpath-wasm - webassembly](https://www.npmjs.com/package/jsonpath-wasm)
|
||||
|
||||
## Why?
|
||||
## Rust API
|
||||
|
||||
To enjoy Rust!
|
||||
<details><summary><b>jsonpath_lib crate</b></summary>
|
||||
|
||||
## API
|
||||
Go to [`jsonpath_lib` creates.io](https://crates.io/crates/jsonpath_lib)
|
||||
|
||||
[With Javascript](#with-javascript)
|
||||
```rust
|
||||
extern crate jsonpath_lib as jsonpath;
|
||||
```
|
||||
|
||||
- [jsonpath-wasm library](#jsonpath-wasm-library)
|
||||
- [jsonpath-rs library](#jsonpath-rs-library-only-nodejs)
|
||||
- [javascript - jsonpath.select(json: string|object, jsonpath: string)](#javascript---jsonpathselectjson-stringobject-jsonpath-string)
|
||||
- [javascript - jsonpath.compile(jsonpath: string)](#javascript---jsonpathcompilejsonpath-string)
|
||||
- [javascript - jsonpath.selector(json: string|object)](#javascript---jsonpathselectorjson-stringobject)
|
||||
- [javascript - alloc_json, dealloc_json](#javascript---alloc_json-dealloc_json)
|
||||
- [javascript-wasm - examples](https://github.com/freestrings/jsonpath/wiki/Javascript-examples)
|
||||
</details>
|
||||
|
||||
[With Rust (as library)](#with-rust-as-library)
|
||||
<details><summary><b>Rust - jsonpath::Selector struct</b></summary>
|
||||
|
||||
- [jsonpath_lib library](#jsonpath_lib-library)
|
||||
- [rust - jsonpath::select(json: &serde_json::value::Value, jsonpath: &str)](#rust---jsonpathselectjson-serde_jsonvaluevalue-jsonpath-str)
|
||||
- [rust - jsonpath::select_as_str(json_str: &str, jsonpath: &str)](#rust---jsonpathselect_as_strjson-str-jsonpath-str)
|
||||
- [rust - jsonpath::select_as\<T: `serde::de::DeserializeOwned`\>(json_str: &str, jsonpath: &str)](#rust---jsonpathselect_ast-serdededeserializeownedjson-str-jsonpath-str)
|
||||
- [rust - jsonpath::compile(jsonpath: &str)](#rust---jsonpathcompilejsonpath-str)
|
||||
- [rust - jsonpath::selector(json: &serde_json::value::Value)](#rust---jsonpathselectorjson-serde_jsonvaluevalue)
|
||||
- [rust - jsonpath::selector_as\<T: `serde::de::DeserializeOwned`\>(json: &serde_json::value::Value)](#rust---jsonpathselector_ast-serdededeserializeownedjson-serde_jsonvaluevalue)
|
||||
- [rust - examples](https://github.com/freestrings/jsonpath/wiki/rust-examples)
|
||||
```rust
|
||||
#[derive(Deserialize, PartialEq, Debug)]
|
||||
struct Friend {
|
||||
name: String,
|
||||
age: Option<u8>,
|
||||
}
|
||||
|
||||
[With AWS API Gateway](#)
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
[Simple time check - webassembly](https://github.com/freestrings/jsonpath/wiki/Simple-timecheck---jsonpath-wasm)
|
||||
let mut selector = Selector::new();
|
||||
|
||||
[Simple time check - native addon for NodeJs](https://github.com/freestrings/jsonpath/wiki/Simple-timecheck-jsonpath-native)
|
||||
let result = selector
|
||||
.path("$..[?(@.age >= 30)]").unwrap()
|
||||
.value(&json_obj)
|
||||
.select().unwrap();
|
||||
|
||||
## With Javascript
|
||||
assert_eq!(vec![&json!({"name": "친구3", "age": 30})], result);
|
||||
|
||||
### jsonpath-wasm library
|
||||
let result = selector.select_as_str().unwrap();
|
||||
assert_eq!(r#"[{"name":"친구3","age":30}]"#, result);
|
||||
|
||||
let result = selector.select_as::<Friend>().unwrap();
|
||||
assert_eq!(vec![Friend { name: "친구3".to_string(), age: Some(30) }], result);
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Rust - jsonpath::SelectorMut struct</b></summary>
|
||||
|
||||
```rust
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let mut selector_mut = SelectorMut::new();
|
||||
|
||||
let result = selector_mut
|
||||
.str_path("$..[?(@.age == 20)].age").unwrap()
|
||||
.value(json_obj)
|
||||
.replace_with(&mut |v| {
|
||||
let age = if let Value::Number(n) = v {
|
||||
n.as_u64().unwrap() * 2
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
Some(json!(age))
|
||||
}).unwrap()
|
||||
.take().unwrap();
|
||||
|
||||
assert_eq!(result, json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 40},
|
||||
{"name": "친구2", "age": 40}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}));
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Rust - jsonpath::select(json: &serde_json::value::Value, jsonpath: &str)</b></summary>
|
||||
|
||||
```rust
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let json = jsonpath::select(&json_obj, "$..friends[0]").unwrap();
|
||||
|
||||
assert_eq!(json, vec![
|
||||
&json!({"name": "친구3", "age": 30}),
|
||||
&json!({"name": "친구1", "age": 20})
|
||||
]);
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details><summary><b>Rust - jsonpath::select_as_str(json_str: &str, jsonpath: &str)</b></summary>
|
||||
|
||||
```rust
|
||||
let ret = jsonpath::select_as_str(r#"
|
||||
{
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
}
|
||||
"#, "$..friends[0]").unwrap();
|
||||
|
||||
assert_eq!(ret, r#"[{"name":"친구3","age":30},{"name":"친구1","age":20}]"#);
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Rust - jsonpath::select_as<T: `serde::de::DeserializeOwned`>(json_str: &str, jsonpath: &str)</b></summary>
|
||||
|
||||
```rust
|
||||
#[derive(Deserialize, PartialEq, Debug)]
|
||||
struct Person {
|
||||
name: String,
|
||||
age: u8,
|
||||
phones: Vec<String>,
|
||||
}
|
||||
|
||||
let ret: Vec<Person> = jsonpath::select_as(r#"
|
||||
{
|
||||
"person":
|
||||
{
|
||||
"name": "Doe John",
|
||||
"age": 44,
|
||||
"phones": [
|
||||
"+44 1234567",
|
||||
"+44 2345678"
|
||||
]
|
||||
}
|
||||
}
|
||||
"#, "$.person").unwrap();
|
||||
|
||||
let person = Person {
|
||||
name: "Doe John".to_string(),
|
||||
age: 44,
|
||||
phones: vec!["+44 1234567".to_string(), "+44 2345678".to_string()],
|
||||
};
|
||||
|
||||
assert_eq!(ret[0], person);
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Rust - jsonpath::compile(jsonpath: &str)</b></summary>
|
||||
|
||||
```rust
|
||||
let mut template = jsonpath::compile("$..friends[0]");
|
||||
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let json = template(&json_obj).unwrap();
|
||||
|
||||
assert_eq!(json, vec![
|
||||
&json!({"name": "친구3", "age": 30}),
|
||||
&json!({"name": "친구1", "age": 20})
|
||||
]);
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Rust - jsonpath::selector(json: &serde_json::value::Value)</b></summary>
|
||||
|
||||
```rust
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let mut selector = jsonpath::selector(&json_obj);
|
||||
|
||||
let json = selector("$..friends[0]").unwrap();
|
||||
|
||||
assert_eq!(json, vec![
|
||||
&json!({"name": "친구3", "age": 30}),
|
||||
&json!({"name": "친구1", "age": 20})
|
||||
]);
|
||||
|
||||
let json = selector("$..friends[1]").unwrap();
|
||||
|
||||
assert_eq!(json, vec![
|
||||
&json!({"name": "친구4"}),
|
||||
&json!({"name": "친구2", "age": 20})
|
||||
]);
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Rust - jsonpath::selector_as<T: serde::de::DeserializeOwned>(json: &serde_json::value::Value)</b></summary>
|
||||
|
||||
```rust
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
#[derive(Deserialize, PartialEq, Debug)]
|
||||
struct Friend {
|
||||
name: String,
|
||||
age: Option<u8>,
|
||||
}
|
||||
|
||||
let mut selector = jsonpath::selector_as::<Friend>(&json_obj);
|
||||
|
||||
let json = selector("$..friends[0]").unwrap();
|
||||
|
||||
let ret = vec!(
|
||||
Friend { name: "친구3".to_string(), age: Some(30) },
|
||||
Friend { name: "친구1".to_string(), age: Some(20) }
|
||||
);
|
||||
assert_eq!(json, ret);
|
||||
|
||||
let json = selector("$..friends[1]").unwrap();
|
||||
|
||||
let ret = vec!(
|
||||
Friend { name: "친구4".to_string(), age: None },
|
||||
Friend { name: "친구2".to_string(), age: Some(20) }
|
||||
);
|
||||
|
||||
assert_eq!(json, ret);
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Rust - jsonpath::delete(value: &Value, path: &str)</b></summary>
|
||||
|
||||
```rust
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let ret = jsonpath::delete(json_obj, "$..[?(20 == @.age)]").unwrap();
|
||||
|
||||
assert_eq!(ret, json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
null,
|
||||
null
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}));
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Rust - jsonpath::replace_with<F: FnMut(&Value) -> Value>(value: &Value, path: &str, fun: &mut F)</b></summary>
|
||||
|
||||
```rust
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let ret = jsonpath::replace_with(json_obj, "$..[?(@.age == 20)].age", &mut |v| {
|
||||
let age = if let Value::Number(n) = v {
|
||||
n.as_u64().unwrap() * 2
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
Some(json!(age))
|
||||
}).unwrap();
|
||||
|
||||
assert_eq!(ret, json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 40},
|
||||
{"name": "친구2", "age": 40}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}));
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
[Rust - Other Examples](https://github.com/freestrings/jsonpath/wiki/rust-examples)
|
||||
|
||||
## Javascript API
|
||||
|
||||
<details><summary><b>npm package</b></summary>
|
||||
|
||||
##### jsonpath-wasm
|
||||
|
||||
Goto [`jsonpath-wasm` npmjs.org](https://www.npmjs.com/package/jsonpath-wasm)
|
||||
|
||||
*(not yet published `jsonpath-wasm`)*
|
||||
```javascript
|
||||
// browser
|
||||
import * as jsonpath from "jsonpath-wasm";
|
||||
@ -57,15 +386,104 @@ import * as jsonpath from "jsonpath-wasm";
|
||||
const jsonpath = require('jsonpath-wasm');
|
||||
```
|
||||
|
||||
### jsonpath-rs library (Only NodeJS)
|
||||
##### jsonpath-wasm
|
||||
`wasm-bindgen` 리턴 타입 제약 때문에 빌더 패턴은 지원하지 않는다.
|
||||
|
||||
`jsonpath-rs` is native addon for NodeJs
|
||||
It does not support `builder-pattern` due to the `return type` restriction of `wasm-bindgen`.
|
||||
|
||||
```javascript
|
||||
const jsonpath = require('jsonpath-rs');
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let selector = new jsonpath.Selector();
|
||||
selector.path('$..friends[0]');
|
||||
selector.value(jsonObj);
|
||||
|
||||
let retObj = selector.select();
|
||||
|
||||
console.log(JSON.stringify(ret) == JSON.stringify(retObj));
|
||||
|
||||
// => true
|
||||
```
|
||||
|
||||
### javascript - jsonpath.select(json: string|object, jsonpath: string)
|
||||
The builder-pattern restriction is the same as for the `Selector` class.
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
'school': {
|
||||
'friends': [
|
||||
{'name': '친구1', 'age': 20},
|
||||
{'name': '친구2', 'age': 20},
|
||||
],
|
||||
},
|
||||
'friends': [
|
||||
{'name': '친구3', 'age': 30},
|
||||
{'name': '친구4'},
|
||||
],
|
||||
};
|
||||
|
||||
let selector = new jsonpath.SelectorMut();
|
||||
selector.path('$..[?(@.age == 20)]');
|
||||
|
||||
{
|
||||
selector.value(jsonObj);
|
||||
selector.deleteValue();
|
||||
|
||||
let resultObj = {
|
||||
'school': {'friends': [null, null]},
|
||||
'friends': [
|
||||
{'name': '친구3', 'age': 30},
|
||||
{'name': '친구4'},
|
||||
],
|
||||
};
|
||||
console.log(JSON.stringify(selector.take()) !== JSON.stringify(resultObj));
|
||||
|
||||
// => true
|
||||
}
|
||||
|
||||
{
|
||||
selector.value(jsonObj);
|
||||
selector.replaceWith((v) => {
|
||||
v.age = v.age * 2;
|
||||
return v;
|
||||
});
|
||||
|
||||
let resultObj = {
|
||||
'school': {
|
||||
'friends': [
|
||||
{'name': '친구1', 'age': 40},
|
||||
{'name': '친구2', 'age': 40},
|
||||
],
|
||||
},
|
||||
'friends': [
|
||||
{'name': '친구3', 'age': 30},
|
||||
{'name': '친구4'},
|
||||
],
|
||||
};
|
||||
console.log(JSON.stringify(selector.take()) !== JSON.stringify(resultObj));
|
||||
|
||||
// => true
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.select(json: string|object, jsonpath: string)</b></summary>
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
@ -98,7 +516,9 @@ console.log(
|
||||
// => true, true
|
||||
```
|
||||
|
||||
### javascript - jsonpath.compile(jsonpath: string)
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.compile(jsonpath: string)</b></summary>
|
||||
|
||||
```javascript
|
||||
let template = jsonpath.compile('$..friends[0]');
|
||||
@ -157,8 +577,10 @@ console.log(
|
||||
// => true, true
|
||||
```
|
||||
|
||||
### javascript - jsonpath.selector(json: string|object)
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.selector(json: string|object)</b></summary>
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
@ -198,17 +620,11 @@ console.log(
|
||||
// => true, true
|
||||
```
|
||||
|
||||
### javascript - alloc_json, dealloc_json
|
||||
</details>
|
||||
|
||||
*(not supported in `jsonpath-rs`)*
|
||||
|
||||
wasm-bindgen은 Javascript와 Webassembly 간 값을 주고받을 때 JSON 객체는 String으로 변환되기 때문에, 반복해서 사용되는 JSON 객체를 Webassembly 영역에 생성해 두면 성능에 도움이 된다.
|
||||
|
||||
Since wasm-bindgen converts JSON objects to String when exchanging values between Javascript and Webassembly, it is helpful to create repeated Json objects in Webassembly area.
|
||||
<details><summary><b>Javascript - jsonpath.deleteValue(json: string|object, path: string)</b></summary>
|
||||
|
||||
```javascript
|
||||
const jsonpath = require('@nodejs/jsonpath-wasm');
|
||||
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
@ -222,55 +638,24 @@ let jsonObj = {
|
||||
]
|
||||
};
|
||||
|
||||
// allocate jsonObj in webassembly
|
||||
let ptr = jsonpath.alloc_json(jsonObj);
|
||||
let _1 = jsonpath.deleteValue(jsonObj, '$..friends[0]');
|
||||
let result = jsonpath.deleteValue(_1, '$..friends[1]');
|
||||
|
||||
// `0` is invalid pointer
|
||||
if(ptr == 0) {
|
||||
console.error('invalid ptr');
|
||||
}
|
||||
console.log(JSON.stringify(result) !== JSON.stringify({
|
||||
"school": { "friends": [null, null]},
|
||||
"friends": [null, null]
|
||||
}));
|
||||
|
||||
let path = '$..friends[0]';
|
||||
let template = jsonpath.compile(path);
|
||||
let selector = jsonpath.selector(jsonObj);
|
||||
// create selector as pointer
|
||||
let ptrSelector = jsonpath.selector(ptr);
|
||||
// => true
|
||||
|
||||
let ret1 = selector(path)
|
||||
let ret2 = ptrSelector(path)
|
||||
let ret3 = template(jsonObj);
|
||||
// select as pointer
|
||||
let ret4 = template(ptr);
|
||||
let ret5 = jsonpath.select(jsonObj, path);
|
||||
// select as pointer
|
||||
let ret6 = jsonpath.select(ptr, path);
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret1) == JSON.stringify(ret2),
|
||||
JSON.stringify(ret1) == JSON.stringify(ret3),
|
||||
JSON.stringify(ret1) == JSON.stringify(ret4),
|
||||
JSON.stringify(ret1) == JSON.stringify(ret5),
|
||||
JSON.stringify(ret1) == JSON.stringify(ret6));
|
||||
|
||||
// => true true true true true
|
||||
|
||||
jsonpath.dealloc_json(ptr);
|
||||
```
|
||||
|
||||
## With Rust (as library)
|
||||
</details>
|
||||
|
||||
### jsonpath_lib library
|
||||
<details><summary><b>Javascript - jsonpath.replaceWith(json: string|object, path: string, fun: function(json: object) => json: object</b></summary>
|
||||
|
||||
```rust
|
||||
extern crate jsonpath_lib as jsonpath;
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
```
|
||||
|
||||
### rust - jsonpath::select(json: &serde_json::value::Value, jsonpath: &str)
|
||||
|
||||
```rust
|
||||
let json_obj = json!({
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
@ -280,171 +665,31 @@ let json_obj = json!({
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let json = jsonpath::select(&json_obj, "$..friends[0]").unwrap();
|
||||
|
||||
let ret = json!([
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
]);
|
||||
assert_eq!(json, ret);
|
||||
```
|
||||
|
||||
### rust - jsonpath::select_as_str(json: &str, jsonpath: &str)
|
||||
|
||||
```rust
|
||||
let ret = jsonpath::select_as_str(r#"
|
||||
{
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
}
|
||||
"#, "$..friends[0]").unwrap();
|
||||
|
||||
assert_eq!(ret, r#"[{"name":"친구3","age":30},{"name":"친구1","age":20}]"#);
|
||||
```
|
||||
|
||||
### rust - jsonpath::select_as\<T: `serde::de::DeserializeOwned`\>(json: &str, jsonpath: &str)
|
||||
|
||||
```rust
|
||||
#[derive(Deserialize, PartialEq, Debug)]
|
||||
struct Person {
|
||||
name: String,
|
||||
age: u8,
|
||||
phones: Vec<String>,
|
||||
}
|
||||
|
||||
let ret: Person = jsonpath::select_as(r#"
|
||||
{
|
||||
"person":
|
||||
{
|
||||
"name": "Doe John",
|
||||
"age": 44,
|
||||
"phones": [
|
||||
"+44 1234567",
|
||||
"+44 2345678"
|
||||
]
|
||||
}
|
||||
}
|
||||
"#, "$.person").unwrap();
|
||||
|
||||
let person = Person {
|
||||
name: "Doe John".to_string(),
|
||||
age: 44,
|
||||
phones: vec!["+44 1234567".to_string(), "+44 2345678".to_string()],
|
||||
};
|
||||
|
||||
assert_eq!(person, ret);
|
||||
```
|
||||
let result = jsonpath.replaceWith(jsonObj, '$..friends[0]', (v) => {
|
||||
v.age = v.age * 2;
|
||||
return v;
|
||||
});
|
||||
|
||||
### rust - jsonpath::compile(jsonpath: &str)
|
||||
|
||||
```rust
|
||||
let mut template = jsonpath::compile("$..friends[0]");
|
||||
|
||||
let json_obj = json!({
|
||||
console.log(JSON.stringify(result) === JSON.stringify({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구1", "age": 40},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구3", "age": 60},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
]
|
||||
}));
|
||||
|
||||
let json = template(&json_obj).unwrap();
|
||||
// => true
|
||||
|
||||
let ret = json!([
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
]);
|
||||
|
||||
assert_eq!(json, ret);
|
||||
```
|
||||
|
||||
### rust - jsonpath::selector(json: &serde_json::value::Value)
|
||||
</details>
|
||||
|
||||
```rust
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let mut selector = jsonpath::selector(&json_obj);
|
||||
|
||||
let json = selector("$..friends[0]").unwrap();
|
||||
|
||||
let ret = json!([
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
]);
|
||||
|
||||
assert_eq!(json, ret);
|
||||
|
||||
let json = selector("$..friends[1]").unwrap();
|
||||
|
||||
let ret = json!([
|
||||
{"name": "친구4"},
|
||||
{"name": "친구2", "age": 20}
|
||||
]);
|
||||
|
||||
assert_eq!(json, ret);
|
||||
```
|
||||
|
||||
### rust - jsonpath::selector_as\<T: `serde::de::DeserializeOwned`\>(json: &serde_json::value::Value)
|
||||
|
||||
```rust
|
||||
let json_obj = json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
#[derive(Serialize, Deserialize, PartialEq, Debug)]
|
||||
struct Friend {
|
||||
name: String,
|
||||
age: Option<u8>,
|
||||
}
|
||||
|
||||
let mut selector = jsonpath::selector_as::<Vec<Friend>>(&json_obj);
|
||||
|
||||
let json = selector("$..friends[0]").unwrap();
|
||||
|
||||
let ret = vec!(
|
||||
Friend { name: "친구3".to_string(), age: Some(30) },
|
||||
Friend { name: "친구1".to_string(), age: Some(20) }
|
||||
);
|
||||
assert_eq!(json, ret);
|
||||
|
||||
let json = selector("$..friends[1]").unwrap();
|
||||
|
||||
let ret = vec!(
|
||||
Friend { name: "친구4".to_string(), age: None },
|
||||
Friend { name: "친구2".to_string(), age: Some(20) }
|
||||
);
|
||||
|
||||
assert_eq!(json, ret);
|
||||
```
|
||||
[Javascript - Other Examples](https://github.com/freestrings/jsonpath/wiki/Javascript-examples)
|
bench.sh (executable file): 9 changes
@@ -0,0 +1,9 @@
#!/bin/bash

set -e

#
# rustup default nightly
#

cargo bench --manifest-path ./benchmark/Cargo.toml
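bench.sh simply runs `cargo bench` against the benchmark crate added later in this diff; the `# rustup default nightly` comment is there because those benches use the unstable `test` crate (`#![feature(test)]`). A stripped-down example of that bench style, built only from items that appear in the benches below plus the README's `select` API, might look like this:

```rust
#![feature(test)]
extern crate jsonpath_lib as jsonpath;
#[macro_use]
extern crate serde_json;
extern crate test;

use self::test::Bencher;

#[bench]
fn bench_select_first_friend(b: &mut Bencher) {
    // Illustrative data; the real benches read ./example.json instead.
    let json = json!({"friends": [{"name": "friend1", "age": 20}]});
    b.iter(|| {
        let _ = jsonpath::select(&json, "$..friends[0]").unwrap();
    });
}
```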
benches/bench_bin/.idea/bench_bin.iml (generated): 15 changes
@ -1,15 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="JAVA_MODULE" version="4">
|
||||
<component name="NewModuleRootManager" inherit-compiler-output="true">
|
||||
<exclude-output />
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||
<sourceFolder url="file://$MODULE_DIR$/examples" isTestSource="false" />
|
||||
<sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
|
||||
<sourceFolder url="file://$MODULE_DIR$/benches" isTestSource="true" />
|
||||
<excludeFolder url="file://$MODULE_DIR$/target" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
</module>
|
benches/bench_bin/.idea/encodings.xml (generated): 4 changes
@ -1,4 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Encoding" addBOMForNewFiles="with NO BOM" />
|
||||
</project>
|
benches/bench_bin/.idea/misc.xml (generated): 9 changes
@ -1,9 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="CargoProjects">
|
||||
<cargoProject FILE="$PROJECT_DIR$/Cargo.toml" />
|
||||
</component>
|
||||
<component name="RustProjectSettings">
|
||||
<option name="toolchainHomeDirectory" value="$USER_HOME$/.cargo/bin" />
|
||||
</component>
|
||||
</project>
|
benches/bench_bin/.idea/modules.xml (generated): 8 changes
@ -1,8 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectModuleManager">
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/.idea/bench_bin.iml" filepath="$PROJECT_DIR$/.idea/bench_bin.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
benches/bench_bin/.idea/vcs.xml (generated): 6 changes
@ -1,6 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="VcsDirectoryMappings">
|
||||
<mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
|
||||
</component>
|
||||
</project>
|
benches/bench_bin/.idea/workspace.xml (generated): 152 changes
@ -1,152 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ChangeListManager">
|
||||
<list default="true" id="3fa6f740-0ee1-4afb-b0ae-9239bf5ced3d" name="Default Changelist" comment="">
|
||||
<change beforePath="$PROJECT_DIR$/../bench.rs" beforeDir="false" afterPath="$PROJECT_DIR$/../bench.rs" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/src/main.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/main.rs" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/../bench_node_vs_rust.sh" beforeDir="false" afterPath="$PROJECT_DIR$/../bench_node_vs_rust.sh" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/../javascript/bench.js" beforeDir="false" afterPath="$PROJECT_DIR$/../javascript/bench.js" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/../../nodejs/lib/index.js" beforeDir="false" afterPath="$PROJECT_DIR$/../../nodejs/lib/index.js" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/../../nodejs/native/src/lib.rs" beforeDir="false" afterPath="$PROJECT_DIR$/../../nodejs/native/src/lib.rs" afterDir="false" />
|
||||
</list>
|
||||
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
|
||||
<option name="SHOW_DIALOG" value="false" />
|
||||
<option name="HIGHLIGHT_CONFLICTS" value="true" />
|
||||
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
|
||||
<option name="LAST_RESOLUTION" value="IGNORE" />
|
||||
</component>
|
||||
<component name="FileEditorManager">
|
||||
<leaf SIDE_TABS_SIZE_LIMIT_KEY="300" />
|
||||
</component>
|
||||
<component name="Git.Settings">
|
||||
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$/../.." />
|
||||
</component>
|
||||
<component name="IdeDocumentHistory">
|
||||
<option name="CHANGED_PATHS">
|
||||
<list>
|
||||
<option value="$PROJECT_DIR$/../../src/lib.rs" />
|
||||
<option value="$PROJECT_DIR$/src/main.rs" />
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="ProjectFrameBounds" extendedState="6">
|
||||
<option name="x" value="67" />
|
||||
<option name="y" value="27" />
|
||||
<option name="width" value="1533" />
|
||||
<option name="height" value="1053" />
|
||||
</component>
|
||||
<component name="ProjectLevelVcsManager" settingsEditedManually="true" />
|
||||
<component name="ProjectView">
|
||||
<navigator proportions="" version="1">
|
||||
<foldersAlwaysOnTop value="true" />
|
||||
</navigator>
|
||||
<panes>
|
||||
<pane id="Scope" />
|
||||
<pane id="PackagesPane" />
|
||||
<pane id="ProjectPane">
|
||||
<subPane>
|
||||
<expand>
|
||||
<path>
|
||||
<item name="bench_bin" type="b2602c69:ProjectViewProjectNode" />
|
||||
<item name="bench_bin" type="462c0819:PsiDirectoryNode" />
|
||||
</path>
|
||||
<path>
|
||||
<item name="bench_bin" type="b2602c69:ProjectViewProjectNode" />
|
||||
<item name="bench_bin" type="462c0819:PsiDirectoryNode" />
|
||||
<item name="src" type="462c0819:PsiDirectoryNode" />
|
||||
</path>
|
||||
</expand>
|
||||
<select />
|
||||
</subPane>
|
||||
</pane>
|
||||
</panes>
|
||||
</component>
|
||||
<component name="PropertiesComponent">
|
||||
<property name="last_opened_file_path" value="$PROJECT_DIR$" />
|
||||
<property name="org.rust.cargo.project.model.PROJECT_DISCOVERY" value="true" />
|
||||
</component>
|
||||
<component name="RunDashboard">
|
||||
<option name="ruleStates">
|
||||
<list>
|
||||
<RuleState>
|
||||
<option name="name" value="ConfigurationTypeDashboardGroupingRule" />
|
||||
</RuleState>
|
||||
<RuleState>
|
||||
<option name="name" value="StatusDashboardGroupingRule" />
|
||||
</RuleState>
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="SvnConfiguration">
|
||||
<configuration />
|
||||
</component>
|
||||
<component name="TaskManager">
|
||||
<task active="true" id="Default" summary="Default task">
|
||||
<changelist id="3fa6f740-0ee1-4afb-b0ae-9239bf5ced3d" name="Default Changelist" comment="" />
|
||||
<created>1552690262696</created>
|
||||
<option name="number" value="Default" />
|
||||
<option name="presentableId" value="Default" />
|
||||
<updated>1552690262696</updated>
|
||||
</task>
|
||||
<servers />
|
||||
</component>
|
||||
<component name="ToolWindowManager">
|
||||
<frame x="67" y="25" width="1853" height="1055" extended-state="6" />
|
||||
<layout>
|
||||
<window_info active="true" content_ui="combo" id="Project" order="0" visible="true" weight="0.23076923" />
|
||||
<window_info id="Structure" order="1" side_tool="true" weight="0.25" />
|
||||
<window_info id="Designer" order="2" />
|
||||
<window_info id="Favorites" order="3" side_tool="true" />
|
||||
<window_info anchor="bottom" id="Message" order="0" />
|
||||
<window_info anchor="bottom" id="Find" order="1" />
|
||||
<window_info anchor="bottom" id="Run" order="2" weight="0.32829374" />
|
||||
<window_info anchor="bottom" id="Debug" order="3" weight="0.4" />
|
||||
<window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
|
||||
<window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
|
||||
<window_info anchor="bottom" id="TODO" order="6" />
|
||||
<window_info anchor="bottom" id="Version Control" order="7" />
|
||||
<window_info anchor="bottom" id="Terminal" order="8" />
|
||||
<window_info anchor="bottom" id="Event Log" order="9" side_tool="true" />
|
||||
<window_info anchor="right" id="Commander" internal_type="SLIDING" order="0" type="SLIDING" weight="0.4" />
|
||||
<window_info anchor="right" id="Ant Build" order="1" weight="0.25" />
|
||||
<window_info anchor="right" content_ui="combo" id="Hierarchy" order="2" weight="0.25" />
|
||||
<window_info anchor="right" id="Maven" order="3" />
|
||||
<window_info anchor="right" id="Cargo" order="4" />
|
||||
<window_info anchor="right" id="Palette	" order="5" />
|
||||
</layout>
|
||||
</component>
|
||||
<component name="editorHistoryManager">
|
||||
<entry file="file://$PROJECT_DIR$/../../src/lib.rs">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="353">
|
||||
<caret line="297" column="1" lean-forward="true" selection-start-line="297" selection-start-column="1" selection-end-line="297" selection-end-column="1" />
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
<entry file="file://$USER_HOME$/.cargo/registry/src/github.com-1ecc6299db9ec823/serde_json-1.0.39/src/de.rs">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="279">
|
||||
<caret line="2311" column="47" selection-start-line="2311" selection-start-column="47" selection-end-line="2311" selection-end-column="47" />
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
<entry file="file://$USER_HOME$/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/src/libcore/str/mod.rs">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="197">
|
||||
<caret line="3895" column="11" selection-start-line="3895" selection-start-column="11" selection-end-line="3895" selection-end-column="11" />
|
||||
<folding>
|
||||
<element signature="e#126082#126083#0" expanded="true" />
|
||||
<element signature="e#126120#126121#0" expanded="true" />
|
||||
</folding>
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
<entry file="file://$PROJECT_DIR$/src/main.rs">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="-680">
|
||||
<caret line="6" column="13" selection-start-line="6" selection-start-column="13" selection-end-line="6" selection-end-column="13" />
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
</component>
|
||||
</project>
|
@@ -1,7 +0,0 @@
[package]
name = "bench_bin"
version = "0.1.1"

[dependencies]
jsonpath_lib = {path = "../../"}
serde_json = { version = "1.0", features = ["preserve_order"] }
@ -1,43 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
DIR="$(pwd)"
|
||||
|
||||
cd "${DIR}"/bench_bin && cargo build --release
|
||||
|
||||
ITER=100000
|
||||
|
||||
printf "\n\n$..book[?(@.price<30 && @.category=="fiction")] (loop ${ITER})"
|
||||
printf "\n\n"
|
||||
|
||||
__default () {
|
||||
echo "Rust - select: " && time ./bench.sh select ${ITER}
|
||||
printf "\n"
|
||||
sleep 1
|
||||
cd "${DIR}"/javascript && echo "NodeJs - jsonpath - query: " && time ./bench.sh jsonpath ${ITER}
|
||||
printf "\n"
|
||||
sleep 1
|
||||
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-rs - select:" && time ./bench.sh nativeSelect ${ITER}
|
||||
}
|
||||
|
||||
__extra () {
|
||||
echo "Rust - selector: " && time ./bench.sh selector ${ITER}
|
||||
printf "\n"
|
||||
sleep 1
|
||||
echo "Rust - compile: " && time ./bench.sh compile ${ITER}
|
||||
printf "\n"
|
||||
sleep 1
|
||||
cd "${DIR}"/javascript && echo "NodeJs - jsonpath - query: " && time ./bench.sh jsonpath ${ITER}
|
||||
printf "\n"
|
||||
sleep 1
|
||||
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-rs - selector: " && time ./bench.sh nativeSelector ${ITER}
|
||||
printf "\n"
|
||||
sleep 1
|
||||
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-rs - compile: " && time ./bench.sh nativeCompile ${ITER}
|
||||
}
|
||||
|
||||
if [ "$1" = "extra" ]; then
|
||||
__extra
|
||||
else
|
||||
__default
|
||||
fi
|
benches/javascript/package-lock.json (generated): 222 changes
@ -1,222 +0,0 @@
|
||||
{
|
||||
"name": "jsonpath-benches",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
"JSONSelect": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/JSONSelect/-/JSONSelect-0.4.0.tgz",
|
||||
"integrity": "sha1-oI7cxn6z/L6Z7WMIVTRKDPKCu40="
|
||||
},
|
||||
"cjson": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/cjson/-/cjson-0.2.1.tgz",
|
||||
"integrity": "sha1-c82KrWXZ4VBfmvF0TTt5wVJ2gqU="
|
||||
},
|
||||
"colors": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/colors/-/colors-0.5.1.tgz",
|
||||
"integrity": "sha1-fQAj6usVTo7p/Oddy5I9DtFmd3Q="
|
||||
},
|
||||
"deep-is": {
|
||||
"version": "0.1.3",
|
||||
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz",
|
||||
"integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ="
|
||||
},
|
||||
"ebnf-parser": {
|
||||
"version": "0.1.10",
|
||||
"resolved": "https://registry.npmjs.org/ebnf-parser/-/ebnf-parser-0.1.10.tgz",
|
||||
"integrity": "sha1-zR9rpHfFY4xAyX7ZtXLbW6tdgzE="
|
||||
},
|
||||
"escodegen": {
|
||||
"version": "0.0.21",
|
||||
"resolved": "https://registry.npmjs.org/escodegen/-/escodegen-0.0.21.tgz",
|
||||
"integrity": "sha1-U9ZSz6EDA4gnlFilJmxf/HCcY8M=",
|
||||
"requires": {
|
||||
"esprima": "~1.0.2",
|
||||
"estraverse": "~0.0.4",
|
||||
"source-map": ">= 0.1.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"esprima": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz",
|
||||
"integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0="
|
||||
}
|
||||
}
|
||||
},
|
||||
"esprima": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz",
|
||||
"integrity": "sha1-dqD9Zvz+FU/SkmZ9wmQBl1CxZXs="
|
||||
},
|
||||
"estraverse": {
|
||||
"version": "0.0.4",
|
||||
"resolved": "https://registry.npmjs.org/estraverse/-/estraverse-0.0.4.tgz",
|
||||
"integrity": "sha1-AaCTLf7ldGhKWYr1pnw7+bZCjbI="
|
||||
},
|
||||
"esutils": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz",
|
||||
"integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs="
|
||||
},
|
||||
"fast-levenshtein": {
|
||||
"version": "2.0.6",
|
||||
"resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
|
||||
"integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc="
|
||||
},
|
||||
"jison": {
|
||||
"version": "0.4.13",
|
||||
"resolved": "https://registry.npmjs.org/jison/-/jison-0.4.13.tgz",
|
||||
"integrity": "sha1-kEFwfWIkE2f1iDRTK58ZwsNvrHg=",
|
||||
"requires": {
|
||||
"JSONSelect": "0.4.0",
|
||||
"cjson": "~0.2.1",
|
||||
"ebnf-parser": "~0.1.9",
|
||||
"escodegen": "0.0.21",
|
||||
"esprima": "1.0.x",
|
||||
"jison-lex": "0.2.x",
|
||||
"lex-parser": "~0.1.3",
|
||||
"nomnom": "1.5.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"esprima": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz",
|
||||
"integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0="
|
||||
}
|
||||
}
|
||||
},
|
||||
"jison-lex": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/jison-lex/-/jison-lex-0.2.1.tgz",
|
||||
"integrity": "sha1-rEuBXozOUTLrErXfz+jXB7iETf4=",
|
||||
"requires": {
|
||||
"lex-parser": "0.1.x",
|
||||
"nomnom": "1.5.2"
|
||||
}
|
||||
},
|
||||
"jsonpath": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/jsonpath/-/jsonpath-1.0.1.tgz",
|
||||
"integrity": "sha512-HY5kSg82LHIs0r0h9gYBwpNc1w1qGY0qJ7al01W1bJltsN2lp+mjjA/a79gXWuvD6Xf8oPkD2d5uKMZQXTGzqA==",
|
||||
"requires": {
|
||||
"esprima": "1.2.2",
|
||||
"jison": "0.4.13",
|
||||
"static-eval": "2.0.2",
|
||||
"underscore": "1.7.0"
|
||||
}
|
||||
},
|
||||
"levn": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
|
||||
"integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=",
|
||||
"requires": {
|
||||
"prelude-ls": "~1.1.2",
|
||||
"type-check": "~0.3.2"
|
||||
}
|
||||
},
|
||||
"lex-parser": {
|
||||
"version": "0.1.4",
|
||||
"resolved": "https://registry.npmjs.org/lex-parser/-/lex-parser-0.1.4.tgz",
|
||||
"integrity": "sha1-ZMTwJfF/1Tv7RXY/rrFvAVp0dVA="
|
||||
},
|
||||
"nomnom": {
|
||||
"version": "1.5.2",
|
||||
"resolved": "https://registry.npmjs.org/nomnom/-/nomnom-1.5.2.tgz",
|
||||
"integrity": "sha1-9DRUSKhTz71cDSYyDyR3qwUm/i8=",
|
||||
"requires": {
|
||||
"colors": "0.5.x",
|
||||
"underscore": "1.1.x"
|
||||
},
|
||||
"dependencies": {
|
||||
"underscore": {
|
||||
"version": "1.1.7",
|
||||
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.1.7.tgz",
|
||||
"integrity": "sha1-QLq4S60Z0jAJbo1u9ii/8FXYPbA="
|
||||
}
|
||||
}
|
||||
},
|
||||
"optionator": {
|
||||
"version": "0.8.2",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz",
|
||||
"integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=",
|
||||
"requires": {
|
||||
"deep-is": "~0.1.3",
|
||||
"fast-levenshtein": "~2.0.4",
|
||||
"levn": "~0.3.0",
|
||||
"prelude-ls": "~1.1.2",
|
||||
"type-check": "~0.3.2",
|
||||
"wordwrap": "~1.0.0"
|
||||
}
|
||||
},
|
||||
"prelude-ls": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz",
|
||||
"integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ="
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.7.3",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
|
||||
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
|
||||
"optional": true
|
||||
},
|
||||
"static-eval": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.0.2.tgz",
|
||||
"integrity": "sha512-N/D219Hcr2bPjLxPiV+TQE++Tsmrady7TqAJugLy7Xk1EumfDWS/f5dtBbkRCGE7wKKXuYockQoj8Rm2/pVKyg==",
|
||||
"requires": {
|
||||
"escodegen": "^1.8.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"escodegen": {
|
||||
"version": "1.11.1",
|
||||
"resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.11.1.tgz",
|
||||
"integrity": "sha512-JwiqFD9KdGVVpeuRa68yU3zZnBEOcPs0nKW7wZzXky8Z7tffdYUHbe11bPCV5jYlK6DVdKLWLm0f5I/QlL0Kmw==",
|
||||
"requires": {
|
||||
"esprima": "^3.1.3",
|
||||
"estraverse": "^4.2.0",
|
||||
"esutils": "^2.0.2",
|
||||
"optionator": "^0.8.1",
|
||||
"source-map": "~0.6.1"
|
||||
}
|
||||
},
|
||||
"esprima": {
|
||||
"version": "3.1.3",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-3.1.3.tgz",
|
||||
"integrity": "sha1-/cpRzuYTOJXjyI1TXOSdv/YqRjM="
|
||||
},
|
||||
"estraverse": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz",
|
||||
"integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM="
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"type-check": {
|
||||
"version": "0.3.2",
|
||||
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz",
|
||||
"integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=",
|
||||
"requires": {
|
||||
"prelude-ls": "~1.1.2"
|
||||
}
|
||||
},
|
||||
"underscore": {
|
||||
"version": "1.7.0",
|
||||
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz",
|
||||
"integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk="
|
||||
},
|
||||
"wordwrap": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
|
||||
"integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus="
|
||||
}
|
||||
}
|
||||
}
|
benches/package-lock.json (generated): 3 changes
@@ -1,3 +0,0 @@
{
"lockfileVersion": 1
}
benchmark/.gitignore (vendored, normal file): 4 changes
@@ -0,0 +1,4 @@
.idea/*
.vscode
/target/
Cargo.lock
benchmark/Cargo.toml (normal file): 17 changes
@@ -0,0 +1,17 @@
[package]
name = "jsonpath_lib_benches"
version = "0.1.0"
authors = ["Changseok Han <freestrings@gmail.com>"]
description = "jsonpath_lib benchmark"
license = "MIT"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["preserve_order"] }
jsonpath_lib = { path = "../" }

[dev-dependencies]
bencher = "0.1.5"

[[bin]]
name = "jsonpath_lib_benches"
path = "src/main.rs"
benchmark/bench_bin/Cargo.toml (normal file): 7 changes
@@ -0,0 +1,7 @@
[package]
name = "bench_bin"
version = "0.2.0"

[dependencies]
jsonpath_lib = {path = "../../"}
serde_json = { version = "1.0", features = ["preserve_order"] }
@@ -36,7 +36,8 @@ __extra () {
    cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm - compile: " && time ./bench.sh wasmCompile ${ITER}
    printf "\n"
    sleep 1
    cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm - compile-alloc: " && time ./bench.sh wasmCompileAlloc ${ITER}
    cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm - Selector: " && time ./bench.sh wasmSelectorClass ${ITER}
    printf "\n"
}

if [ "$1" = "extra" ]; then
@@ -1,4 +1,5 @@
#![feature(test)]
extern crate bencher;
extern crate jsonpath_lib as jsonpath;
extern crate serde;
extern crate serde_json;
@@ -9,6 +10,8 @@ use std::io::Read;
use serde::Deserialize;
use serde_json::Value;

use jsonpath::{SelectorMut, Selector};

use self::test::Bencher;

fn read_json(path: &str) -> String {
@@ -19,7 +22,7 @@ fn read_json(path: &str) -> String {
}

fn get_string() -> String {
    read_json("./benches/example.json")
    read_json("./example.json")
}

fn get_json() -> Value {
@@ -98,7 +101,37 @@ fn bench_select_as(b: &mut Bencher) {

    b.iter(move || {
        for _ in 1..100 {
            let _: Book = jsonpath::select_as(&json, r#"$..book[?(@.price<30 && @.category=="fiction")][0]"#).unwrap();
            let _: Vec<Book> = jsonpath::select_as(&json, r#"$..book[?(@.price<30 && @.category=="fiction")][0]"#).unwrap();
        }
    });
}

#[bench]
fn bench_delete(b: &mut Bencher) {
    let json = get_json();
    let mut selector = SelectorMut::default();
    let _ = selector.str_path(get_path());

    b.iter(move || {
        for _ in 1..100 {
            let _ = selector.value(json.clone()).delete();
        }
    });
}

#[bench]
fn bench_select_to_compare_with_delete(b: &mut Bencher) {
    let json = &get_json();

    let mut selector = Selector::default();
    let _ = selector.str_path(get_path());

    b.iter(move || {
        for _ in 1..100 {
            let json = json.clone();
            let mut s = Selector::default();
            let _ = s.compiled_path(selector.node_ref().unwrap()).value(&json);
            let _ = s.select();
        }
    });
}
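The two benches added above lean on two library entry points that appear only here in this comparison: `SelectorMut` for deleting matched values in place, and `Selector::compiled_path`/`node_ref` for reusing an already-parsed path. The sketch below is not part of the diff; it is a minimal, hedged illustration built only from the calls shown in the benchmark code, with a small inline document standing in for the `example.json` file the benches load.

```rust
extern crate jsonpath_lib as jsonpath;
extern crate serde_json;

use jsonpath::{Selector, SelectorMut};
use serde_json::Value;

fn main() {
    // Inline stand-in for ./example.json (assumption: any JSON document works here).
    let json: Value = serde_json::from_str(
        r#"{"store":{"book":[{"category":"fiction","price":10},{"category":"reference","price":50}]}}"#,
    )
    .unwrap();

    let path = r#"$..book[?(@.price<30 && @.category=="fiction")]"#;

    // In-place delete, as bench_delete does: parse the path once, then apply it to an owned value.
    let mut deleter = SelectorMut::default();
    let _ = deleter.str_path(path);
    let _ = deleter.value(json.clone()).delete();

    // Reuse the parsed path for a read-only select, as bench_select_to_compare_with_delete does.
    let mut parsed = Selector::default();
    let _ = parsed.str_path(path);

    let mut s = Selector::default();
    let _ = s.compiled_path(parsed.node_ref().unwrap()).value(&json);
    match s.select() {
        Ok(found) => println!("matched {} value(s)", found.len()),
        Err(e) => eprintln!("select failed: {:?}", e),
    }
}
```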
89 benchmark/benches/bench_example.rs Normal file
@@ -0,0 +1,89 @@
|
||||
#![feature(test)]
|
||||
extern crate bencher;
|
||||
extern crate jsonpath_lib as jsonpath;
|
||||
extern crate serde;
|
||||
extern crate serde_json;
|
||||
extern crate test;
|
||||
|
||||
use std::io::Read;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
use self::test::Bencher;
|
||||
|
||||
fn read_json(path: &str) -> String {
|
||||
let mut f = std::fs::File::open(path).unwrap();
|
||||
let mut contents = String::new();
|
||||
f.read_to_string(&mut contents).unwrap();
|
||||
contents
|
||||
}
|
||||
|
||||
fn get_string() -> String {
|
||||
read_json("./example.json")
|
||||
}
|
||||
|
||||
fn get_json() -> Value {
|
||||
let string = get_string();
|
||||
serde_json::from_str(string.as_str()).unwrap()
|
||||
}
|
||||
|
||||
fn get_path(i: usize) -> &'static str {
|
||||
let paths = vec![
|
||||
"$.store.book[*].author", //0
|
||||
"$..author", //1
|
||||
"$.store.*", //2
|
||||
"$.store..price", //3
|
||||
"$..book[2]", //4
|
||||
"$..book[-2]", //5
|
||||
"$..book[0,1]", //6
|
||||
"$..book[:2]", //7
|
||||
"$..book[1:2]", //8
|
||||
"$..book[-2:]", //9
|
||||
"$..book[2:]", //10
|
||||
"$..book[?(@.isbn)]", //11
|
||||
"$.store.book[?(@.price == 10)]", //12
|
||||
"$..*", //13
|
||||
"$..book[ ?( (@.price < 13 || $.store.bicycle.price < @.price) && @.price <=10 ) ]", //14
|
||||
"$.store.book[?( (@.price < 10 || @.price > 10) && @.price > 10 )]"
|
||||
];
|
||||
paths[i]
|
||||
}
|
||||
|
||||
fn _selector(b: &mut Bencher, index: usize) {
|
||||
let json = get_json();
|
||||
b.iter(move || {
|
||||
for _ in 1..100 {
|
||||
let mut selector = jsonpath::Selector::default();
|
||||
let _ = selector.str_path(get_path(index));
|
||||
selector.value(&json);
|
||||
let r = selector.select();
|
||||
if r.is_err() {
|
||||
panic!()
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
macro_rules! selector {
|
||||
($name:ident, $i:expr) => {
|
||||
#[bench]
|
||||
fn $name(b: &mut Bencher) { _selector(b, $i); }
|
||||
};
|
||||
}
|
||||
|
||||
selector!(example0_1, 0);
|
||||
selector!(example1_1, 1);
|
||||
selector!(example2_1, 2);
|
||||
selector!(example3_1, 3);
|
||||
selector!(example4_1, 4);
|
||||
selector!(example5_1, 5);
|
||||
selector!(example6_1, 6);
|
||||
selector!(example7_1, 7);
|
||||
selector!(example8_1, 8);
|
||||
selector!(example9_1, 9);
|
||||
selector!(example_10_1, 10);
|
||||
selector!(example_11_1, 11);
|
||||
selector!(example_12_1, 12);
|
||||
selector!(example_13_1, 13);
|
||||
selector!(example_14_1, 14);
|
||||
selector!(example_15_1, 15);
|
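The generated benches above all drive `Selector` with one of the paths returned by `get_path`. For a quick check outside the bench harness, the same paths can also be fed to the library's one-shot `select` helper, the call the Node.js binding elsewhere in this comparison wraps. A small hedged sketch, with an inline document standing in for `./example.json`:

```rust
extern crate jsonpath_lib as jsonpath;
extern crate serde_json;

use serde_json::Value;

fn main() {
    // Inline stand-in for ./example.json; only the shape matters for this sketch.
    let json: Value = serde_json::from_str(
        r#"{"store":{"book":[{"author":"A","price":8.95},{"author":"B","price":12.99}],"bicycle":{"price":19.95}}}"#,
    )
    .unwrap();

    // Indices 0, 3 and 11 from the path list used by the benches above.
    for &path in &[
        "$.store.book[*].author",
        "$.store..price",
        "$..book[?(@.isbn)]",
    ] {
        match jsonpath::select(&json, path) {
            Ok(found) => println!("{} -> {} match(es)", path, found.len()),
            Err(e) => eprintln!("{} failed: {:?}", path, e),
        }
    }
}
```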
4501 benchmark/big_example.json Normal file
File diff suppressed because it is too large
@@ -34,4 +34,4 @@
    }
  },
  "expensive": 10
}
}
@ -42,8 +42,7 @@ function getJson() {
|
||||
}
|
||||
const path = '$..book[?(@.price<30 && @.category=="fiction")]';
|
||||
const jp = require('jsonpath');
|
||||
const jpw = require('@nodejs/jsonpath-wasm');
|
||||
const jpwRs = require('jsonpath-rs');
|
||||
const jpw = require('jsonpath-wasm');
|
||||
|
||||
function jsonpath() {
|
||||
for (var i = 0; i < iter; i++) {
|
||||
@ -51,26 +50,6 @@ function jsonpath() {
|
||||
}
|
||||
}
|
||||
|
||||
function nativeCompile() {
|
||||
let template = jpwRs.compile(path);
|
||||
for (var i = 0; i < iter; i++) {
|
||||
let _ = template(JSON.stringify(json));
|
||||
}
|
||||
}
|
||||
|
||||
function nativeSelector() {
|
||||
let selector = jpwRs.selector(getJson());
|
||||
for (var i = 0; i < iter; i++) {
|
||||
let _ = selector(path);
|
||||
}
|
||||
}
|
||||
|
||||
function nativeSelect() {
|
||||
for (var i = 0; i < iter; i++) {
|
||||
let _ = jpwRs.select(JSON.stringify(json), path);
|
||||
}
|
||||
}
|
||||
|
||||
function wasmSelector() {
|
||||
let selector = jpw.selector(getJson());
|
||||
for (var i = 0; i < iter; i++) {
|
||||
@ -85,42 +64,18 @@ function wasmCompile() {
|
||||
}
|
||||
}
|
||||
|
||||
function wasmCompileAlloc() {
|
||||
let ptr = jpw.alloc_json(getJson());
|
||||
if (ptr == 0) {
|
||||
console.error('Invalid pointer');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
let template = jpw.compile(path);
|
||||
for (var i = 0; i < iter; i++) {
|
||||
let _ = template(ptr);
|
||||
}
|
||||
} finally {
|
||||
jpw.dealloc_json(ptr);
|
||||
}
|
||||
}
|
||||
|
||||
function wasmSelect() {
|
||||
for (var i = 0; i < iter; i++) {
|
||||
let _ = jpw.select(getJson(), path);
|
||||
}
|
||||
}
|
||||
|
||||
function wasmSelectAlloc() {
|
||||
let ptr = jpw.alloc_json(getJson());
|
||||
if (ptr == 0) {
|
||||
console.error('Invalid pointer');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
for (var i = 0; i < iter; i++) {
|
||||
let _ = jpw.select(ptr, path);
|
||||
}
|
||||
} finally {
|
||||
jpw.dealloc_json(ptr);
|
||||
function wasmSelectorClass() {
|
||||
let selector = new jpw.Selector();
|
||||
for (var i = 0; i < iter; i++) {
|
||||
selector.path(path);
|
||||
selector.value(jsonStr);
|
||||
let _ = selector.select();
|
||||
}
|
||||
}
|
||||
|
1 benchmark/src/main.rs Normal file
@@ -0,0 +1 @@
fn main() {}
91 build-wasm.sh Executable file
@@ -0,0 +1,91 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
# project_root
|
||||
DIR="$(pwd)"
|
||||
WASM="${DIR}"/wasm
|
||||
WASM_WWW="${WASM}"/www
|
||||
WASM_WWW_BENCH="${WASM}"/www_bench
|
||||
WASM_NODEJS_PKG="${WASM}"/nodejs_pkg
|
||||
WASM_WEB_PKG="${WASM}"/web_pkg
|
||||
WASM_TEST="${WASM}"/tests
|
||||
DOCS="${DIR}"/docs
|
||||
DOCS_BENCH="${DOCS}"/bench
|
||||
|
||||
__msg () {
|
||||
echo ">>>>>>>>>>$1<<<<<<<<<<"
|
||||
}
|
||||
|
||||
__cargo_clean () {
|
||||
rm -f "${DIR}"/Cargo.lock
|
||||
rm -f "${WASM}"/Cargo.lock
|
||||
cd "${WASM}" && cargo clean && \
|
||||
cd "${DIR}" && cargo clean
|
||||
}
|
||||
|
||||
echo
|
||||
__msg "clean wasm"
|
||||
rm -rf \
|
||||
"${WASM_NODEJS_PKG}" \
|
||||
"${WASM_WEB_PKG}" \
|
||||
"${WASM_WWW}"/dist \
|
||||
"${WASM_WWW}"/node_modules \
|
||||
"${WASM_WWW}"/package-lock.json \
|
||||
"${WASM_WWW_BENCH}"/dist \
|
||||
"${WASM_WWW_BENCH}"/node_modules \
|
||||
"${WASM_WWW_BENCH}"/package-lock.json \
|
||||
"${WASM_TEST}"/node_modules \
|
||||
"${WASM_TEST}"/package-lock.json
|
||||
|
||||
__msg "clean cargo clean"
|
||||
__cargo_clean
|
||||
|
||||
echo
|
||||
wasm_pack_version=$(wasm-pack -V)
|
||||
__msg "wasm-pack: ${wasm_pack_version}"
|
||||
|
||||
echo
|
||||
__msg "wasm-pack nodejs"
|
||||
cd "${WASM}" && wasm-pack build --release --target "nodejs" --out-dir "${WASM_NODEJS_PKG}"
|
||||
|
||||
__msg "npm install: wasm test"
|
||||
cd "${WASM_TEST}" && npm install "${WASM_NODEJS_PKG}" && npm install
|
||||
|
||||
echo
|
||||
__msg "wasm test"
|
||||
cd "${WASM_TEST}" && npm test
|
||||
|
||||
if [ "$1" = "docs" ]; then
|
||||
echo
|
||||
__msg "wasm-pack web"
|
||||
cd "${WASM}" && wasm-pack build --release --out-dir "${WASM_WEB_PKG}"
|
||||
|
||||
echo
|
||||
__msg "jsonpath-wasm npm link"
|
||||
cd "${WASM_WEB_PKG}" && npm link
|
||||
|
||||
__msg "npm install: wasm"
|
||||
cd "${WASM_WWW}" && npm install
|
||||
__msg "npm install: wasm_bench"
|
||||
cd "${WASM_WWW_BENCH}" && npm install
|
||||
|
||||
echo
|
||||
__msg "link"
|
||||
cd "${WASM_WWW}" && npm link jsonpath-wasm
|
||||
cd "${WASM_WWW_BENCH}" && npm link jsonpath-wasm
|
||||
|
||||
echo
|
||||
__msg "docs"
|
||||
cd "${WASM_WWW}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS}"/*.js "${DOCS}"/*.wasm "${DOCS}"/*.html && \
|
||||
cp "${WASM_WWW}"/dist/*.* "${DOCS}"/
|
||||
|
||||
cd "${WASM_WWW_BENCH}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS_BENCH}"/*.js "${DOCS_BENCH}"/*.wasm "${DOCS_BENCH}"/*.html && \
|
||||
cp "${WASM_WWW_BENCH}"/dist/*.* "${DOCS_BENCH}"/
|
||||
fi
|
||||
|
||||
__msg "wasm done"
|
88 build.sh
@@ -1,88 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
# project_root
|
||||
DIR="$(pwd)"
|
||||
WASM="${DIR}"/wasm
|
||||
WASM_WWW="${WASM}"/www
|
||||
WASM_WWW_BENCH="${WASM}"/www_bench
|
||||
WASM_BROWSER_PKG="${WASM}"/browser_pkg
|
||||
WASM_NODEJS_PKG="${WASM}"/nodejs_pkg
|
||||
BENCHES="${DIR}"/benches
|
||||
BENCHES_JS="${BENCHES}"/javascript
|
||||
NODEJS="${DIR}"/nodejs
|
||||
DOCS="${DIR}"/docs
|
||||
DOCS_BENCH="${DOCS}"/bench
|
||||
|
||||
__msg () {
|
||||
echo ">>>>>>>>>>$1<<<<<<<<<<"
|
||||
}
|
||||
|
||||
__cargo_clean () {
|
||||
cd "${BENCHES}"/bench_bin && cargo clean && \
|
||||
cd "${NODEJS}"/native && cargo clean && \
|
||||
cd "${WASM}" && cargo clean && \
|
||||
cd "${DIR}" && cargo clean
|
||||
}
|
||||
|
||||
echo
|
||||
__msg "clean"
|
||||
rm -rf \
|
||||
"${WASM_NODEJS_PKG}" \
|
||||
"${WASM_BROWSER_PKG}" \
|
||||
"${BENCHES_JS}"/node_modules \
|
||||
"${NODEJS}"/node_modules \
|
||||
"${WASM_WWW}"/node_modules \
|
||||
"${WASM_WWW_BENCH}"/node_modules \
|
||||
"${WASM_WWW}"/dist \
|
||||
"${WASM_WWW_BENCH}"/dist
|
||||
|
||||
if [ "$1" = "all" ]; then
|
||||
__msg "clean targets"
|
||||
__cargo_clean
|
||||
fi
|
||||
|
||||
__msg "npm install"
|
||||
echo
|
||||
cd "${WASM_WWW}" && npm install
|
||||
cd "${WASM_WWW_BENCH}" && npm install
|
||||
cd "${NODEJS}" && npm install
|
||||
cd "${BENCHES_JS}" && npm install
|
||||
|
||||
echo
|
||||
echo
|
||||
__msg "wasm-pack"
|
||||
cd "${WASM}" && \
|
||||
wasm-pack build --target=nodejs --scope nodejs --out-dir nodejs_pkg && \
|
||||
cd "${WASM_NODEJS_PKG}" && npm link
|
||||
|
||||
cd "${WASM}" && \
|
||||
wasm-pack build --target=browser --scope browser --out-dir browser_pkg && \
|
||||
cd "${WASM_BROWSER_PKG}" && npm link
|
||||
|
||||
echo
|
||||
__msg "link"
|
||||
cd "${WASM_WWW}" && \
|
||||
npm link @browser/jsonpath-wasm
|
||||
|
||||
cd "${WASM_WWW_BENCH}" && \
|
||||
npm link @browser/jsonpath-wasm
|
||||
|
||||
cd "${BENCHES_JS}" && \
|
||||
npm link @nodejs/jsonpath-wasm && \
|
||||
npm link jsonpath-rs
|
||||
|
||||
echo
|
||||
__msg "docs"
|
||||
cd "${WASM_WWW}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS}"/*.js "${DOCS}"/*.wasm "${DOCS}"/*.html && \
|
||||
cp "${WASM_WWW}"/dist/*.* "${DOCS}"/
|
||||
|
||||
cd "${WASM_WWW_BENCH}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS_BENCH}"/*.js "${DOCS_BENCH}"/*.wasm "${DOCS_BENCH}"/*.html && \
|
||||
cp "${WASM_WWW_BENCH}"/dist/*.* "${DOCS_BENCH}"/
|
||||
|
||||
__msg "done"
|
11 clippy.sh Executable file
@@ -0,0 +1,11 @@
#!/usr/bin/env bash

set -e

cargo clean
cargo clippy -- -D warnings
cargo build --verbose --all
cargo clippy --all-targets --all-features -- -D warnings -A clippy::cognitive_complexity
cargo test --verbose --all
cd wasm && cargo clippy -- -D warnings -A clippy::suspicious_else_formatting
cd ../
9 coverage.sh Executable file
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

#
# cargo install cargo-tarpaulin
#

set -e

cargo tarpaulin --exclude-files nodejs wasm parser/mod.rs -v --all
File diff suppressed because one or more lines are too long
38 docs/1.bootstrap.js Normal file
File diff suppressed because one or more lines are too long
BIN docs/a9530753c3f0aa3c5ead.module.wasm Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN docs/bench/a9530753c3f0aa3c5ead.module.wasm Normal file
Binary file not shown.
73 docs/bench/bootstrap.js vendored
@@ -10,7 +10,7 @@
|
||||
/******/ var moduleId, chunkId, i = 0, resolves = [];
|
||||
/******/ for(;i < chunkIds.length; i++) {
|
||||
/******/ chunkId = chunkIds[i];
|
||||
/******/ if(installedChunks[chunkId]) {
|
||||
/******/ if(Object.prototype.hasOwnProperty.call(installedChunks, chunkId) && installedChunks[chunkId]) {
|
||||
/******/ resolves.push(installedChunks[chunkId][0]);
|
||||
/******/ }
|
||||
/******/ installedChunks[chunkId] = 0;
|
||||
@ -52,47 +52,53 @@
|
||||
/******/ function promiseResolve() { return Promise.resolve(); }
|
||||
/******/
|
||||
/******/ var wasmImportObjects = {
|
||||
/******/ "../browser_pkg/jsonpath_wasm_bg.wasm": function() {
|
||||
/******/ "../web_pkg/jsonpath_wasm_bg.wasm": function() {
|
||||
/******/ return {
|
||||
/******/ "./jsonpath_wasm": {
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "./jsonpath_wasm.js": {
|
||||
/******/ "__wbindgen_json_parse": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_json_serialize": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__widl_f_log_1_": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__widl_f_log_1_"](p0i32);
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_number_get": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_number_get"](p0i32,p1i32);
|
||||
/******/ "__wbg_error_e7d3e8dbb31828c8": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbg_error_e7d3e8dbb31828c8"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_call_1ad0eb4a7ab279eb": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbg_call_1ad0eb4a7ab279eb"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_is_string": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_get": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_debug_string": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_debug_string"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_throw": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper102": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper102"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_rethrow": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_rethrow"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper104": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper104"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper28": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper28"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper26": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper26"](p0i32,p1i32,p2i32);
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ };
|
||||
@ -155,6 +161,8 @@
|
||||
/******/ }
|
||||
/******/ script.src = jsonpScriptSrc(chunkId);
|
||||
/******/
|
||||
/******/ // create error before stack unwound to get useful stacktrace later
|
||||
/******/ var error = new Error();
|
||||
/******/ onScriptComplete = function (event) {
|
||||
/******/ // avoid mem leaks in IE.
|
||||
/******/ script.onerror = script.onload = null;
|
||||
@ -164,7 +172,8 @@
|
||||
/******/ if(chunk) {
|
||||
/******/ var errorType = event && (event.type === 'load' ? 'missing' : event.type);
|
||||
/******/ var realSrc = event && event.target && event.target.src;
|
||||
/******/ var error = new Error('Loading chunk ' + chunkId + ' failed.\n(' + errorType + ': ' + realSrc + ')');
|
||||
/******/ error.message = 'Loading chunk ' + chunkId + ' failed.\n(' + errorType + ': ' + realSrc + ')';
|
||||
/******/ error.name = 'ChunkLoadError';
|
||||
/******/ error.type = errorType;
|
||||
/******/ error.request = realSrc;
|
||||
/******/ chunk[1](error);
|
||||
@ -182,7 +191,7 @@
|
||||
/******/
|
||||
/******/ // Fetch + compile chunk loading for webassembly
|
||||
/******/
|
||||
/******/ var wasmModules = {"0":["../browser_pkg/jsonpath_wasm_bg.wasm"]}[chunkId] || [];
|
||||
/******/ var wasmModules = {"1":["../web_pkg/jsonpath_wasm_bg.wasm"]}[chunkId] || [];
|
||||
/******/
|
||||
/******/ wasmModules.forEach(function(wasmModuleId) {
|
||||
/******/ var installedWasmModuleData = installedWasmModules[wasmModuleId];
|
||||
@ -192,7 +201,7 @@
|
||||
/******/ promises.push(installedWasmModuleData);
|
||||
/******/ else {
|
||||
/******/ var importObject = wasmImportObjects[wasmModuleId]();
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../browser_pkg/jsonpath_wasm_bg.wasm":"c615fa3fad4c084c8bcd"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../web_pkg/jsonpath_wasm_bg.wasm":"a9530753c3f0aa3c5ead"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var promise;
|
||||
/******/ if(importObject instanceof Promise && typeof WebAssembly.compileStreaming === 'function') {
|
||||
/******/ promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {
|
||||
@ -293,7 +302,7 @@
|
||||
/*! no static exports found */
|
||||
/***/ (function(module, exports, __webpack_require__) {
|
||||
|
||||
eval("// A dependency graph that contains any wasm must all be imported\n// asynchronously. This `bootstrap.js` file does the single async import, so\n// that no one else needs to worry about it again.\nPromise.all(/*! import() */[__webpack_require__.e(1), __webpack_require__.e(0)]).then(__webpack_require__.bind(null, /*! ./index.js */ \"./index.js\"))\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n\n//# sourceURL=webpack:///./bootstrap.js?");
|
||||
eval("// A dependency graph that contains any wasm must all be imported\n// asynchronously. This `bootstrap.js` file does the single async import, so\n// that no one else needs to worry about it again.\nPromise.all(/*! import() */[__webpack_require__.e(0), __webpack_require__.e(1)]).then(__webpack_require__.bind(null, /*! ./index.js */ \"./index.js\"))\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n\n//# sourceURL=webpack:///./bootstrap.js?");
|
||||
|
||||
/***/ })
|
||||
|
||||
|
Binary file not shown.
73 docs/bootstrap.js vendored
@@ -10,7 +10,7 @@
|
||||
/******/ var moduleId, chunkId, i = 0, resolves = [];
|
||||
/******/ for(;i < chunkIds.length; i++) {
|
||||
/******/ chunkId = chunkIds[i];
|
||||
/******/ if(installedChunks[chunkId]) {
|
||||
/******/ if(Object.prototype.hasOwnProperty.call(installedChunks, chunkId) && installedChunks[chunkId]) {
|
||||
/******/ resolves.push(installedChunks[chunkId][0]);
|
||||
/******/ }
|
||||
/******/ installedChunks[chunkId] = 0;
|
||||
@ -52,47 +52,53 @@
|
||||
/******/ function promiseResolve() { return Promise.resolve(); }
|
||||
/******/
|
||||
/******/ var wasmImportObjects = {
|
||||
/******/ "../browser_pkg/jsonpath_wasm_bg.wasm": function() {
|
||||
/******/ "../web_pkg/jsonpath_wasm_bg.wasm": function() {
|
||||
/******/ return {
|
||||
/******/ "./jsonpath_wasm": {
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "./jsonpath_wasm.js": {
|
||||
/******/ "__wbindgen_json_parse": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_json_serialize": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__widl_f_log_1_": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__widl_f_log_1_"](p0i32);
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_number_get": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_number_get"](p0i32,p1i32);
|
||||
/******/ "__wbg_error_e7d3e8dbb31828c8": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbg_error_e7d3e8dbb31828c8"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_call_1ad0eb4a7ab279eb": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbg_call_1ad0eb4a7ab279eb"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_is_string": function(p0i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_get": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_debug_string": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_debug_string"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_throw": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper102": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper102"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_rethrow": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_rethrow"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper104": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper104"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper28": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper28"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper26": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper26"](p0i32,p1i32,p2i32);
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ };
|
||||
@ -155,6 +161,8 @@
|
||||
/******/ }
|
||||
/******/ script.src = jsonpScriptSrc(chunkId);
|
||||
/******/
|
||||
/******/ // create error before stack unwound to get useful stacktrace later
|
||||
/******/ var error = new Error();
|
||||
/******/ onScriptComplete = function (event) {
|
||||
/******/ // avoid mem leaks in IE.
|
||||
/******/ script.onerror = script.onload = null;
|
||||
@ -164,7 +172,8 @@
|
||||
/******/ if(chunk) {
|
||||
/******/ var errorType = event && (event.type === 'load' ? 'missing' : event.type);
|
||||
/******/ var realSrc = event && event.target && event.target.src;
|
||||
/******/ var error = new Error('Loading chunk ' + chunkId + ' failed.\n(' + errorType + ': ' + realSrc + ')');
|
||||
/******/ error.message = 'Loading chunk ' + chunkId + ' failed.\n(' + errorType + ': ' + realSrc + ')';
|
||||
/******/ error.name = 'ChunkLoadError';
|
||||
/******/ error.type = errorType;
|
||||
/******/ error.request = realSrc;
|
||||
/******/ chunk[1](error);
|
||||
@ -182,7 +191,7 @@
|
||||
/******/
|
||||
/******/ // Fetch + compile chunk loading for webassembly
|
||||
/******/
|
||||
/******/ var wasmModules = {"0":["../browser_pkg/jsonpath_wasm_bg.wasm"]}[chunkId] || [];
|
||||
/******/ var wasmModules = {"1":["../web_pkg/jsonpath_wasm_bg.wasm"]}[chunkId] || [];
|
||||
/******/
|
||||
/******/ wasmModules.forEach(function(wasmModuleId) {
|
||||
/******/ var installedWasmModuleData = installedWasmModules[wasmModuleId];
|
||||
@ -192,7 +201,7 @@
|
||||
/******/ promises.push(installedWasmModuleData);
|
||||
/******/ else {
|
||||
/******/ var importObject = wasmImportObjects[wasmModuleId]();
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../browser_pkg/jsonpath_wasm_bg.wasm":"c615fa3fad4c084c8bcd"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../web_pkg/jsonpath_wasm_bg.wasm":"a9530753c3f0aa3c5ead"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var promise;
|
||||
/******/ if(importObject instanceof Promise && typeof WebAssembly.compileStreaming === 'function') {
|
||||
/******/ promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {
|
||||
@ -293,7 +302,7 @@
|
||||
/*! no static exports found */
|
||||
/***/ (function(module, exports, __webpack_require__) {
|
||||
|
||||
eval("// A dependency graph that contains any wasm must all be imported\n// asynchronously. This `bootstrap.js` file does the single async import, so\n// that no one else needs to worry about it again.\n__webpack_require__.e(/*! import() */ 0).then(__webpack_require__.bind(null, /*! ./index.js */ \"./index.js\"))\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n\n\n//# sourceURL=webpack:///./bootstrap.js?");
|
||||
eval("// A dependency graph that contains any wasm must all be imported\n// asynchronously. This `bootstrap.js` file does the single async import, so\n// that no one else needs to worry about it again.\nPromise.all(/*! import() */[__webpack_require__.e(0), __webpack_require__.e(1)]).then(__webpack_require__.bind(null, /*! ./index.js */ \"./index.js\"))\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n\n\n//# sourceURL=webpack:///./bootstrap.js?");
|
||||
|
||||
/***/ })
|
||||
|
||||
|
Binary file not shown.
138 docs/index.html
@@ -1,9 +1,82 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<!-- Global site tag (gtag.js) - Google Analytics -->
|
||||
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-3020058-1"></script>
|
||||
<script>
|
||||
window.dataLayer = window.dataLayer || [];
|
||||
|
||||
function gtag() {dataLayer.push(arguments);}
|
||||
|
||||
gtag('js', new Date());
|
||||
|
||||
gtag('config', 'UA-3020058-1');
|
||||
</script>
|
||||
|
||||
<meta charset="utf-8">
|
||||
<meta property="og:image" content="https://avatars0.githubusercontent.com/u/1104423?s=400&v=4"/>
|
||||
<meta property="og:site_name" content="GitHub"/>
|
||||
<meta property="og:type" content="object"/>
|
||||
<meta property="og:title" content="freestrings/jsonpath"/>
|
||||
<meta property="og:url" content="https://github.com/freestrings/jsonpath"/>
|
||||
<meta property="og:description" content="JsonPath evaluator with Webassembly via Rust - freestrings/jsonpath"/>
|
||||
<title>JsonPath evaluator - Webassembly via Rust</title>
|
||||
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css">
|
||||
<style>
|
||||
/**
|
||||
* GitHub Corners, page css
|
||||
* Author: Tim Holman
|
||||
*/
|
||||
.code textarea {
|
||||
border: 2px solid #eee;
|
||||
outline: 0px;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
font-family: monospace;
|
||||
font-size: 10px;
|
||||
}
|
||||
|
||||
.github-corner:hover .octo-arm {
|
||||
animation: octocat-wave 560ms ease-in-out;
|
||||
}
|
||||
|
||||
@keyframes octocat-wave {
|
||||
0% {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
|
||||
20% {
|
||||
transform: rotate(-25deg);
|
||||
}
|
||||
|
||||
40% {
|
||||
transform: rotate(10deg);
|
||||
}
|
||||
|
||||
60% {
|
||||
transform: rotate(-25deg);
|
||||
}
|
||||
|
||||
80% {
|
||||
transform: rotate(10deg);
|
||||
}
|
||||
|
||||
100% {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 500px) {
|
||||
.github-corner:hover .octo-arm {
|
||||
animation: none;
|
||||
}
|
||||
|
||||
.github-corner .octo-arm {
|
||||
animation: octocat-wave 560ms ease-in-out;
|
||||
}
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
<body role="document">
|
||||
<div class="container">
|
||||
@ -15,7 +88,59 @@
|
||||
</div>
|
||||
-->
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<div class="col-md-3">
|
||||
<span class="badge badge-dark" style="margin-bottom: 15px">JsonPath</span> <span>(click to try)</span>
|
||||
<table class="table">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="path"><a href="#/$.store.book[*].author">$.store.book[*].author</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#$..author">$..author</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$.store.*</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$.store..price</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..book[2]</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..book[-2]</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..book[0,1]</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..book[:2]</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..book[1:2]</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..book[-2:]</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..book[2:]</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..book[?(@.isbn)]</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$.store.book[?(@.price < 10)]</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..*</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="path"><a href="#">$..book[ ?( (@.price < 13 || $.store.bicycle.price < @.price) && @.price <=10 ) ]</a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<span class="badge badge-dark" style="margin-bottom: 15px">Evaluator</span>
|
||||
<div class="form-group">
|
||||
<textarea id="json-example" class="form-control" style="min-width: 100%" rows="20"></textarea>
|
||||
@ -29,12 +154,21 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<div class="col-md-4">
|
||||
<span class="badge badge-dark" style="margin-bottom: 15px">Result</span>
|
||||
<pre class="prettyprint result" id="read-result" style="background-color: transparent; border: none;"></pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<a href="https://github.com/freestrings/jsonpath" class="github-corner" aria-label="View source on GitHub">
|
||||
<svg width="80" height="80" viewBox="0 0 250 250" style="position: absolute; top: 0px; right: 0px; border: 0px;" aria-hidden="true">
|
||||
<path d="M0,0 L115,115 L130,115 L142,142 L250,250 L250,0 Z" fill="#151513"></path>
|
||||
<path class="octo-arm" d="M128.3,109.0 C113.8,99.7 119.0,89.6 119.0,89.6 C122.0,82.7 120.5,78.6 120.5,78.6 C119.2,72.0 123.4,76.3 123.4,76.3 C127.3,80.9 125.5,87.3 125.5,87.3 C122.9,97.6 130.6,101.9 134.4,103.2" fill="#ffffff" style="transform-origin: 130px 106px;"></path>
|
||||
<path class="octo-body"
|
||||
d="M115.0,115.0 C114.9,115.1 118.7,116.5 119.8,115.4 L133.7,101.6 C136.9,99.2 139.9,98.4 142.2,98.6 C133.8,88.0 127.5,74.4 143.8,58.0 C148.5,53.4 154.0,51.2 159.7,51.0 C160.3,49.4 163.2,43.6 171.4,40.1 C171.4,40.1 176.1,42.5 178.8,56.2 C183.1,58.6 187.2,61.8 190.9,65.4 C194.5,69.0 197.7,73.2 200.1,77.6 C213.8,80.2 216.3,84.9 216.3,84.9 C212.7,93.1 206.9,96.0 205.4,96.6 C205.1,102.4 203.0,107.8 198.3,112.5 C181.9,128.9 168.3,122.5 157.7,114.1 C157.9,116.9 156.7,120.9 152.7,124.9 L141.0,136.5 C139.8,137.7 141.6,141.9 141.8,141.8 Z"
|
||||
fill="#ffffff"></path>
|
||||
</svg>
|
||||
</a>
|
||||
<script src="./bootstrap.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
5 lua/.gitignore vendored Normal file
@@ -0,0 +1,5 @@
.idea/*
.vscode
/target/
Cargo.lock
docker_example/ab_results/**
14 lua/Cargo.toml Normal file
@@ -0,0 +1,14 @@
[package]
name = "jsonpath_lua"
version = "0.1.0"
authors = ["Changseok Han <freestrings@gmail.com>"]
license = "MIT"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["preserve_order"] }
jsonpath_lib = { path = "../" }

[[bin]]
name = "bench"
path = "bench_lua_vs_rust/example.rs"
22 lua/bench_lua_vs_rust/example.lua Normal file
@@ -0,0 +1,22 @@
local jsonpath = require("jsonpath")

local iter;
if arg[1] == nil or arg[1] == '' then
    iter = 5000;
else
    iter = tonumber(arg[1]);
end

print(string.format("%s - %u", "lua iter", iter));

local file = io.open("../../benchmark/example.json", "r");
io.input(file)
local data = io.read("*a");
io.close(file);

jsonpath.init('../target/release/deps/libjsonpath_lib.so')
local template = jsonpath.compile("$..book[?(@.price<30 && @.category==\"fiction\")]");
for i = 0, iter do
    local r = template(data);
    -- print(r);
end
46 lua/bench_lua_vs_rust/example.rs Normal file
@@ -0,0 +1,46 @@
extern crate jsonpath_lib as jsonpath;
extern crate serde;
extern crate serde_json;

use std::io::Read;

use serde_json::Value;

fn read_json(path: &str) -> String {
    let mut f = std::fs::File::open(path).unwrap();
    let mut contents = String::new();
    f.read_to_string(&mut contents).unwrap();
    contents
}

fn get_string() -> String {
    read_json("../../benchmark/example.json")
}

fn get_json() -> Value {
    let string = get_string();
    serde_json::from_str(string.as_str()).unwrap()
}

fn get_path() -> &'static str {
    r#"$..book[?(@.price<30 && @.category=="fiction")]"#
}

fn main() {
    let args: Vec<String> = std::env::args().collect();
    let iter = if args.len() < 2 { 5000_usize } else { args[1].as_str().parse::<usize>().unwrap() };

    println!("rust iter - {}", iter);

    let json = get_json();
    for _ in 0..iter {
        let mut selector = jsonpath::Selector::default();
        let _ = selector.str_path(get_path());
        selector.value(&json);
        let r = selector.select();
        if r.is_err() {
            panic!();
        }
        // println!("{:?}", serde_json::to_string(&r.expect("")).unwrap());
    }
}
27 lua/bench_lua_vs_rust/run.sh Executable file
@@ -0,0 +1,27 @@
#!/bin/bash

# cd lua/bench_lua_vs_rust && ./run.sh

set -e

# http://luajit.org/index.html

# cargo clean && \
cargo build --release

export JSONPATH_LIB_PATH="${PWD}/../target/release/deps"
export LUA_PATH="${PWD}/../?.lua;"

echo
time cargo run --release --bin bench -- 1000
echo
time luajit example.lua 1000
echo
time cargo run --release --bin bench -- 5000
echo
time luajit example.lua 5000
echo
time cargo run --release --bin bench -- 10000
echo
time luajit example.lua 10000
107 lua/docker_example/default.conf Normal file
@@ -0,0 +1,107 @@
|
||||
lua_package_path '/etc/jsonpath/?.lua;;';
|
||||
|
||||
access_log /var/log/access.log;
|
||||
error_log /var/log/error.log info;
|
||||
|
||||
lua_shared_dict jsonpaths 1m;
|
||||
|
||||
init_by_lua_block {
|
||||
local pathStrings = {
|
||||
"$.store.book[*].author",
|
||||
"$..author",
|
||||
"$.store.*",
|
||||
"$.store..price",
|
||||
"$..book[2]",
|
||||
"$..book[-2]",
|
||||
"$..book[0,1]",
|
||||
"$..book[:2]",
|
||||
"$..book[1:2]",
|
||||
"$..book[-2:]",
|
||||
"$..book[2:]",
|
||||
"$..book[?(@.isbn)]",
|
||||
"$.store.book[?(@.price == 10)]",
|
||||
"$..*",
|
||||
"$..book[ ?( (@.price < 13 || $.store.bicycle.price < @.price) && @.price <=10 ) ]",
|
||||
"$.store.book[?( (@.price < 10 || @.price > 10) && @.price > 10 )]",
|
||||
"$..[?(@.originPrice > 1)]",
|
||||
"$.pickBanner[?(@.originPrice > 1)]"
|
||||
}
|
||||
|
||||
local jp = require("jsonpath")
|
||||
jp.init("/etc/jsonpath/libjsonpath_lib.so")
|
||||
local jsonpaths = ngx.shared.jsonpaths
|
||||
|
||||
for i, path in ipairs(pathStrings) do
|
||||
jsonpaths:set(i, path)
|
||||
jp.compile(path)
|
||||
end
|
||||
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name localhost;
|
||||
|
||||
gzip on;
|
||||
gzip_types text/plain application/json;
|
||||
#gzip_comp_level 6;
|
||||
#gzip_vary on;
|
||||
|
||||
location / {
|
||||
add_header 'Cache-Control' 'no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0';
|
||||
expires off;
|
||||
|
||||
default_type 'text/plain';
|
||||
root /etc/jsonpath/example;
|
||||
}
|
||||
|
||||
location /filter {
|
||||
# https://developer.mozilla.org/ko/docs/Web/HTTP/Headers/Accept-Encoding
|
||||
proxy_set_header Accept-Encoding "*";
|
||||
|
||||
default_type 'text/plain';
|
||||
|
||||
rewrite /filter/(.*) /$1 break;
|
||||
proxy_pass http://localhost;
|
||||
|
||||
header_filter_by_lua_block {
|
||||
ngx.header["content-length"] = nil
|
||||
|
||||
local args = ngx.req.get_uri_args()
|
||||
local jsonpaths = ngx.shared.jsonpaths
|
||||
local path = jsonpaths:get(args['path'])
|
||||
|
||||
if path == nil then
|
||||
ngx.exit(ngx.HTTP_BAD_REQUEST)
|
||||
end
|
||||
}
|
||||
|
||||
body_filter_by_lua_block {
|
||||
local chunk, eof = ngx.arg[1], ngx.arg[2]
|
||||
local buf = ngx.ctx.buf
|
||||
|
||||
if eof then
|
||||
if buf then
|
||||
local args = ngx.req.get_uri_args()
|
||||
local path = ngx.shared.jsonpaths:get(args['path'])
|
||||
local jsonpath = require("jsonpath")
|
||||
local template = jsonpath.exec(path)
|
||||
local json = buf .. chunk
|
||||
local result = template(json)
|
||||
ngx.arg[1] = result
|
||||
return
|
||||
end
|
||||
|
||||
return
|
||||
end
|
||||
|
||||
if buf then
|
||||
ngx.ctx.buf = buf .. chunk
|
||||
else
|
||||
ngx.ctx.buf = chunk
|
||||
end
|
||||
|
||||
ngx.arg[1] = nil
|
||||
}
|
||||
}
|
||||
}
|
3 lua/docker_example/init.lua Normal file
@@ -0,0 +1,3 @@
local jsonpath = require("jsonpath")
jsonpath.init("/etc/jsonpath/libjsonpath_lib.so")
ngx.log(ngx.INFO, "loaded libjsonpath_lib.so")
25 lua/docker_example/run.sh Executable file
@@ -0,0 +1,25 @@
#!/usr/bin/env bash

# cd lua && cargo build --release && cd docker_example && ./run.sh

set -v

[ "$(docker ps -a | grep jsonpath)" ] && docker kill jsonpath

docker run -d --rm --name jsonpath \
    -v "${PWD}/../../benchmark/example.json":/etc/jsonpath/example/example.json:ro \
    -v "${PWD}/../../benchmark/big_example.json":/etc/jsonpath/example/big_example.json:ro \
    -v "${PWD}/../jsonpath.lua":/etc/jsonpath/jsonpath.lua:ro \
    -v "${PWD}/init.lua":/etc/jsonpath/init.lua:ro \
    -v "${PWD}/../target/release/deps/libjsonpath_lib.so":/etc/jsonpath/libjsonpath_lib.so:ro \
    -v "${PWD}/default.conf":/etc/nginx/conf.d/default.conf \
    -p 8080:80 \
    openresty/openresty:bionic

#for i in {1..16}; do
# curl http://localhost:8080/filter/example.json?path=${i}
# echo
#done

#ab -n 1000 -c 10 http://localhost:8080/filter/big_example.json?path=17
#ab -n 1000 -c 10 http://localhost:8080/filter/big_example.json?path=18
60 lua/jsonpath.lua Normal file
@@ -0,0 +1,60 @@
|
||||
local ffi = require('ffi')
|
||||
|
||||
ffi.cdef [[
|
||||
const char* ffi_select(const char *json_str, const char *path);
|
||||
void *ffi_path_compile(const char *path);
|
||||
const char* ffi_select_with_compiled_path(void *ptr, const char *json_str);
|
||||
]]
|
||||
|
||||
local jsonpath
|
||||
local cache = {}
|
||||
local module = {}
|
||||
|
||||
local function existsVaiable(var)
|
||||
for k, _ in pairs(_G) do
|
||||
if k == var then
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local _ngx
|
||||
if existsVaiable('ngx') then
|
||||
_ngx = ngx
|
||||
else
|
||||
_ngx = {}
|
||||
_ngx.log = function(level, msg)
|
||||
print('['..level..'] ' .. msg)
|
||||
end
|
||||
end
|
||||
|
||||
function module.compile(path)
|
||||
assert(jsonpath, '"libjsonpath_lib" is not loaded')
|
||||
|
||||
if(cache[path] == nil) then
|
||||
cache[path] = jsonpath.ffi_path_compile(path)
|
||||
_ngx.log(_ngx.INFO, 'compile : [' .. path .. ']')
|
||||
end
|
||||
end
|
||||
|
||||
function module.exec(path)
|
||||
local compiledPath = cache[path]
|
||||
|
||||
if(cache[path] == nil) then
|
||||
assert(jsonpath, path .. ": is not compiled")
|
||||
end
|
||||
|
||||
return function(jsonStr)
|
||||
local result = jsonpath.ffi_select_with_compiled_path(compiledPath, jsonStr)
|
||||
return ffi.string(result);
|
||||
end
|
||||
end
|
||||
|
||||
function module.init(path)
|
||||
if jsonpath == nil then
|
||||
jsonpath = ffi.load(path)
|
||||
_ngx.log(_ngx.INFO, '"' .. path .. '" initialized')
|
||||
end
|
||||
end
|
||||
|
||||
return module
|
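The Lua module above binds three C symbols (`ffi_select`, `ffi_path_compile`, `ffi_select_with_compiled_path`) from `libjsonpath_lib.so`; their Rust side is not part of this diff. Purely as an illustration of that boundary, a hypothetical C-ABI export with the same shape as the first `ffi.cdef` declaration could look like the sketch below. The function name mirrors the declaration, but the body, error handling, and the deliberate leak of the returned buffer are assumptions for the sketch, not the library's actual implementation.

```rust
extern crate jsonpath_lib;

use std::ffi::{CStr, CString};
use std::os::raw::c_char;

// Hypothetical sketch only: a C-ABI wrapper matching the `ffi_select` declaration
// in the Lua ffi.cdef block above; the real export is not shown in this comparison.
#[no_mangle]
pub extern "C" fn ffi_select(json_str: *const c_char, path: *const c_char) -> *const c_char {
    // Borrow the incoming C strings; fall back to harmless defaults on invalid UTF-8.
    let json = unsafe { CStr::from_ptr(json_str) }.to_str().unwrap_or("{}");
    let path = unsafe { CStr::from_ptr(path) }.to_str().unwrap_or("$");

    // Run the query and serialize either the result or the error as a string.
    let out = jsonpath_lib::select_as_str(json, path).unwrap_or_else(|e| format!("{:?}", e));

    // Leak the buffer so LuaJIT can read it with ffi.string(); a production binding
    // would pair this with a matching deallocation function.
    CString::new(out).unwrap().into_raw() as *const c_char
}
```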
7 nodejs/.gitignore vendored
@@ -1,7 +0,0 @@
native/target
native/index.node
native/artifacts.json
**/*~
**/node_modules
.idea
build
154 nodejs/README.md
@@ -1,154 +0,0 @@
|
||||
# jsonpath-rs
|
||||
|
||||
[](https://travis-ci.org/freestrings/jsonpath)
|
||||
|
||||
It is [JsonPath](https://goessner.net/articles/JsonPath/) implementation. The core implementation is written in Rust.
|
||||
|
||||
## Notice
|
||||
|
||||
Pre-built binaries are not provided; the package is built from source at install time, and if Rust is not installed, the latest version is installed automatically.
|
||||
|
||||
> Not yet tested in Windows
|
||||
|
||||
## Table of Contents
|
||||
|
||||
* [jsonpath.select(json: string|object, jsonpath: string)](#json-stringobject-jsonpath-string)
|
||||
* [jsonpath.compile(jsonpath: string)](#compilejsonpath-string)
|
||||
* [jsonpath.selector(json: string|object)](#selectorjson-stringobject)
|
||||
* [Simple time check](https://github.com/freestrings/jsonpath/wiki/Simple-timecheck-jsonpath-native)
|
||||
* [Other Examples](https://github.com/freestrings/jsonpath/wiki/Javascript-examples)
|
||||
|
||||
### jsonpath.select(json: string|object, jsonpath: string)
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
|
||||
let selectAsString = jsonpath.select(JSON.stringify(jsonObj), '$..friends[0]');
|
||||
let selectAsObj = jsonpath.select(jsonObj, '$..friends[0]');
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret) == JSON.stringify(selectAsString),
|
||||
JSON.stringify(ret) == JSON.stringify(selectAsObj)
|
||||
);
|
||||
|
||||
// => true, true
|
||||
```
|
||||
|
||||
### jsonpath.compile(jsonpath: string)
|
||||
|
||||
```javascript
|
||||
let template = jsonpath.compile('$..friends[0]');
|
||||
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let selectAsString = template(JSON.stringify(jsonObj));
|
||||
let selectAsObj = template(jsonObj);
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret) == JSON.stringify(selectAsString),
|
||||
JSON.stringify(ret) == JSON.stringify(selectAsObj)
|
||||
);
|
||||
|
||||
// => true, true
|
||||
|
||||
let jsonObj2 = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "Millicent Norman"},
|
||||
{"name": "Vincent Cannon"}
|
||||
]
|
||||
},
|
||||
"friends": [ {"age": 30}, {"age": 40} ]
|
||||
};
|
||||
|
||||
let ret2 = [
|
||||
{"age": 30},
|
||||
{"name": "Millicent Norman"}
|
||||
];
|
||||
|
||||
let selectAsString2 = template(JSON.stringify(jsonObj2));
|
||||
let selectAsObj2 = template(jsonObj2);
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret2) == JSON.stringify(selectAsString2),
|
||||
JSON.stringify(ret2) == JSON.stringify(selectAsObj2)
|
||||
);
|
||||
|
||||
// => true, true
|
||||
```
|
||||
|
||||
### jsonpath.selector(json: string|object)
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret1 = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let ret2 = [
|
||||
{"name": "친구4"},
|
||||
{"name": "친구2", "age": 20}
|
||||
];
|
||||
|
||||
let selector = jsonpath.selector(jsonObj);
|
||||
// or as json string
|
||||
// let selector = jsonpath.selector(JSON.stringify(jsonObj));
|
||||
|
||||
let select1 = selector('$..friends[0]');
|
||||
let select2 = selector('$..friends[1]');
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret1) == JSON.stringify(select1),
|
||||
JSON.stringify(ret2) == JSON.stringify(select2)
|
||||
);
|
||||
|
||||
// => true, true
|
||||
```
|
@@ -1,11 +0,0 @@
#!/bin/bash

if ! [ -x "$(command -v rustc)" ]; then
    echo "install rust"
    curl https://sh.rustup.rs -sSf > /tmp/rustup.sh
    sh /tmp/rustup.sh -y
    export PATH="$HOME/.cargo/bin:$PATH"
    source "$HOME/.cargo/env"
fi

neon build --release
@@ -1,34 +0,0 @@
|
||||
const { Compile, Selector, selectStr } = require('../native');
|
||||
|
||||
function compile(path) {
|
||||
let compile = new Compile(path);
|
||||
return (json) => {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
return JSON.parse(compile.template(json));
|
||||
};
|
||||
}
|
||||
|
||||
function selector(json) {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
let selector = new Selector(json);
|
||||
return (path) => {
|
||||
return JSON.parse(selector.selector(path));
|
||||
}
|
||||
}
|
||||
|
||||
function select(json, path) {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
return JSON.parse(selectStr(json, path));
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
compile,
|
||||
selector,
|
||||
select
|
||||
};
|
5 nodejs/native/.gitignore vendored
@@ -1,5 +0,0 @@
.idea/*
.vscode
!.idea/runConfigurations/
/target/
Cargo.lock
@@ -1,24 +0,0 @@
[package]
name = "jsonpath4nodejs"
version = "0.1.1"
authors = ["Changseok Han <freestrings@gmail.com>"]
description = "jsonpath_lib bindings for nodejs"
keywords = ["library", "jsonpath", "json", "nodejs"]
repository = "https://github.com/freestrings/jsonpath"
license = "MIT"

build = "build.rs"
exclude = ["artifacts.json", "index.node"]

[build-dependencies]
neon-build = "0.2.0"

[dependencies]
jsonpath_lib = "0.1.8"
neon = "0.2.0"
neon-serde = "0.1.1"
serde_json = { version = "1.0", features = ["preserve_order"] }

[lib]
name = "jsonpath4nodejs"
crate-type = ["dylib"]
@@ -1,7 +0,0 @@
extern crate neon_build;

fn main() {
    neon_build::setup(); // must be called in build.rs

    // add project-specific build logic here...
}
@@ -1,127 +0,0 @@
|
||||
extern crate jsonpath_lib as jsonpath;
|
||||
#[macro_use]
|
||||
extern crate neon;
|
||||
extern crate neon_serde;
|
||||
extern crate serde_json;
|
||||
|
||||
use std::ops::Deref;
|
||||
|
||||
use jsonpath::filter::value_filter::JsonValueFilter;
|
||||
use jsonpath::parser::parser::{Node, NodeVisitor, Parser};
|
||||
use jsonpath::ref_value::model::{RefValue, RefValueWrapper};
|
||||
use neon::prelude::*;
|
||||
use serde_json::Value;
|
||||
|
||||
///
|
||||
/// `neon_serde::from_value` has very poor performance.
|
||||
///
|
||||
fn select(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
let json_val = ctx.argument::<JsValue>(0)?;
|
||||
let json: Value = neon_serde::from_value(&mut ctx, json_val)?;
|
||||
let path = ctx.argument::<JsString>(1)?.value();
|
||||
|
||||
match jsonpath::select(&json, path.as_str()) {
|
||||
Ok(value) => Ok(neon_serde::to_value(&mut ctx, &value)?),
|
||||
Err(e) => panic!("{:?}", e)
|
||||
}
|
||||
}
|
||||
|
||||
fn select_str(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
let json_val = ctx.argument::<JsString>(0)?.value();
|
||||
let path = ctx.argument::<JsString>(1)?.value();
|
||||
match jsonpath::select_as_str(&json_val, path.as_str()) {
|
||||
Ok(value) => Ok(JsString::new(&mut ctx, &value).upcast()),
|
||||
Err(e) => panic!("{:?}", e)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Compile {
|
||||
node: Node
|
||||
}
|
||||
|
||||
pub struct Selector {
|
||||
json: RefValueWrapper
|
||||
}
|
||||
|
||||
declare_types! {
|
||||
pub class JsCompile for Compile {
|
||||
init(mut ctx) {
|
||||
let path = ctx.argument::<JsString>(0)?.value();
|
||||
let mut parser = Parser::new(path.as_str());
|
||||
|
||||
let node = match parser.compile() {
|
||||
Ok(node) => node,
|
||||
Err(e) => panic!("{:?}", e)
|
||||
};
|
||||
|
||||
Ok(Compile { node })
|
||||
}
|
||||
|
||||
method template(mut ctx) {
|
||||
let this = ctx.this();
|
||||
|
||||
let node = {
|
||||
let guard = ctx.lock();
|
||||
let this = this.borrow(&guard);
|
||||
this.node.clone()
|
||||
};
|
||||
|
||||
let json_str = ctx.argument::<JsString>(0)?.value();
|
||||
let ref_value: RefValue = match serde_json::from_str(&json_str) {
|
||||
Ok(ref_value) => ref_value,
|
||||
Err(e) => panic!("{:?}", e)
|
||||
};
|
||||
|
||||
let mut jf = JsonValueFilter::new_from_value(ref_value.into());
|
||||
jf.visit(node);
|
||||
match serde_json::to_string(&jf.take_value().deref()) {
|
||||
Ok(json_str) => Ok(JsString::new(&mut ctx, &json_str).upcast()),
|
||||
Err(e) => panic!("{:?}", e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub class JsSelector for Selector {
|
||||
init(mut ctx) {
|
||||
let json_str = ctx.argument::<JsString>(0)?.value();
|
||||
let ref_value: RefValue = match serde_json::from_str(&json_str) {
|
||||
Ok(ref_value) => ref_value,
|
||||
Err(e) => panic!("{:?}", e)
|
||||
};
|
||||
|
||||
Ok(Selector { json: ref_value.into() })
|
||||
}
|
||||
|
||||
method selector(mut ctx) {
|
||||
let this = ctx.this();
|
||||
|
||||
let json = {
|
||||
let guard = ctx.lock();
|
||||
let this = this.borrow(&guard);
|
||||
this.json.clone()
|
||||
};
|
||||
|
||||
let path = ctx.argument::<JsString>(0)?.value();
|
||||
let mut parser = Parser::new(path.as_str());
|
||||
|
||||
let node = match parser.compile() {
|
||||
Ok(node) => node,
|
||||
Err(e) => panic!("{:?}", e)
|
||||
};
|
||||
|
||||
let mut jf = JsonValueFilter::new_from_value(json);
|
||||
jf.visit(node);
|
||||
match serde_json::to_string(&jf.take_value().deref()) {
|
||||
Ok(json_str) => Ok(JsString::new(&mut ctx, &json_str).upcast()),
|
||||
Err(e) => panic!("{:?}", e)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
register_module!(mut m, {
|
||||
m.export_class::<JsCompile>("Compile").expect("Compile class error");
|
||||
m.export_class::<JsSelector>("Selector").expect("Selector class error");
|
||||
m.export_function("select", select)?;
|
||||
m.export_function("selectStr", select_str)?;
|
||||
Ok(())
|
||||
});
|
2511
nodejs/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,29 +0,0 @@
{
  "name": "jsonpath-rs",
  "version": "0.1.6",
  "description": "It is JsonPath implementation. The core implementation is written in Rust",
  "author": "Changseok Han <freestrings@gmail.com>",
  "license": "MIT",
  "keywords": [
    "jsonpath",
    "rust-binding",
    "rust",
    "rustlang",
    "json"
  ],
  "main": "lib/index.js",
  "dependencies": {
    "neon-cli": "^0.2.0"
  },
  "scripts": {
    "install": "./build.sh",
    "test": "mocha"
  },
  "devDependencies": {
    "mocha": "^6.0.2"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/freestrings/jsonpath.git"
  }
}
@@ -1,99 +0,0 @@
|
||||
const jsonpath = require('../lib/index.js');
|
||||
|
||||
describe('compile test', () => {
|
||||
it('basic', (done) => {
|
||||
let template = jsonpath.compile('$.a');
|
||||
let result = template({'a': 1});
|
||||
if (result === 1) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('selector test', () => {
|
||||
it('basic', (done) => {
|
||||
let selector = jsonpath.selector({'a': 1});
|
||||
let result = selector('$.a');
|
||||
if (result === 1) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('select test', () => {
|
||||
it('basic', (done) => {
|
||||
let result = jsonpath.select({'a': 1}, '$.a');
|
||||
if (result === 1) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('filter test', () => {
|
||||
it('complex filter1', (done) => {
|
||||
let json = {
|
||||
'store': {
|
||||
'book': [
|
||||
{
|
||||
'category': 'reference',
|
||||
'author': 'Nigel Rees',
|
||||
'title': 'Sayings of the Century',
|
||||
'price': 8.95,
|
||||
},
|
||||
{
|
||||
'category': 'fiction',
|
||||
'author': 'Evelyn Waugh',
|
||||
'title': 'Sword of Honour',
|
||||
'price': 12.99,
|
||||
},
|
||||
{
|
||||
'category': 'fiction',
|
||||
'author': 'Herman Melville',
|
||||
'title': 'Moby Dick',
|
||||
'isbn': '0-553-21311-3',
|
||||
'price': 8.99,
|
||||
},
|
||||
{
|
||||
'category': 'fiction',
|
||||
'author': 'J. R. R. Tolkien',
|
||||
'title': 'The Lord of the Rings',
|
||||
'isbn': '0-395-19395-8',
|
||||
'price': 22.99,
|
||||
},
|
||||
],
|
||||
'bicycle': {
|
||||
'color': 'red',
|
||||
'price': 19.95,
|
||||
},
|
||||
},
|
||||
'expensive': 10,
|
||||
};
|
||||
|
||||
let target = [
|
||||
{
|
||||
category: 'fiction',
|
||||
author: 'Evelyn Waugh',
|
||||
title: 'Sword of Honour',
|
||||
price: 12.99,
|
||||
},
|
||||
{
|
||||
category: 'fiction',
|
||||
author: 'J. R. R. Tolkien',
|
||||
title: 'The Lord of the Rings',
|
||||
isbn: '0-395-19395-8',
|
||||
price: 22.99,
|
||||
},
|
||||
{
|
||||
category: 'reference',
|
||||
author: 'Nigel Rees',
|
||||
title: 'Sayings of the Century',
|
||||
price: 8.95,
|
||||
}]
|
||||
;
|
||||
|
||||
let result = jsonpath.select(json, '$..book[?((@.price == 12.99 || $.store.bicycle.price < @.price) || @.category == "reference")]');
|
||||
if (JSON.stringify(result) === JSON.stringify(target)) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
9
profiling.sh
Executable file
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

set -e

valgrind \
    --tool=callgrind \
    --dump-instr=yes \
    --collect-jumps=yes \
    --simulate-cache=yes $1 -- $2
59
src/ffi/mod.rs
Normal file
@@ -0,0 +1,59 @@
use std::ffi::{CStr, CString};
use std::os::raw::{c_char, c_void};

use {parser, select, select_as_str};

const INVALID_PATH: &str = "invalid path";
const INVALID_JSON: &str = "invalid json";

fn to_str(v: *const c_char, err_msg: &str) -> &str {
    unsafe { CStr::from_ptr(v) }.to_str().expect(err_msg)
}

fn to_char_ptr(v: &str) -> *const c_char {
    let s = CString::new(v).unwrap_or_else(|_| panic!("invalid string: {}", v));
    let ptr = s.as_ptr();
    std::mem::forget(s);
    ptr
}

#[no_mangle]
pub extern "C" fn ffi_select(json_str: *const c_char, path: *const c_char) -> *const c_char {
    let json_str = to_str(json_str, INVALID_JSON);
    let path = to_str(path, INVALID_PATH);
    match select_as_str(json_str, path) {
        Ok(v) => to_char_ptr(v.as_str()),
        Err(e) => {
            panic!("{:?}", e);
        }
    }
}

#[no_mangle]
#[allow(clippy::forget_copy)]
pub extern "C" fn ffi_path_compile(path: *const c_char) -> *mut c_void {
    let path = to_str(path, INVALID_PATH);
    let ref_node = Box::into_raw(Box::new(parser::Parser::compile(path).unwrap()));
    let ptr = ref_node as *mut c_void;
    std::mem::forget(ref_node);
    ptr
}

#[no_mangle]
pub extern "C" fn ffi_select_with_compiled_path(
    path_ptr: *mut c_void,
    json_ptr: *const c_char,
) -> *const c_char {
    let node = unsafe { Box::from_raw(path_ptr as *mut parser::Node) };
    let json_str = to_str(json_ptr, INVALID_JSON);
    let json = serde_json::from_str(json_str)
        .unwrap_or_else(|_| panic!("invalid json string: {}", json_str));

    let mut selector = select::Selector::default();
    let found = selector.compiled_path(&node).value(&json).select().unwrap();
    std::mem::forget(node);

    let result = serde_json::to_string(&found)
        .unwrap_or_else(|_| panic!("json serialize error: {:?}", found));
    to_char_ptr(result.as_str())
}
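Taken together, the three exports above give a C caller two modes: a one-shot select that re-parses the path on every call, and a compile-once handle that can be reused across documents. Below is a minimal round-trip sketch written as an in-crate Rust harness rather than a real C consumer; it assumes it lives next to the functions above so they are in scope, and the sample document, path, and printed output are only illustrative.

    use std::ffi::{CStr, CString};

    fn ffi_round_trip() {
        let json = CString::new(r#"{"a": {"b": 1}}"#).unwrap();
        let path = CString::new("$.a.b").unwrap();

        // One-shot: the path and the JSON are both parsed during this call.
        let out = ffi_select(json.as_ptr(), path.as_ptr());
        println!("{}", unsafe { CStr::from_ptr(out) }.to_str().unwrap()); // a JSON array of matches

        // Compile once, then reuse the opaque path handle for many documents.
        let compiled = ffi_path_compile(path.as_ptr());
        let out = ffi_select_with_compiled_path(compiled, json.as_ptr());
        println!("{}", unsafe { CStr::from_ptr(out) }.to_str().unwrap());
    }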
@@ -1,191 +0,0 @@
|
||||
#[derive(Debug)]
|
||||
pub enum CmpType {
|
||||
Eq,
|
||||
Ne,
|
||||
Gt,
|
||||
Ge,
|
||||
Lt,
|
||||
Le,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum CmpCondType {
|
||||
And,
|
||||
Or,
|
||||
}
|
||||
|
||||
pub trait PrivCmp {
|
||||
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool;
|
||||
|
||||
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool;
|
||||
|
||||
fn cmp_string(&self, v1: &String, v2: &String) -> bool;
|
||||
}
|
||||
|
||||
pub trait IntoType {
|
||||
fn into_type(&self) -> CmpType;
|
||||
}
|
||||
|
||||
pub struct CmpEq;
|
||||
|
||||
impl PrivCmp for CmpEq {
|
||||
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool {
|
||||
v1 == v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool {
|
||||
v1 == v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &String, v2: &String) -> bool {
|
||||
v1 == v2
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoType for CmpEq {
|
||||
fn into_type(&self) -> CmpType {
|
||||
CmpType::Eq
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CmpNe;
|
||||
|
||||
impl PrivCmp for CmpNe {
|
||||
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool {
|
||||
v1 != v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool {
|
||||
v1 != v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &String, v2: &String) -> bool {
|
||||
v1 != v2
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoType for CmpNe {
|
||||
fn into_type(&self) -> CmpType {
|
||||
CmpType::Ne
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CmpGt;
|
||||
|
||||
impl PrivCmp for CmpGt {
|
||||
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool {
|
||||
v1 > v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool {
|
||||
v1 > v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &String, v2: &String) -> bool {
|
||||
v1 > v2
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoType for CmpGt {
|
||||
fn into_type(&self) -> CmpType {
|
||||
CmpType::Gt
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CmpGe;
|
||||
|
||||
impl PrivCmp for CmpGe {
|
||||
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool {
|
||||
v1 >= v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool {
|
||||
v1 >= v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &String, v2: &String) -> bool {
|
||||
v1 >= v2
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoType for CmpGe {
|
||||
fn into_type(&self) -> CmpType {
|
||||
CmpType::Ge
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CmpLt;
|
||||
|
||||
impl PrivCmp for CmpLt {
|
||||
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool {
|
||||
v1 < v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool {
|
||||
v1 < v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &String, v2: &String) -> bool {
|
||||
v1 < v2
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoType for CmpLt {
|
||||
fn into_type(&self) -> CmpType {
|
||||
CmpType::Lt
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CmpLe;
|
||||
|
||||
impl PrivCmp for CmpLe {
|
||||
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool {
|
||||
v1 <= v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool {
|
||||
v1 <= v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &String, v2: &String) -> bool {
|
||||
v1 <= v2
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoType for CmpLe {
|
||||
fn into_type(&self) -> CmpType {
|
||||
CmpType::Le
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CmpAnd;
|
||||
|
||||
impl PrivCmp for CmpAnd {
|
||||
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool {
|
||||
*v1 && *v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool {
|
||||
v1 > &0_f64 && v2 > &0_f64
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &String, v2: &String) -> bool {
|
||||
!v1.is_empty() && !v2.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CmpOr;
|
||||
|
||||
impl PrivCmp for CmpOr {
|
||||
fn cmp_bool(&self, v1: &bool, v2: &bool) -> bool {
|
||||
*v1 || *v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: &f64, v2: &f64) -> bool {
|
||||
v1 > &0_f64 || v2 > &0_f64
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &String, v2: &String) -> bool {
|
||||
!v1.is_empty() || !v2.is_empty()
|
||||
}
|
||||
}
|
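Each Cmp* struct above is a zero-sized comparison strategy: the filter evaluator picks one per operator token and dispatches through PrivCmp for the kind of value being compared. A hedged, module-internal sketch of that dispatch (PrivCmp and the Cmp* types are private to this module, so this only illustrates the intent):

    // Illustrative only: apply a chosen comparator to two numbers.
    fn apply_f64<F: PrivCmp>(op: F, left: f64, right: f64) -> bool {
        op.cmp_f64(&left, &right)
    }

    fn cmp_demo() {
        assert!(apply_f64(CmpGt, 2.0, 1.0));  // 2.0 > 1.0
        assert!(!apply_f64(CmpLe, 2.0, 1.0)); // !(2.0 <= 1.0)
        assert!(apply_f64(CmpOr, 1.0, 0.0));  // numeric "or": true when either value is > 0
    }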
@@ -1,4 +0,0 @@
mod cmp;
mod term;
pub mod value_filter;
pub mod value_wrapper;
@@ -1,154 +0,0 @@
|
||||
use super::cmp::*;
|
||||
use super::value_filter::ValueFilterKey;
|
||||
use super::value_wrapper::*;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum TermContext {
|
||||
Constants(ExprTerm),
|
||||
Json(Option<ValueFilterKey>, ValueWrapper),
|
||||
}
|
||||
|
||||
impl TermContext {
|
||||
fn cmp<F: PrivCmp + IntoType>(&self, other: &TermContext, cmp_fn: F, default: bool) -> TermContext {
|
||||
match self {
|
||||
TermContext::Constants(et) => {
|
||||
match other {
|
||||
TermContext::Constants(oet) => {
|
||||
trace!("const-const");
|
||||
TermContext::Constants(ExprTerm::Bool(et.cmp(oet, cmp_fn, default)))
|
||||
}
|
||||
TermContext::Json(key, v) => {
|
||||
trace!("const-json");
|
||||
TermContext::Json(None, v.take_with(key, et, cmp_fn, true))
|
||||
}
|
||||
}
|
||||
}
|
||||
TermContext::Json(key, v) => {
|
||||
match other {
|
||||
TermContext::Json(key_other, ov) => {
|
||||
trace!("json-json");
|
||||
|
||||
fn is_json(t: &TermContext) -> bool {
|
||||
match t {
|
||||
TermContext::Json(_, _) => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
let mut c = v.into_term(key);
|
||||
let mut oc = ov.into_term(key_other);
|
||||
if is_json(&c) && is_json(&oc) {
|
||||
v.cmp(&ov, cmp_fn.into_type())
|
||||
} else {
|
||||
c.cmp(&mut oc, cmp_fn, default)
|
||||
}
|
||||
}
|
||||
TermContext::Constants(et) => {
|
||||
trace!("json-const");
|
||||
TermContext::Json(None, v.take_with(key, et, cmp_fn, false))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn cmp_cond(&self, other: &TermContext, cmp_cond_type: CmpCondType) -> TermContext {
|
||||
match self {
|
||||
TermContext::Constants(et) => {
|
||||
match other {
|
||||
TermContext::Constants(oet) => {
|
||||
match cmp_cond_type {
|
||||
CmpCondType::Or => {
|
||||
TermContext::Constants(ExprTerm::Bool(et.cmp(oet, CmpOr, false)))
|
||||
}
|
||||
CmpCondType::And => {
|
||||
TermContext::Constants(ExprTerm::Bool(et.cmp(oet, CmpAnd, false)))
|
||||
}
|
||||
}
|
||||
}
|
||||
TermContext::Json(_, v) => {
|
||||
TermContext::Json(None, ValueWrapper::new(v.get_val().clone(), false))
|
||||
}
|
||||
}
|
||||
}
|
||||
TermContext::Json(_, v) => {
|
||||
match other {
|
||||
TermContext::Json(_, ov) => {
|
||||
match cmp_cond_type {
|
||||
CmpCondType::Or => TermContext::Json(None, v.union(ov)),
|
||||
CmpCondType::And => TermContext::Json(None, v.intersect(ov)),
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
TermContext::Json(None, ValueWrapper::new(v.get_val().clone(), false))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn eq(&self, other: &TermContext) -> TermContext {
|
||||
trace!("eq");
|
||||
self.cmp(other, CmpEq, false)
|
||||
}
|
||||
|
||||
pub fn ne(&self, other: &TermContext) -> TermContext {
|
||||
trace!("ne");
|
||||
self.cmp(other, CmpNe, true)
|
||||
}
|
||||
|
||||
pub fn gt(&self, other: &TermContext) -> TermContext {
|
||||
trace!("gt");
|
||||
self.cmp(other, CmpGt, false)
|
||||
}
|
||||
|
||||
pub fn ge(&self, other: &TermContext) -> TermContext {
|
||||
trace!("ge");
|
||||
self.cmp(other, CmpGe, false)
|
||||
}
|
||||
|
||||
pub fn lt(&self, other: &TermContext) -> TermContext {
|
||||
trace!("lt");
|
||||
self.cmp(other, CmpLt, false)
|
||||
}
|
||||
|
||||
pub fn le(&self, other: &TermContext) -> TermContext {
|
||||
trace!("le");
|
||||
self.cmp(other, CmpLe, false)
|
||||
}
|
||||
|
||||
pub fn and(&self, other: &TermContext) -> TermContext {
|
||||
self.cmp_cond(other, CmpCondType::And)
|
||||
}
|
||||
|
||||
pub fn or(&self, other: &TermContext) -> TermContext {
|
||||
self.cmp_cond(other, CmpCondType::Or)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ExprTerm {
|
||||
String(String),
|
||||
Number(f64),
|
||||
Bool(bool),
|
||||
}
|
||||
|
||||
impl ExprTerm {
|
||||
fn cmp<F: PrivCmp>(&self, other: &ExprTerm, cmp_fn: F, default: bool) -> bool {
|
||||
match self {
|
||||
ExprTerm::Bool(v1) => match other {
|
||||
ExprTerm::Bool(v2) => cmp_fn.cmp_bool(v1, v2),
|
||||
_ => default
|
||||
}
|
||||
ExprTerm::Number(v1) => match other {
|
||||
ExprTerm::Number(v2) => cmp_fn.cmp_f64(v1, v2),
|
||||
_ => default
|
||||
}
|
||||
ExprTerm::String(v1) => match other {
|
||||
ExprTerm::String(v2) => cmp_fn.cmp_string(v1, v2),
|
||||
_ => default
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
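ExprTerm::cmp above only compares terms of the same kind; a kind mismatch falls back to the default argument, which is why TermContext::ne passes true while the other operators pass false. A module-internal sketch of that rule (cmp and the comparators are private to this module, so this is illustrative only):

    fn mixed_kind_demo() {
        let n = ExprTerm::Number(1.0);
        let s = ExprTerm::String("1".to_string());
        assert!(!n.cmp(&s, CmpEq, false)); // mismatched kinds fall back to the default: false
        assert!(n.cmp(&s, CmpNe, true));   // mismatched kinds fall back to the default: true
    }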
@@ -1,624 +0,0 @@
|
||||
use std::error::Error;
|
||||
use std::ops::Deref;
|
||||
use std::result::Result;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
use filter::term::*;
|
||||
use filter::value_wrapper::*;
|
||||
use ref_value::model::*;
|
||||
use parser::parser::{ParseToken, FilterToken, NodeVisitor};
|
||||
|
||||
trait ArrayIndex {
|
||||
fn index(&self, v: &RefValueWrapper) -> usize;
|
||||
|
||||
fn take_value(&self, v: &RefValueWrapper) -> RefValueWrapper {
|
||||
let idx = self.index(v);
|
||||
match v.get(idx) {
|
||||
Some(v) => v.clone(),
|
||||
_ => RefValue::Null.into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ArrayIndex for f64 {
|
||||
fn index(&self, v: &RefValueWrapper) -> usize {
|
||||
if v.is_array() && self < &0_f64 {
|
||||
(v.as_array().unwrap().len() as f64 + self) as usize
|
||||
} else {
|
||||
*self as usize
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ArrayIndex for isize {
|
||||
fn index(&self, v: &RefValueWrapper) -> usize {
|
||||
if v.is_array() && self < &0_isize {
|
||||
(v.as_array().unwrap().len() as isize + self) as usize
|
||||
} else {
|
||||
*self as usize
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ArrayIndex for usize {
|
||||
fn index(&self, _: &RefValueWrapper) -> usize {
|
||||
*self as usize
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ValueFilterKey {
|
||||
Num(usize),
|
||||
String(String),
|
||||
All,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ValueFilter {
|
||||
vw: ValueWrapper,
|
||||
last_key: Option<ValueFilterKey>,
|
||||
filter_mode: bool,
|
||||
}
|
||||
|
||||
impl ValueFilter {
|
||||
pub fn new(v: RefValueWrapper, is_leaves: bool, filter_mode: bool) -> Self {
|
||||
ValueFilter { vw: ValueWrapper::new(v, is_leaves), last_key: None, filter_mode }
|
||||
}
|
||||
|
||||
fn iter_to_value_vec<'a, I: Iterator<Item=&'a RefValueWrapper>>(iter: I) -> Vec<RefValueWrapper> {
|
||||
iter
|
||||
.map(|v| v.clone())
|
||||
.filter(|v| !v.is_null())
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn get_nested_array<F: ArrayIndex>(v: &RefValueWrapper, key: F, filter_mode: bool) -> RefValueWrapper {
|
||||
if v.is_array() && v.as_array().unwrap().get(key.index(v)).is_some() {
|
||||
if filter_mode {
|
||||
v.clone()
|
||||
} else {
|
||||
let idx = key.index(v);
|
||||
v.get(idx).unwrap().clone()
|
||||
}
|
||||
} else {
|
||||
key.take_value(v)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_nested_object(v: &RefValueWrapper, key: &String, filter_mode: bool) -> RefValueWrapper {
|
||||
if v.is_object() && v.as_object().unwrap().contains_key(key) {
|
||||
if filter_mode {
|
||||
v.clone()
|
||||
} else {
|
||||
v.get(key.clone()).unwrap().clone()
|
||||
}
|
||||
} else {
|
||||
RefValue::Null.into()
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_all(key: Option<&String>, v: &RefValueWrapper, buf: &mut Vec<RefValueWrapper>) {
|
||||
match v.deref() {
|
||||
RefValue::Array(vec) => {
|
||||
if key.is_none() {
|
||||
for v in vec {
|
||||
buf.push(v.clone());
|
||||
}
|
||||
}
|
||||
for i in vec {
|
||||
Self::collect_all(key, i, buf);
|
||||
}
|
||||
}
|
||||
RefValue::Object(v) => {
|
||||
for (k, v) in v.into_iter() {
|
||||
if match key {
|
||||
Some(map_key) => map_key == k,
|
||||
_ => true
|
||||
} {
|
||||
buf.push(v.clone());
|
||||
}
|
||||
}
|
||||
for (_, v) in v.into_iter() {
|
||||
Self::collect_all(key, v, buf);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn step_leaves_all(&mut self) -> &ValueWrapper {
|
||||
debug!("step_leaves_all");
|
||||
let mut buf = Vec::new();
|
||||
Self::collect_all(None, &self.vw.get_val(), &mut buf);
|
||||
trace!("step_leaves_all - {:?}", buf);
|
||||
self.last_key = Some(ValueFilterKey::All);
|
||||
self.vw = ValueWrapper::new(RefValue::Array(buf).into(), true);
|
||||
&self.vw
|
||||
}
|
||||
|
||||
pub fn step_leaves_str(&mut self, key: &str) -> &ValueWrapper {
|
||||
self.step_leaves_string(&key.to_string())
|
||||
}
|
||||
|
||||
pub fn step_leaves_string(&mut self, key: &String) -> &ValueWrapper {
|
||||
debug!("step_leaves_string");
|
||||
let mut buf = Vec::new();
|
||||
Self::collect_all(Some(key), &self.vw.get_val(), &mut buf);
|
||||
trace!("step_leaves_string - {:?}", buf);
|
||||
self.last_key = Some(ValueFilterKey::String(key.clone()));
|
||||
self.vw = ValueWrapper::new(RefValue::Array(buf).into(), true);
|
||||
&self.vw
|
||||
}
|
||||
|
||||
pub fn step_in_all(&mut self) -> &ValueWrapper {
|
||||
debug!("step_in_all");
|
||||
|
||||
let vec = match self.vw.get_val().deref() {
|
||||
RefValue::Object(ref map) => {
|
||||
Self::iter_to_value_vec(map.values())
|
||||
}
|
||||
RefValue::Array(ref list) => {
|
||||
Self::iter_to_value_vec(list.iter())
|
||||
}
|
||||
RefValue::Null => Vec::new(),
|
||||
_ => vec![self.vw.get_val().clone()]
|
||||
};
|
||||
|
||||
self.last_key = Some(ValueFilterKey::All);
|
||||
self.vw.replace(RefValue::Array(vec).into());
|
||||
trace!("step_in_all - {:?}", self.vw.get_val());
|
||||
&self.vw
|
||||
}
|
||||
|
||||
pub fn step_in_num(&mut self, key: &f64) -> &ValueWrapper {
|
||||
debug!("step_in_num");
|
||||
trace!("step_in_num - before: leaves {}, filterMode {} - {:?}"
|
||||
, self.vw.is_leaves()
|
||||
, self.filter_mode
|
||||
, self.vw.get_val());
|
||||
|
||||
let v = if self.vw.is_leaves() {
|
||||
let filter_mode = self.filter_mode;
|
||||
match self.vw.get_val().deref() {
|
||||
RefValue::Array(ref vec) => {
|
||||
let mut ret = Vec::new();
|
||||
for v in vec {
|
||||
let wrapper = Self::get_nested_array(v, *key, filter_mode);
|
||||
if !wrapper.is_null() {
|
||||
ret.push(wrapper.clone());
|
||||
}
|
||||
}
|
||||
RefValue::Array(ret).into()
|
||||
}
|
||||
_ => key.take_value(&self.vw.get_val())
|
||||
}
|
||||
} else {
|
||||
key.take_value(&self.vw.get_val())
|
||||
};
|
||||
|
||||
self.last_key = Some(ValueFilterKey::Num(key.index(&v)));
|
||||
self.vw.replace(v);
|
||||
trace!("step_in_num - after: {:?}", self.vw.get_val());
|
||||
&self.vw
|
||||
}
|
||||
|
||||
pub fn step_in_str(&mut self, key: &str) -> &ValueWrapper {
|
||||
self.step_in_string(&key.to_string())
|
||||
}
|
||||
|
||||
pub fn step_in_string(&mut self, key: &String) -> &ValueWrapper {
|
||||
debug!("step_in_string");
|
||||
trace!("step_in_string - before: {},{},{:?}"
|
||||
, self.vw.is_leaves()
|
||||
, self.filter_mode
|
||||
, self.vw.get_val());
|
||||
|
||||
let filter_mode = self.filter_mode;
|
||||
let is_leaves = self.vw.is_leaves();
|
||||
let val = match self.vw.get_val().deref() {
|
||||
RefValue::Array(ref vec) if is_leaves => {
|
||||
let mut buf = Vec::new();
|
||||
for mut v in vec {
|
||||
if v.is_array() {
|
||||
let vec = v.as_array().unwrap();
|
||||
let mut ret = Vec::new();
|
||||
for v in vec {
|
||||
let nested_wrapper = Self::get_nested_object(v, key, filter_mode);
|
||||
if !nested_wrapper.is_null() {
|
||||
ret.push(nested_wrapper.clone());
|
||||
}
|
||||
}
|
||||
buf.append(&mut ret);
|
||||
} else {
|
||||
match v.get(key.clone()) {
|
||||
Some(v) => buf.push(v.clone()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
RefValue::Array(buf).into()
|
||||
}
|
||||
RefValue::Array(ref vec) if !is_leaves => {
|
||||
let mut ret = Vec::new();
|
||||
for v in vec {
|
||||
let wrapper = Self::get_nested_object(v, key, filter_mode);
|
||||
if !wrapper.is_null() {
|
||||
ret.push(wrapper.clone());
|
||||
}
|
||||
}
|
||||
RefValue::Array(ret).into()
|
||||
}
|
||||
_ => {
|
||||
match self.vw.get_val().get(key.clone()) {
|
||||
Some(v) => v.clone(),
|
||||
_ => RefValue::Null.into()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
self.last_key = Some(ValueFilterKey::String(key.clone()));
|
||||
self.vw.replace(val);
|
||||
trace!("step_in_string - after: {},{},{:?}"
|
||||
, self.vw.is_leaves()
|
||||
, self.filter_mode
|
||||
, self.vw.get_val());
|
||||
&self.vw
|
||||
}
|
||||
}
|
||||
|
||||
pub struct JsonValueFilter {
|
||||
json: RefValueWrapper,
|
||||
filter_stack: Vec<ValueFilter>,
|
||||
token_stack: Vec<ParseToken>,
|
||||
term_stack: Vec<TermContext>,
|
||||
}
|
||||
|
||||
impl JsonValueFilter {
|
||||
pub fn new(json: &str) -> Result<Self, String> {
|
||||
let json: RefValue = serde_json::from_str(json)
|
||||
.map_err(|e| e.description().to_string())?;
|
||||
Ok(JsonValueFilter::new_from_value(json.into()))
|
||||
}
|
||||
|
||||
pub fn new_from_value(json: RefValueWrapper) -> Self {
|
||||
JsonValueFilter {
|
||||
json,
|
||||
filter_stack: Vec::new(),
|
||||
token_stack: Vec::new(),
|
||||
term_stack: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_peek_token_array(&self) -> bool {
|
||||
if let Some(ParseToken::Array) = self.token_stack.last() {
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn push_value_filter(&mut self, from_current: bool) {
|
||||
if from_current {
|
||||
self.filter_stack.last()
|
||||
.map(|vf| {
|
||||
ValueFilter::new(vf.vw.get_val().clone(), vf.vw.is_leaves(), from_current)
|
||||
})
|
||||
.and_then(|vf| {
|
||||
Some(self.filter_stack.push(vf))
|
||||
});
|
||||
} else {
|
||||
self.filter_stack.push({
|
||||
ValueFilter::new(self.json.clone(), false, from_current)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn replace_filter_stack(&mut self, v: RefValueWrapper, is_leaves: bool) {
|
||||
if self.filter_stack.is_empty() {
|
||||
self.filter_stack.push(ValueFilter::new(v, is_leaves, false));
|
||||
} else {
|
||||
match self.filter_stack.last_mut() {
|
||||
Some(vf) => {
|
||||
vf.vw.set_leaves(is_leaves);
|
||||
if v.is_null() {
|
||||
vf.vw.replace(v);
|
||||
} else if v.is_array() && v.as_array().unwrap().is_empty() {
|
||||
vf.vw.replace(RefValue::Null.into());
|
||||
} else if vf.vw.is_array() {
|
||||
vf.vw.replace(v);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_value(&self) -> Value {
|
||||
match self.filter_stack.last() {
|
||||
Some(v) => v.vw.into_value(),
|
||||
_ => Value::Null
|
||||
}
|
||||
}
|
||||
|
||||
pub fn take_value(&mut self) -> RefValueWrapper {
|
||||
match self.filter_stack.last_mut() {
|
||||
Some(v) => v.vw.get_val().clone(),
|
||||
_ => RefValue::Null.into()
|
||||
}
|
||||
}
|
||||
|
||||
fn token_union<F: ArrayIndex>(&mut self, indices: Vec<F>) {
|
||||
self.token_stack.pop();
|
||||
|
||||
match self.filter_stack.last_mut() {
|
||||
Some(ref mut vf) if vf.vw.is_array() && vf.vw.is_leaves() => {
|
||||
let mut ret = Vec::new();
|
||||
if let RefValue::Array(val) = vf.vw.get_val().deref() {
|
||||
for mut v in val {
|
||||
for i in &indices {
|
||||
let v = i.take_value(v);
|
||||
if !v.is_null() {
|
||||
ret.push(v.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
vf.vw.replace(RefValue::Array(ret).into());
|
||||
}
|
||||
Some(ref mut vf) if vf.vw.is_array() && !vf.vw.is_leaves() => {
|
||||
let mut ret = Vec::new();
|
||||
for i in indices {
|
||||
let wrapper = i.take_value(&vf.vw.get_val());
|
||||
if !wrapper.is_null() {
|
||||
ret.push(wrapper.clone());
|
||||
}
|
||||
}
|
||||
vf.vw.replace(RefValue::Array(ret).into());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn token_range(&mut self, from: Option<isize>, to: Option<isize>) {
|
||||
self.token_stack.pop();
|
||||
|
||||
fn _from_to<F: ArrayIndex>(from: Option<F>, to: Option<F>, val: &RefValueWrapper) -> (usize, usize) {
|
||||
let from = match from {
|
||||
Some(v) => v.index(val),
|
||||
_ => 0
|
||||
};
|
||||
let to = match to {
|
||||
Some(v) => v.index(val),
|
||||
_ => {
|
||||
if let RefValue::Array(v) = val.deref() {
|
||||
v.len()
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
};
|
||||
(from, to)
|
||||
}
|
||||
|
||||
fn _range(from: usize, to: usize, v: &RefValueWrapper) -> Vec<RefValueWrapper> {
|
||||
trace!("range - {}:{}", from, to);
|
||||
|
||||
(from..to).into_iter()
|
||||
.map(|i| i.take_value(v))
|
||||
.filter(|v| !v.is_null())
|
||||
.map(|v| v.clone())
|
||||
.collect()
|
||||
}
|
||||
|
||||
match self.filter_stack.last_mut() {
|
||||
Some(ref mut vf) if vf.vw.is_array() && vf.vw.is_leaves() => {
|
||||
let mut buf = Vec::new();
|
||||
if let RefValue::Array(vec) = vf.vw.get_val().deref() {
|
||||
for mut v in vec {
|
||||
let (from, to) = _from_to(from, to, v);
|
||||
let mut v: Vec<RefValueWrapper> = _range(from, to, v);
|
||||
buf.append(&mut v);
|
||||
}
|
||||
}
|
||||
vf.vw.replace(RefValue::Array(buf).into());
|
||||
}
|
||||
Some(ref mut vf) if vf.vw.is_array() && !vf.vw.is_leaves() => {
|
||||
let (from, to) = _from_to(from, to, &vf.vw.get_val());
|
||||
let vec: Vec<RefValueWrapper> = _range(from, to, vf.vw.get_val());
|
||||
vf.vw.replace(RefValue::Array(vec).into());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn token_key(&mut self, key: String) {
|
||||
match self.filter_stack.last_mut() {
|
||||
Some(vf) => {
|
||||
match self.token_stack.pop() {
|
||||
Some(ParseToken::In) | Some(ParseToken::Array) => {
|
||||
vf.step_in_string(&key);
|
||||
}
|
||||
Some(ParseToken::Leaves) => {
|
||||
vf.step_leaves_string(&key);
|
||||
}
|
||||
_ => {
|
||||
self.term_stack.push(TermContext::Constants(ExprTerm::String(key)));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn token_all(&mut self) {
|
||||
match self.filter_stack.last_mut() {
|
||||
Some(vf) => {
|
||||
match self.token_stack.pop() {
|
||||
Some(ParseToken::In) => {
|
||||
vf.step_in_all();
|
||||
}
|
||||
Some(ParseToken::Leaves) => {
|
||||
vf.step_leaves_all();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn token_end_array(&mut self) {
|
||||
trace!("array_eof - term_stack: {:?}", self.term_stack);
|
||||
trace!("array_eof - filter_stack: {:?}", self.filter_stack);
|
||||
|
||||
match self.term_stack.pop() {
|
||||
Some(TermContext::Constants(ExprTerm::Number(v))) => {
|
||||
match self.filter_stack.last_mut() {
|
||||
Some(vf) => {
|
||||
vf.step_in_num(&v);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Some(TermContext::Constants(ExprTerm::Bool(false))) => {
|
||||
self.replace_filter_stack(RefValue::Null.into(), false);
|
||||
}
|
||||
Some(TermContext::Json(_, vw)) => {
|
||||
self.replace_filter_stack(vw.get_val().clone(), vw.is_leaves());
|
||||
}
|
||||
_ => {
|
||||
match self.filter_stack.pop() {
|
||||
Some(mut vf) => {
|
||||
let is_leaves = vf.vw.is_leaves();
|
||||
match vf.vw.get_val().deref() {
|
||||
RefValue::Null | RefValue::Bool(false) => {
|
||||
self.replace_filter_stack(RefValue::Null.into(), is_leaves);
|
||||
}
|
||||
_ => {
|
||||
self.replace_filter_stack(vf.vw.get_val().clone(), is_leaves);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn token_op(&mut self, ft: &FilterToken) {
|
||||
let right = self.term_stack.pop();
|
||||
let left = self.term_stack.pop();
|
||||
|
||||
trace!("left {:?}", left);
|
||||
trace!("right {:?}", right);
|
||||
|
||||
if left.is_some() && right.is_some() {
|
||||
let left = left.unwrap();
|
||||
let right = right.unwrap();
|
||||
|
||||
let tc = match ft {
|
||||
FilterToken::Equal => left.eq(&right),
|
||||
FilterToken::NotEqual => left.ne(&right),
|
||||
FilterToken::Greater => left.gt(&right),
|
||||
FilterToken::GreaterOrEqual => left.ge(&right),
|
||||
FilterToken::Little => left.lt(&right),
|
||||
FilterToken::LittleOrEqual => left.le(&right),
|
||||
FilterToken::And => left.and(&right),
|
||||
FilterToken::Or => left.or(&right),
|
||||
};
|
||||
self.term_stack.push(tc);
|
||||
}
|
||||
|
||||
trace!("filter - {:?}", self.term_stack)
|
||||
}
|
||||
}
|
||||
|
||||
impl NodeVisitor for JsonValueFilter {
|
||||
fn visit_token(&mut self, token: ParseToken) {
|
||||
debug!("visit_token: {:?}", token);
|
||||
|
||||
match token {
|
||||
ParseToken::Absolute
|
||||
| ParseToken::Relative => {
|
||||
if self.is_peek_token_array() {
|
||||
self.token_stack.pop();
|
||||
}
|
||||
self.push_value_filter(ParseToken::Relative == token);
|
||||
}
|
||||
ParseToken::In
|
||||
| ParseToken::Leaves
|
||||
| ParseToken::Array => {
|
||||
self.token_stack.push(token);
|
||||
}
|
||||
ParseToken::ArrayEof => {
|
||||
self.token_end_array();
|
||||
}
|
||||
ParseToken::All => {
|
||||
self.token_all();
|
||||
}
|
||||
ParseToken::Key(key) => {
|
||||
self.token_key(key);
|
||||
}
|
||||
ParseToken::Filter(ref ft) => {
|
||||
self.token_op(ft);
|
||||
}
|
||||
ParseToken::Number(v) => {
|
||||
self.term_stack.push(TermContext::Constants(ExprTerm::Number(v)))
|
||||
}
|
||||
ParseToken::Range(from, to) => {
|
||||
self.token_range(from, to);
|
||||
}
|
||||
ParseToken::Union(v) => {
|
||||
self.token_union(v);
|
||||
}
|
||||
ParseToken::Eof => {
|
||||
debug!("visit_token eof");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn end_term(&mut self) {
|
||||
debug!("end_term");
|
||||
|
||||
if let Some(ParseToken::Array) = self.token_stack.last() {
|
||||
self.token_stack.pop();
|
||||
}
|
||||
|
||||
trace!("end_term - term_stack {:?}", self.term_stack);
|
||||
trace!("end_term - token_stack {:?}", self.token_stack);
|
||||
trace!("end_term - filter_stack {:?}", self.filter_stack);
|
||||
|
||||
if self.token_stack.is_empty() && self.filter_stack.len() > 1 {
|
||||
match self.filter_stack.pop() {
|
||||
Some(vf) => {
|
||||
self.term_stack.push(TermContext::Json(vf.last_key, vf.vw));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if match self.token_stack.last() {
|
||||
Some(ParseToken::Key(_))
|
||||
| Some(ParseToken::Number(_)) => true,
|
||||
_ => false
|
||||
} {
|
||||
match self.token_stack.pop() {
|
||||
Some(ParseToken::Key(ref v)) if v.eq_ignore_ascii_case("true") => {
|
||||
self.term_stack.push(TermContext::Constants(ExprTerm::Bool(true)))
|
||||
}
|
||||
Some(ParseToken::Key(ref v)) if v.eq_ignore_ascii_case("false") => {
|
||||
self.term_stack.push(TermContext::Constants(ExprTerm::Bool(false)))
|
||||
}
|
||||
Some(ParseToken::Key(v)) => {
|
||||
self.term_stack.push(TermContext::Constants(ExprTerm::String(v)))
|
||||
}
|
||||
Some(ParseToken::Number(v)) => {
|
||||
self.term_stack.push(TermContext::Constants(ExprTerm::Number(v)))
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
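The ArrayIndex impls near the top of this file are what give paths like $..book[-2] their meaning: a negative index is added to the array length before being used. A self-contained restatement of just that arithmetic (the helper name resolve is made up for illustration):

    // Stand-alone sketch of the rule in `impl ArrayIndex for isize` above.
    fn resolve(len: usize, idx: isize) -> usize {
        if idx < 0 { (len as isize + idx) as usize } else { idx as usize }
    }

    fn main() {
        assert_eq!(resolve(4, -2), 2); // $..book[-2] on a 4-element array -> index 2
        assert_eq!(resolve(4, 1), 1);  // non-negative indices pass through unchanged
    }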
@@ -1,283 +0,0 @@
|
||||
use std::ops::Deref;
|
||||
|
||||
use indexmap::IndexSet;
|
||||
use serde_json::Value;
|
||||
|
||||
use ref_value::model::*;
|
||||
|
||||
use super::cmp::*;
|
||||
use super::term::*;
|
||||
use super::value_filter::*;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ValueWrapper {
|
||||
val: RefValueWrapper,
|
||||
is_leaves: bool,
|
||||
}
|
||||
|
||||
impl ValueWrapper {
|
||||
pub fn new(val: RefValueWrapper, leaves: bool) -> Self {
|
||||
ValueWrapper { val, is_leaves: leaves }
|
||||
}
|
||||
|
||||
pub fn is_leaves(&self) -> bool {
|
||||
self.is_leaves
|
||||
}
|
||||
|
||||
pub fn set_leaves(&mut self, is_leaves: bool) {
|
||||
self.is_leaves = is_leaves;
|
||||
}
|
||||
|
||||
pub fn cmp(&self, other: &ValueWrapper, cmp_type: CmpType) -> TermContext {
|
||||
match cmp_type {
|
||||
CmpType::Eq => {
|
||||
TermContext::Json(None, self.intersect(other))
|
||||
}
|
||||
CmpType::Ne => {
|
||||
TermContext::Json(None, self.except(other))
|
||||
}
|
||||
CmpType::Gt | CmpType::Ge | CmpType::Lt | CmpType::Le => {
|
||||
TermContext::Constants(ExprTerm::Bool(false))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn cmp_with_term<F: PrivCmp>(val: &RefValueWrapper, et: &ExprTerm, cmp_fn: &F, default: bool, reverse: bool) -> bool {
|
||||
match val.deref() {
|
||||
RefValue::Bool(ref v1) => {
|
||||
match et {
|
||||
ExprTerm::Bool(v2) => if reverse { cmp_fn.cmp_bool(v2, v1) } else { cmp_fn.cmp_bool(v1, v2) },
|
||||
_ => default
|
||||
}
|
||||
}
|
||||
RefValue::Number(ref v1) => match et {
|
||||
ExprTerm::Number(v2) => if reverse { cmp_fn.cmp_f64(v2, &v1.as_f64().unwrap()) } else { cmp_fn.cmp_f64(&v1.as_f64().unwrap(), v2) },
|
||||
_ => default
|
||||
},
|
||||
RefValue::String(ref v1) => {
|
||||
match et {
|
||||
ExprTerm::String(v2) => if reverse { cmp_fn.cmp_string(v2, v1) } else { cmp_fn.cmp_string(v1, v2) },
|
||||
_ => default
|
||||
}
|
||||
}
|
||||
_ => default
|
||||
}
|
||||
}
|
||||
|
||||
fn take_object_in_array<F: PrivCmp>(&self, key: &String, et: &ExprTerm, cmp: &F, reverse: bool) -> Option<Self> {
|
||||
fn _filter_with_object<F: Fn(&RefValueWrapper) -> bool>(v: &RefValueWrapper, key: &String, fun: F) -> bool {
|
||||
match v.deref() {
|
||||
RefValue::Object(map) => {
|
||||
match map.get(key) {
|
||||
Some(val) => fun(val),
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
match self.val.deref() {
|
||||
RefValue::Array(vec) => {
|
||||
let mut ret = Vec::new();
|
||||
for v in vec {
|
||||
if _filter_with_object(v, key, |vv| {
|
||||
Self::cmp_with_term(vv, et, cmp, false, reverse)
|
||||
}) {
|
||||
ret.push(v.clone());
|
||||
}
|
||||
}
|
||||
|
||||
Some(ValueWrapper::new(RefValue::Array(ret).into(), false))
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
fn take_with_key_type<F: PrivCmp>(&self, key: &Option<ValueFilterKey>, et: &ExprTerm, cmp: &F, reverse: bool) -> Option<Self> {
|
||||
match key {
|
||||
Some(ValueFilterKey::String(key)) => {
|
||||
self.take_object_in_array(key, et, cmp, reverse)
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn take_with<F: PrivCmp>(&self, key: &Option<ValueFilterKey>, et: &ExprTerm, cmp: F, reverse: bool) -> Self {
|
||||
match self.take_with_key_type(key, et, &cmp, reverse) {
|
||||
Some(vw) => vw,
|
||||
_ => {
|
||||
match &(*self.val) {
|
||||
RefValue::Array(vec) => {
|
||||
let mut ret = Vec::new();
|
||||
for v in vec {
|
||||
if Self::cmp_with_term(v, et, &cmp, false, reverse) {
|
||||
ret.push(v.clone());
|
||||
}
|
||||
}
|
||||
ValueWrapper::new(RefValue::Array(ret).into(), false)
|
||||
}
|
||||
_ => {
|
||||
if Self::cmp_with_term(&self.val, et, &cmp, false, reverse) {
|
||||
ValueWrapper::new(self.val.clone(), false)
|
||||
} else {
|
||||
ValueWrapper::new(RefValue::Null.into(), false)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn replace(&mut self, val: RefValueWrapper) {
|
||||
let is_null = match val.deref() {
|
||||
RefValue::Array(v) => if v.is_empty() { true } else { false },
|
||||
RefValue::Object(m) => if m.is_empty() { true } else { false },
|
||||
_ => val.is_null()
|
||||
};
|
||||
self.val = if is_null {
|
||||
RefValue::Null.into()
|
||||
} else {
|
||||
val
|
||||
};
|
||||
}
|
||||
|
||||
pub fn get_val(&self) -> &RefValueWrapper {
|
||||
&self.val
|
||||
}
|
||||
|
||||
pub fn into_value(&self) -> Value {
|
||||
self.get_val().into()
|
||||
}
|
||||
|
||||
pub fn is_array(&self) -> bool {
|
||||
self.val.is_array()
|
||||
}
|
||||
|
||||
fn into_hashset(&self) -> IndexSet<RefValueWrapper> {
|
||||
trace!("into_hashset");
|
||||
let mut hashset = IndexSet::new();
|
||||
match self.val.deref() {
|
||||
RefValue::Array(ref v1) => {
|
||||
for v in v1 {
|
||||
hashset.insert(v.clone());
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
hashset.insert(self.val.clone());
|
||||
}
|
||||
}
|
||||
hashset
|
||||
}
|
||||
|
||||
pub fn except(&self, other: &Self) -> Self {
|
||||
trace!("except");
|
||||
let hashset = self.into_hashset();
|
||||
let mut ret: IndexSet<RefValueWrapper> = IndexSet::new();
|
||||
match other.val.deref() {
|
||||
RefValue::Array(ref v1) => {
|
||||
for v in v1 {
|
||||
if !hashset.contains(v) {
|
||||
ret.insert(v.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !hashset.contains(&other.val) {
|
||||
ret.insert(other.val.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let vec = ret.into_iter().map(|v| v.clone()).collect();
|
||||
ValueWrapper::new(RefValue::Array(vec).into(), false)
|
||||
}
|
||||
|
||||
pub fn intersect(&self, other: &Self) -> Self {
|
||||
trace!("intersect");
|
||||
let hashset = self.into_hashset();
|
||||
let mut ret: IndexSet<RefValueWrapper> = IndexSet::new();
|
||||
match other.val.deref() {
|
||||
RefValue::Array(ref v1) => {
|
||||
for v in v1 {
|
||||
if hashset.contains(v) {
|
||||
ret.insert(v.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if hashset.contains(&other.val) {
|
||||
ret.insert(other.val.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let vec = ret.into_iter().map(|v| v.clone()).collect();
|
||||
ValueWrapper::new(RefValue::Array(vec).into(), false)
|
||||
}
|
||||
|
||||
pub fn union(&self, other: &Self) -> Self {
|
||||
trace!("union");
|
||||
let mut hashset = self.into_hashset();
|
||||
match other.val.deref() {
|
||||
RefValue::Array(ref v1) => {
|
||||
for v in v1 {
|
||||
if !hashset.contains(v) {
|
||||
hashset.insert(v.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !hashset.contains(&other.val) {
|
||||
hashset.insert(other.val.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut vw = ValueWrapper::new(RefValue::Null.into(), false);
|
||||
let list = hashset.into_iter().map(|val| val.clone()).collect();
|
||||
vw.replace(RefValue::Array(list).into());
|
||||
vw
|
||||
}
|
||||
|
||||
pub fn into_term(&self, key: &Option<ValueFilterKey>) -> TermContext {
|
||||
match self.val.deref() {
|
||||
RefValue::String(ref s) => TermContext::Constants(ExprTerm::String(s.clone())),
|
||||
RefValue::Number(ref n) => TermContext::Constants(ExprTerm::Number(n.as_f64().unwrap())),
|
||||
RefValue::Bool(b) => TermContext::Constants(ExprTerm::Bool(*b)),
|
||||
_ => TermContext::Json(match key {
|
||||
Some(vk) => Some(vk.clone()),
|
||||
_ => None
|
||||
}, ValueWrapper::new(self.val.clone(), false))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn filter(&self, key: &Option<ValueFilterKey>) -> Self {
|
||||
trace!("filter");
|
||||
let v = match self.val.deref() {
|
||||
RefValue::Array(ref vec) => {
|
||||
let mut ret = Vec::new();
|
||||
for v in vec {
|
||||
if let Some(ValueFilterKey::String(k)) = key {
|
||||
if v.get(k.clone()).is_some() {
|
||||
ret.push(v.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
RefValue::Array(ret).into()
|
||||
}
|
||||
RefValue::Object(ref map) => {
|
||||
match key {
|
||||
Some(ValueFilterKey::String(k)) => match map.get(k) {
|
||||
Some(v) => v.clone(),
|
||||
_ => RefValue::Null.into()
|
||||
},
|
||||
_ => RefValue::Null.into()
|
||||
}
|
||||
}
|
||||
_ => self.val.clone()
|
||||
};
|
||||
|
||||
ValueWrapper::new(v, false)
|
||||
}
|
||||
}
|
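The except, intersect and union methods above back the filter operators: '&&' intersects two JSON result sets, '||' unions them, '==' intersects, and '!=' keeps what is in the other set but not in this one. A minimal stand-in using indexmap::IndexSet directly (not the ValueWrapper API itself), just to pin down the set semantics:

    use indexmap::IndexSet;

    fn set_semantics_demo() {
        let a: IndexSet<i32> = vec![1, 2, 3].into_iter().collect();
        let b: IndexSet<i32> = vec![2, 3, 4].into_iter().collect();

        let or_set: IndexSet<i32> = a.union(&b).cloned().collect();         // `||`
        let and_set: IndexSet<i32> = a.intersection(&b).cloned().collect(); // `&&`, `==`
        let ne_set: IndexSet<i32> = b.difference(&a).cloned().collect();    // `!=`: other minus self

        assert_eq!(or_set.len(), 4);
        assert_eq!(and_set.len(), 2);
        assert!(ne_set.contains(&4) && ne_set.len() == 1);
    }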
590
src/lib.rs
@@ -1,293 +1,228 @@
|
||||
//! JsonPath implementation for Rust
|
||||
//! JsonPath implementation written in Rust.
|
||||
//!
|
||||
//! # Example
|
||||
//! ```
|
||||
//! extern crate jsonpath_lib as jsonpath;
|
||||
//! #[macro_use] extern crate serde_json;
|
||||
//! extern crate jsonpath_lib as jsonpath;
|
||||
//! #[macro_use] extern crate serde_json;
|
||||
//! let json_obj = json!({
|
||||
//! "store": {
|
||||
//! "book": [
|
||||
//! {
|
||||
//! "category": "reference",
|
||||
//! "author": "Nigel Rees",
|
||||
//! "title": "Sayings of the Century",
|
||||
//! "price": 8.95
|
||||
//! },
|
||||
//! {
|
||||
//! "category": "fiction",
|
||||
//! "author": "Evelyn Waugh",
|
||||
//! "title": "Sword of Honour",
|
||||
//! "price": 12.99
|
||||
//! },
|
||||
//! {
|
||||
//! "category": "fiction",
|
||||
//! "author": "Herman Melville",
|
||||
//! "title": "Moby Dick",
|
||||
//! "isbn": "0-553-21311-3",
|
||||
//! "price": 8.99
|
||||
//! },
|
||||
//! {
|
||||
//! "category": "fiction",
|
||||
//! "author": "J. R. R. Tolkien",
|
||||
//! "title": "The Lord of the Rings",
|
||||
//! "isbn": "0-395-19395-8",
|
||||
//! "price": 22.99
|
||||
//! }
|
||||
//! ],
|
||||
//! "bicycle": {
|
||||
//! "color": "red",
|
||||
//! "price": 19.95
|
||||
//! }
|
||||
//! },
|
||||
//! "expensive": 10
|
||||
//! });
|
||||
//!
|
||||
//! let json_obj = json!({
|
||||
//! "store": {
|
||||
//! "book": [
|
||||
//! {
|
||||
//! "category": "reference",
|
||||
//! "author": "Nigel Rees",
|
||||
//! "title": "Sayings of the Century",
|
||||
//! "price": 8.95
|
||||
//! },
|
||||
//! {
|
||||
//! "category": "fiction",
|
||||
//! "author": "Evelyn Waugh",
|
||||
//! "title": "Sword of Honour",
|
||||
//! "price": 12.99
|
||||
//! },
|
||||
//! {
|
||||
//! "category": "fiction",
|
||||
//! "author": "Herman Melville",
|
||||
//! "title": "Moby Dick",
|
||||
//! "isbn": "0-553-21311-3",
|
||||
//! "price": 8.99
|
||||
//! },
|
||||
//! {
|
||||
//! "category": "fiction",
|
||||
//! "author": "J. R. R. Tolkien",
|
||||
//! "title": "The Lord of the Rings",
|
||||
//! "isbn": "0-395-19395-8",
|
||||
//! "price": 22.99
|
||||
//! }
|
||||
//! ],
|
||||
//! "bicycle": {
|
||||
//! "color": "red",
|
||||
//! "price": 19.95
|
||||
//! }
|
||||
//! },
|
||||
//! "expensive": 10
|
||||
//! });
|
||||
//! let mut selector = jsonpath::selector(&json_obj);
|
||||
//!
|
||||
//! let mut selector = jsonpath::selector(&json_obj);
|
||||
//! assert_eq!(selector("$.store.book[*].author").unwrap(),
|
||||
//! vec![
|
||||
//! "Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. Tolkien"
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $.store.book[*].author
|
||||
//! //
|
||||
//! let json = selector("$.store.book[*].author").unwrap();
|
||||
//! let ret = json!(["Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"]);
|
||||
//! assert_eq!(json, ret);
|
||||
//! assert_eq!(selector("$..author").unwrap(),
|
||||
//! vec![
|
||||
//! "Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. Tolkien"
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $..author
|
||||
//! //
|
||||
//! let json = selector("$..author").unwrap();
|
||||
//! let ret = json!(["Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"]);
|
||||
//! assert_eq!(json, ret);
|
||||
//! assert_eq!(selector("$.store.*").unwrap(),
|
||||
//! vec![
|
||||
//! &json!([
|
||||
//! { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95 },
|
||||
//! { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99 },
|
||||
//! { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99 },
|
||||
//! { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99 }
|
||||
//! ]),
|
||||
//! &json!({ "color": "red", "price": 19.95 })
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $.store.*
|
||||
//! //
|
||||
//! let json = selector("$.store.*").unwrap();
|
||||
//! let ret = json!([
|
||||
//! [
|
||||
//! {"category" : "reference", "author" : "Nigel Rees","title" : "Sayings of the Century", "price" : 8.95},
|
||||
//! {"category" : "fiction", "author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99},
|
||||
//! {"category" : "fiction", "author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
|
||||
//! {"category" : "fiction", "author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
|
||||
//! ],
|
||||
//! {"color" : "red","price" : 19.95},
|
||||
//! ]);
|
||||
//! assert_eq!(ret, json);
|
||||
//! assert_eq!(selector("$.store..price").unwrap(),
|
||||
//! vec![
|
||||
//! 8.95, 12.99, 8.99, 22.99, 19.95
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $.store..price
|
||||
//! //
|
||||
//! let json = selector("$.store..price").unwrap();
|
||||
//! let ret = json!([8.95, 12.99, 8.99, 22.99, 19.95]);
|
||||
//! assert_eq!(ret, json);
|
||||
//! assert_eq!(selector("$..book[2]").unwrap(),
|
||||
//! vec![
|
||||
//! &json!({
|
||||
//! "category" : "fiction",
|
||||
//! "author" : "Herman Melville",
|
||||
//! "title" : "Moby Dick",
|
||||
//! "isbn" : "0-553-21311-3",
|
||||
//! "price" : 8.99
|
||||
//! })
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $..book[2]
|
||||
//! //
|
||||
//! let json = selector("$..book[2]").unwrap();
|
||||
//! let ret = json!([{
|
||||
//! "category" : "fiction",
|
||||
//! "author" : "Herman Melville",
|
||||
//! "title" : "Moby Dick",
|
||||
//! "isbn" : "0-553-21311-3",
|
||||
//! "price" : 8.99
|
||||
//! }]);
|
||||
//! assert_eq!(ret, json);
|
||||
//! assert_eq!(selector("$..book[-2]").unwrap(),
|
||||
//! vec![
|
||||
//! &json!({
|
||||
//! "category" : "fiction",
|
||||
//! "author" : "Herman Melville",
|
||||
//! "title" : "Moby Dick",
|
||||
//! "isbn" : "0-553-21311-3",
|
||||
//! "price" : 8.99
|
||||
//! })
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $..book[-2]
|
||||
//! //
|
||||
//! let json = selector("$..book[-2]").unwrap();
|
||||
//! let ret = json!([{
|
||||
//! "category" : "fiction",
|
||||
//! "author" : "Herman Melville",
|
||||
//! "title" : "Moby Dick",
|
||||
//! "isbn" : "0-553-21311-3",
|
||||
//! "price" : 8.99
|
||||
//! }]);
|
||||
//! assert_eq!(ret, json);
|
||||
//! assert_eq!(selector("$..book[0,1]").unwrap(),
|
||||
//! vec![
|
||||
//! &json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
//! &json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $..book[0,1]
|
||||
//! //
|
||||
//! let json = selector("$..book[0,1]").unwrap();
|
||||
//! let ret = json!([
|
||||
//! {"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95},
|
||||
//! {"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99}
|
||||
//! ]);
|
||||
//! assert_eq!(ret, json);
|
||||
//! assert_eq!(selector("$..book[:2]").unwrap(),
|
||||
//! vec![
|
||||
//! &json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
//! &json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $..book[:2]
|
||||
//! //
|
||||
//! let json = selector("$..book[:2]").unwrap();
|
||||
//! let ret = json!([
|
||||
//! {"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95},
|
||||
//! {"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99}
|
||||
//! ]);
|
||||
//! assert_eq!(ret, json);
|
||||
//! assert_eq!(selector("$..book[:2]").unwrap(),
|
||||
//! vec![
|
||||
//! &json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
//! &json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $..book[2:]
|
||||
//! //
|
||||
//! let json = selector("$..book[2:]").unwrap();
|
||||
//! let ret = json!([
|
||||
//! {"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
|
||||
//! {"category" : "fiction","author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
|
||||
//! ]);
|
||||
//! assert_eq!(ret, json);
|
||||
//! assert_eq!(selector("$..book[?(@.isbn)]").unwrap(),
|
||||
//! vec![
|
||||
//! &json!({"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99}),
|
||||
//! &json!({"category" : "fiction","author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99})
|
||||
//! ]);
|
||||
//!
|
||||
//! //
|
||||
//! // $..book[?(@.isbn)]
|
||||
//! //
|
||||
//! let json = selector("$..book[?(@.isbn)]").unwrap();
|
||||
//! let ret = json!([
|
||||
//! {"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
|
||||
//! {"category" : "fiction","author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
|
||||
//! ]);
|
||||
//! assert_eq!(ret, json);
|
||||
//!
|
||||
//! //
|
||||
//! // $.store.book[?(@.price < 10)]
|
||||
//! //
|
||||
//! let json = selector("$.store.book[?(@.price < 10)]").unwrap();
|
||||
//! let ret = json!([
|
||||
//! {"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95},
|
||||
//! {"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99}
|
||||
//! ]);
|
||||
//! assert_eq!(ret, json);
|
||||
//! assert_eq!(selector("$.store.book[?(@.price < 10)]").unwrap(),
|
||||
//! vec![
|
||||
//! &json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
|
||||
//! &json!({"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99})
|
||||
//! ]);
|
||||
//! ```
|
||||
|
||||
extern crate array_tool;
|
||||
extern crate core;
|
||||
extern crate env_logger;
|
||||
extern crate indexmap;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
#[macro_use]
|
||||
extern crate serde;
|
||||
extern crate serde_json;
|
||||
|
||||
use std::error::Error;
|
||||
use std::ops::Deref;
|
||||
use std::result;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
use filter::value_filter::JsonValueFilter;
|
||||
use parser::parser::{NodeVisitor, Parser};
|
||||
use ref_value::model::RefValueWrapper;
|
||||
pub use parser::Parser; // TODO private
|
||||
pub use select::JsonPathError;
|
||||
pub use select::{Selector, SelectorMut};
|
||||
|
||||
#[doc(hidden)]
|
||||
pub mod parser;
|
||||
mod ffi;
|
||||
#[doc(hidden)]
|
||||
pub mod filter;
|
||||
mod parser;
|
||||
#[doc(hidden)]
|
||||
pub mod ref_value;
|
||||
mod select;
|
||||
|
||||
fn query_from_str(json: &str, path: &str) -> result::Result<JsonValueFilter, String> {
|
||||
let mut jf = JsonValueFilter::new(json)?;
|
||||
let mut parser = Parser::new(path);
|
||||
parser.parse(&mut jf)?;
|
||||
Ok(jf)
|
||||
}
|
||||
|
||||
fn query_from_json_wrapper(json_wrapper: RefValueWrapper, path: &str) -> result::Result<JsonValueFilter, String> {
|
||||
let mut jf = JsonValueFilter::new_from_value(json_wrapper);
|
||||
let mut parser = Parser::new(path);
|
||||
parser.parse(&mut jf)?;
|
||||
Ok(jf)
|
||||
}
|
||||
|
||||
/// It is a higher-order function that compiles a JsonPath and then returns a function.
|
||||
///
|
||||
/// The returned function can be reused for different JSON objects.
|
||||
/// It is a higher-order function: it compiles a JsonPath and then returns a closure that takes a JSON value as its argument. If you need to reuse a JsonPath, this is good for performance.
|
||||
///
|
||||
/// ```rust
|
||||
/// extern crate jsonpath_lib as jsonpath;
|
||||
/// #[macro_use] extern crate serde_json;
|
||||
///
|
||||
/// let mut template = jsonpath::compile("$..friends[0]");
|
||||
/// let mut first_friend = jsonpath::compile("$..friends[0]");
|
||||
///
|
||||
/// let json_obj = json!({
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// ]});
|
||||
///
|
||||
/// let json = template(&json_obj).unwrap();
|
||||
/// let ret = json!([
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구1", "age": 20}
|
||||
/// let json = first_friend(&json_obj).unwrap();
|
||||
///
|
||||
/// assert_eq!(json, vec![
|
||||
/// &json!({"name": "친구3", "age": 30}),
|
||||
/// &json!({"name": "친구1", "age": 20})
|
||||
/// ]);
|
||||
/// assert_eq!(json, ret);
|
||||
/// ```
|
||||
pub fn compile<'a>(path: &'a str) -> impl FnMut(&Value) -> result::Result<Value, String> + 'a {
|
||||
let mut parser = Parser::new(path);
|
||||
let node = parser.compile();
|
||||
move |json| {
|
||||
match &node {
|
||||
Ok(n) => {
|
||||
let mut jf = JsonValueFilter::new_from_value(json.into());
|
||||
jf.visit(n.clone());
|
||||
Ok((&jf.take_value()).into())
|
||||
}
|
||||
Err(e) => Err(e.clone())
|
||||
pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPathError> {
|
||||
let node = parser::Parser::compile(path);
|
||||
move |json| match &node {
|
||||
Ok(node) => {
|
||||
let mut selector = Selector::default();
|
||||
selector.compiled_path(node).value(json).select()
|
||||
}
|
||||
Err(e) => Err(JsonPathError::Path(e.to_string())),
|
||||
}
|
||||
}
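// A minimal reuse sketch (illustrative only, not part of this change): the doc
// example above applies the compiled closure to a single JSON value, while the
// main benefit of `compile` is paying the parse cost once and reusing the closure
// for several values. Assumes an edition/toolchain where `use serde_json::json;`
// imports the macro.
#[cfg(test)]
mod compile_reuse_sketch {
    use super::compile;
    use serde_json::json;

    #[test]
    fn reuse_compiled_path() {
        let mut first_friend = compile("$..friends[0]");

        let a = json!({"friends": [{"name": "a", "age": 1}]});
        let b = json!({"friends": [{"name": "b"}]});

        // The same compiled closure is applied to two different JSON values.
        assert_eq!(first_friend(&a).unwrap(), vec![&json!({"name": "a", "age": 1})]);
        assert_eq!(first_friend(&b).unwrap(), vec![&json!({"name": "b"})]);
    }
}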
|
||||
|
||||
/// It is a high-order function that returns a function.
///
/// The returned function takes a jsonpath as its argument and returns a serde_json::value::Value, so you can use different JsonPaths against one JSON object.
/// It is a high-order function: it returns a closure that takes a jsonpath string as its argument. You can use different jsonpaths against one JSON object.
|
||||
///
|
||||
/// ```rust
|
||||
/// extern crate jsonpath_lib as jsonpath;
|
||||
/// #[macro_use] extern crate serde_json;
|
||||
///
|
||||
/// let json_obj = json!({
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// ]});
|
||||
///
|
||||
/// let mut selector = jsonpath::selector(&json_obj);
|
||||
///
|
||||
/// let json = selector("$..friends[0]").unwrap();
|
||||
/// let ret = json!([
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구1", "age": 20}
|
||||
///
|
||||
/// assert_eq!(json, vec![
|
||||
/// &json!({"name": "친구3", "age": 30}),
|
||||
/// &json!({"name": "친구1", "age": 20})
|
||||
/// ]);
|
||||
/// assert_eq!(json, ret);
|
||||
///
|
||||
/// let json = selector("$..friends[1]").unwrap();
|
||||
/// let ret = json!([
|
||||
/// {"name": "친구4"},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
///
|
||||
/// assert_eq!(json, vec![
|
||||
/// &json!({"name": "친구4"}),
|
||||
/// &json!({"name": "친구2", "age": 20})
|
||||
/// ]);
|
||||
/// assert_eq!(json, ret);
|
||||
/// ```
|
||||
pub fn selector(json: &Value) -> impl FnMut(&str) -> result::Result<Value, String> {
|
||||
let wrapper: RefValueWrapper = json.into();
|
||||
move |path: &str| {
|
||||
let mut jf = query_from_json_wrapper(wrapper.clone(), path)?;
|
||||
Ok((&jf.take_value()).into())
|
||||
}
|
||||
#[allow(clippy::needless_lifetimes)]
|
||||
pub fn selector<'a>(json: &'a Value) -> impl FnMut(&str) -> Result<Vec<&'a Value>, JsonPathError> {
|
||||
let mut selector = Selector::default();
|
||||
let _ = selector.value(json);
|
||||
move |path: &str| selector.str_path(path)?.reset_value().select()
|
||||
}
|
||||
|
||||
/// It is a high-order function that returns a function.
///
/// The returned function takes a jsonpath as its argument and returns a serde::Deserialize value, so you can use different JsonPaths against one JSON object.
/// It is the same as the `selector` function, but it deserializes the result into the given type `T`.
|
||||
///
|
||||
/// ```rust
|
||||
/// extern crate jsonpath_lib as jsonpath;
|
||||
@ -297,26 +232,27 @@ pub fn selector(json: &Value) -> impl FnMut(&str) -> result::Result<Value, Strin
|
||||
/// use serde::{Deserialize, Serialize};
|
||||
///
|
||||
/// let json_obj = json!({
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// ]});
|
||||
///
|
||||
/// #[derive(Serialize, Deserialize, PartialEq, Debug)]
|
||||
/// #[derive(Deserialize, PartialEq, Debug)]
|
||||
/// struct Friend {
|
||||
/// name: String,
|
||||
/// age: Option<u8>,
|
||||
/// }
|
||||
///
|
||||
/// let mut selector = jsonpath::selector_as::<Vec<Friend>>(&json_obj);
|
||||
/// let mut selector = jsonpath::selector_as::<Friend>(&json_obj);
|
||||
///
|
||||
/// let json = selector("$..friends[0]").unwrap();
|
||||
///
|
||||
/// let ret = vec!(
|
||||
/// Friend { name: "친구3".to_string(), age: Some(30) },
|
||||
/// Friend { name: "친구1".to_string(), age: Some(20) }
|
||||
@ -324,67 +260,52 @@ pub fn selector(json: &Value) -> impl FnMut(&str) -> result::Result<Value, Strin
|
||||
/// assert_eq!(json, ret);
|
||||
///
|
||||
/// let json = selector("$..friends[1]").unwrap();
|
||||
///
|
||||
/// let ret = vec!(
|
||||
/// Friend { name: "친구4".to_string(), age: None },
|
||||
/// Friend { name: "친구2".to_string(), age: Some(20) }
|
||||
/// );
|
||||
///
|
||||
/// assert_eq!(json, ret);
|
||||
/// ```
|
||||
pub fn selector_as<T: serde::de::DeserializeOwned>(json: &Value) -> impl FnMut(&str) -> result::Result<T, String> {
|
||||
let wrapper: RefValueWrapper = json.into();
|
||||
move |path: &str| {
|
||||
let mut jf = query_from_json_wrapper(wrapper.clone(), path)?;
|
||||
T::deserialize(jf.take_value().deref()).map_err(|e| format!("{:?}", e))
|
||||
}
|
||||
pub fn selector_as<T: serde::de::DeserializeOwned>(
|
||||
json: &Value,
|
||||
) -> impl FnMut(&str) -> Result<Vec<T>, JsonPathError> + '_ {
|
||||
let mut selector = Selector::default();
|
||||
let _ = selector.value(json);
|
||||
move |path: &str| selector.str_path(path)?.reset_value().select_as()
|
||||
}
|
||||
|
||||
#[deprecated(since = "0.1.4", note = "Please use the selector function instead")]
|
||||
pub fn reader(json: &Value) -> impl FnMut(&str) -> result::Result<Value, String> {
|
||||
selector(json)
|
||||
}
|
||||
|
||||
/// Select from a JSON object. It returns a serde_json::value::Value.
/// It is a simple select function, but it compiles the jsonpath argument on every call.
|
||||
///
|
||||
/// ```rust
|
||||
/// extern crate jsonpath_lib as jsonpath;
|
||||
/// #[macro_use] extern crate serde_json;
|
||||
///
|
||||
/// let json_obj = json!({
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// ]});
|
||||
///
|
||||
/// let json = jsonpath::select(&json_obj, "$..friends[0]").unwrap();
|
||||
///
|
||||
/// let ret = json!([
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구1", "age": 20}
|
||||
/// assert_eq!(json, vec![
|
||||
/// &json!({"name": "친구3", "age": 30}),
|
||||
/// &json!({"name": "친구1", "age": 20})
|
||||
/// ]);
|
||||
/// assert_eq!(json, ret);
|
||||
/// ```
|
||||
pub fn select(json: &Value, path: &str) -> result::Result<Value, String> {
|
||||
let mut jf = query_from_json_wrapper(json.into(), path)?;
|
||||
Ok((&jf.take_value()).into())
|
||||
pub fn select<'a>(json: &'a Value, path: &str) -> Result<Vec<&'a Value>, JsonPathError> {
|
||||
Selector::default().str_path(path)?.value(json).select()
|
||||
}
|
||||
|
||||
#[deprecated(since = "0.1.4", note = "Please use the select function instead")]
|
||||
pub fn read(json: &Value, path: &str) -> result::Result<Value, String> {
|
||||
select(json, path)
|
||||
}
|
||||
|
||||
#[deprecated(since = "0.1.7", note = "Please use the select_as_str function instead")]
|
||||
pub fn select_str(json: &str, path: &str) -> result::Result<String, String> {
|
||||
select_as_str(json, path)
|
||||
}
|
||||
|
||||
/// Select from a JSON object. It returns the result as a String.
/// It is the same as the `select` function, but it returns the result as a string.
|
||||
///
|
||||
/// ```rust
|
||||
/// extern crate jsonpath_lib as jsonpath;
|
||||
@ -407,12 +328,13 @@ pub fn select_str(json: &str, path: &str) -> result::Result<String, String> {
|
||||
///
|
||||
/// assert_eq!(ret, r#"[{"name":"친구3","age":30},{"name":"친구1","age":20}]"#);
|
||||
/// ```
|
||||
pub fn select_as_str(json: &str, path: &str) -> result::Result<String, String> {
|
||||
let mut jf = query_from_str(json, path)?;
|
||||
serde_json::to_string(&jf.take_value().deref()).map_err(|e| e.description().to_string())
|
||||
pub fn select_as_str(json_str: &str, path: &str) -> Result<String, JsonPathError> {
|
||||
let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
|
||||
let ret = Selector::default().str_path(path)?.value(&json).select()?;
|
||||
serde_json::to_string(&ret).map_err(|e| JsonPathError::Serde(e.to_string()))
|
||||
}
|
||||
|
||||
/// Select from a JSON object. It returns a deserialized instance of type `T`.
/// It is the same as the `select` function, but it deserializes the result into the given type `T`.
|
||||
///
|
||||
/// ```rust
|
||||
/// extern crate jsonpath_lib as jsonpath;
|
||||
@ -428,7 +350,7 @@ pub fn select_as_str(json: &str, path: &str) -> result::Result<String, String> {
|
||||
/// phones: Vec<String>,
|
||||
/// }
|
||||
///
|
||||
/// let ret: Person = jsonpath::select_as(r#"
|
||||
/// let ret: Vec<Person> = jsonpath::select_as(r#"
|
||||
/// {
|
||||
/// "person":
|
||||
/// {
|
||||
@ -448,9 +370,101 @@ pub fn select_as_str(json: &str, path: &str) -> result::Result<String, String> {
|
||||
/// phones: vec!["+44 1234567".to_string(), "+44 2345678".to_string()],
|
||||
/// };
|
||||
///
|
||||
/// assert_eq!(person, ret);
|
||||
/// assert_eq!(ret[0], person);
|
||||
/// ```
|
||||
pub fn select_as<T: serde::de::DeserializeOwned>(json: &str, path: &str) -> result::Result<T, String> {
|
||||
let mut jf = query_from_str(json, path)?;
|
||||
T::deserialize(jf.take_value().deref()).map_err(|e| e.description().to_string())
|
||||
}
|
||||
pub fn select_as<T: serde::de::DeserializeOwned>(
|
||||
json_str: &str,
|
||||
path: &str,
|
||||
) -> Result<Vec<T>, JsonPathError> {
|
||||
let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
|
||||
Selector::default().str_path(path)?.value(&json).select_as()
|
||||
}
|
||||
|
||||
/// Delete (i.e. replace with null) the JSON properties matched by the jsonpath.
|
||||
///
|
||||
/// ```rust
|
||||
/// extern crate jsonpath_lib as jsonpath;
|
||||
/// #[macro_use] extern crate serde_json;
|
||||
///
|
||||
/// let json_obj = json!({
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// ]});
|
||||
///
|
||||
/// let ret = jsonpath::delete(json_obj, "$..[?(20 == @.age)]").unwrap();
|
||||
///
|
||||
/// assert_eq!(ret, json!({
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// null,
|
||||
/// null
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// ]}));
|
||||
/// ```
|
||||
pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
|
||||
let mut selector = SelectorMut::default();
|
||||
let value = selector.str_path(path)?.value(value).delete()?;
|
||||
Ok(value.take().unwrap_or(Value::Null))
|
||||
}
|
||||
|
||||
/// Select JSON properties using a jsonpath, transform each result, and then replace it in place. Via a closure that implements `FnMut`, you can transform the selected values.
|
||||
///
|
||||
/// ```rust
|
||||
/// extern crate jsonpath_lib as jsonpath;
|
||||
/// #[macro_use] extern crate serde_json;
|
||||
///
|
||||
/// use serde_json::Value;
|
||||
///
|
||||
/// let json_obj = json!({
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 20},
|
||||
/// {"name": "친구2", "age": 20}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// ]});
|
||||
///
|
||||
/// let ret = jsonpath::replace_with(json_obj, "$..[?(@.age == 20)].age", &mut |v| {
|
||||
/// let age = if let Value::Number(n) = v {
|
||||
/// n.as_u64().unwrap() * 2
|
||||
/// } else {
|
||||
/// 0
|
||||
/// };
|
||||
///
|
||||
/// Some(json!(age))
|
||||
/// }).unwrap();
|
||||
///
|
||||
/// assert_eq!(ret, json!({
|
||||
/// "school": {
|
||||
/// "friends": [
|
||||
/// {"name": "친구1", "age": 40},
|
||||
/// {"name": "친구2", "age": 40}
|
||||
/// ]
|
||||
/// },
|
||||
/// "friends": [
|
||||
/// {"name": "친구3", "age": 30},
|
||||
/// {"name": "친구4"}
|
||||
/// ]}));
|
||||
/// ```
|
||||
pub fn replace_with<F>(value: Value, path: &str, fun: &mut F) -> Result<Value, JsonPathError>
|
||||
where
|
||||
F: FnMut(Value) -> Option<Value>,
|
||||
{
|
||||
let mut selector = SelectorMut::default();
|
||||
let value = selector.str_path(path)?.value(value).replace_with(fun)?;
|
||||
Ok(value.take().unwrap_or(Value::Null))
|
||||
}
|
||||
|
1511 src/parser/mod.rs
@ -1,677 +0,0 @@
|
||||
use std::result::Result;
|
||||
|
||||
use super::tokenizer::*;
|
||||
|
||||
const DUMMY: usize = 0;
|
||||
|
||||
type ParseResult<T> = Result<T, String>;
|
||||
|
||||
mod utils {
|
||||
|
||||
pub fn string_to_isize<F>(string: &String, msg_handler: F) -> Result<isize, String>
|
||||
where F: Fn() -> String {
|
||||
match string.as_str().parse::<isize>() {
|
||||
Ok(n) => Ok(n),
|
||||
_ => Err(msg_handler())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn string_to_f64<F>(string: &String, msg_handler: F) -> Result<f64, String>
|
||||
where F: Fn() -> String {
|
||||
match string.as_str().parse::<f64>() {
|
||||
Ok(n) => Ok(n),
|
||||
_ => Err(msg_handler())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum ParseToken {
|
||||
// '$'
|
||||
Absolute,
|
||||
// '@'
|
||||
Relative,
|
||||
// '.'
|
||||
In,
|
||||
// '..'
|
||||
Leaves,
|
||||
// '*'
|
||||
All,
|
||||
|
||||
Key(String),
|
||||
// []
|
||||
Array,
|
||||
// meta token
|
||||
ArrayEof,
|
||||
// ?( filter )
|
||||
Filter(FilterToken),
|
||||
// 1 : 2
|
||||
Range(Option<isize>, Option<isize>),
|
||||
// 1, 2, 3
|
||||
Union(Vec<isize>),
|
||||
|
||||
Number(f64),
|
||||
|
||||
Eof,
|
||||
}
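// Illustrative note (not in the original source): for the path `$.store.book[1:3]`
// the parser builds an AST whose visit order, as emitted through
// `NodeVisitor::visit_token`, is roughly:
//
//   Absolute, In, Key("store"), In, Key("book"),
//   Array, Range(Some(1), Some(3)), ArrayEof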
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum FilterToken {
|
||||
Equal,
|
||||
NotEqual,
|
||||
Little,
|
||||
LittleOrEqual,
|
||||
Greater,
|
||||
GreaterOrEqual,
|
||||
And,
|
||||
Or,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Node {
|
||||
left: Option<Box<Node>>,
|
||||
right: Option<Box<Node>>,
|
||||
token: ParseToken,
|
||||
}
|
||||
|
||||
pub struct Parser<'a> {
|
||||
tokenizer: PreloadedTokenizer<'a>
|
||||
}
|
||||
|
||||
impl<'a> Parser<'a> {
|
||||
pub fn new(input: &'a str) -> Self {
|
||||
Parser { tokenizer: PreloadedTokenizer::new(input) }
|
||||
}
|
||||
|
||||
pub fn compile(&mut self) -> ParseResult<Node> {
|
||||
Ok(self.json_path()?)
|
||||
}
|
||||
|
||||
pub fn parse<V: NodeVisitor>(&mut self, visitor: &mut V) -> ParseResult<()> {
|
||||
let node = self.json_path()?;
|
||||
visitor.visit(node);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn json_path(&mut self) -> ParseResult<Node> {
|
||||
debug!("#json_path");
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::Absolute(_)) => {
|
||||
let node = self.node(ParseToken::Absolute);
|
||||
self.paths(node)
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn paths(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#paths");
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => {
|
||||
self.eat_token();
|
||||
self.paths_dot(prev)
|
||||
}
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
self.eat_token();
|
||||
self.eat_whitespace();
|
||||
let node = self.array(prev)?;
|
||||
self.paths(node)
|
||||
}
|
||||
_ => {
|
||||
Ok(prev)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn paths_dot(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#paths_dot");
|
||||
let node = self.path(prev)?;
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Equal(_))
|
||||
| Ok(Token::NotEqual(_))
|
||||
| Ok(Token::Little(_))
|
||||
| Ok(Token::LittleOrEqual(_))
|
||||
| Ok(Token::Greater(_))
|
||||
| Ok(Token::GreaterOrEqual(_))
|
||||
| Ok(Token::And(_))
|
||||
| Ok(Token::Or(_)) => {
|
||||
Ok(node)
|
||||
}
|
||||
_ => {
|
||||
self.paths(node)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn path(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#path");
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => {
|
||||
self.path_leaves(prev)
|
||||
}
|
||||
Ok(Token::Asterisk(_)) => {
|
||||
self.path_in_all(prev)
|
||||
}
|
||||
Ok(Token::Key(_, _)) => {
|
||||
self.path_in_key(prev)
|
||||
}
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
self.eat_token();
|
||||
self.array(prev)
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn path_leaves(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#path_leaves");
|
||||
self.eat_token();
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Asterisk(_)) => {
|
||||
self.path_leaves_all(prev)
|
||||
}
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
let mut leaves_node = self.node(ParseToken::Leaves);
|
||||
leaves_node.left = Some(Box::new(prev));
|
||||
Ok(self.paths(leaves_node)?)
|
||||
}
|
||||
_ => {
|
||||
self.path_leaves_key(prev)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn path_leaves_key(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#path_leaves_key");
|
||||
Ok(Node {
|
||||
token: ParseToken::Leaves,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.key()?)),
|
||||
})
|
||||
}
|
||||
|
||||
fn path_leaves_all(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#path_leaves_all");
|
||||
self.eat_token();
|
||||
Ok(Node {
|
||||
token: ParseToken::Leaves,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
|
||||
fn path_in_all(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#path_in_all");
|
||||
self.eat_token();
|
||||
Ok(Node {
|
||||
token: ParseToken::In,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
|
||||
fn path_in_key(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#path_in_key");
|
||||
Ok(Node {
|
||||
token: ParseToken::In,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.key()?)),
|
||||
})
|
||||
}
|
||||
|
||||
fn key(&mut self) -> ParseResult<Node> {
|
||||
debug!("#key");
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::Key(_, v)) => {
|
||||
Ok(self.node(ParseToken::Key(v)))
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn array_quota_value(&mut self) -> ParseResult<Node> {
|
||||
debug!("#array_quota_value");
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::SingleQuoted(_, val))
|
||||
| Ok(Token::DoubleQuoted(_, val)) => {
|
||||
Ok(self.node(ParseToken::Key(val)))
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
Ok(self.node(ParseToken::Eof))
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn array_start(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#array_start");
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Question(_)) => {
|
||||
self.eat_token();
|
||||
Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.filter()?)),
|
||||
})
|
||||
}
|
||||
Ok(Token::Asterisk(_)) => {
|
||||
self.eat_token();
|
||||
Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.array_value()?)),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn array(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#array");
|
||||
let ret = self.array_start(prev)?;
|
||||
self.eat_whitespace();
|
||||
self.close_token(ret, Token::CloseArray(DUMMY))
|
||||
}
|
||||
|
||||
fn array_value_key(&mut self) -> ParseResult<Node> {
|
||||
debug!("#array_value_key");
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
|
||||
self.eat_whitespace();
|
||||
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Comma(_)) => {
|
||||
self.union(digit)
|
||||
}
|
||||
Ok(Token::Split(_)) => {
|
||||
self.range_from(digit)
|
||||
}
|
||||
_ => {
|
||||
Ok(self.node(ParseToken::Number(digit as f64)))
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fn array_value(&mut self) -> ParseResult<Node> {
|
||||
debug!("#array_value");
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Key(_, _)) => {
|
||||
self.array_value_key()
|
||||
}
|
||||
Ok(Token::Split(_)) => {
|
||||
self.eat_token();
|
||||
self.range_to()
|
||||
}
|
||||
Ok(Token::DoubleQuoted(_, _))
|
||||
| Ok(Token::SingleQuoted(_, _)) => {
|
||||
self.array_quota_value()
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
Ok(self.node(ParseToken::Eof))
|
||||
}
|
||||
_ => {
|
||||
self.eat_token();
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn union(&mut self, num: isize) -> ParseResult<Node> {
|
||||
debug!("#union");
|
||||
let mut values = vec![num];
|
||||
while match self.tokenizer.peek_token() {
|
||||
Ok(Token::Comma(_)) => true,
|
||||
_ => false
|
||||
} {
|
||||
self.eat_token();
|
||||
self.eat_whitespace();
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
|
||||
values.push(digit);
|
||||
}
|
||||
_ => {
|
||||
return Err(self.tokenizer.err_msg());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(self.node(ParseToken::Union(values)))
|
||||
}
|
||||
|
||||
fn range_from(&mut self, num: isize) -> ParseResult<Node> {
|
||||
debug!("#range_from");
|
||||
self.eat_token();
|
||||
self.eat_whitespace();
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Key(_, _)) => {
|
||||
self.range(num)
|
||||
}
|
||||
_ => {
|
||||
Ok(self.node(ParseToken::Range(Some(num), None)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn range_to(&mut self) -> ParseResult<Node> {
|
||||
debug!("#range_to");
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(self.node(ParseToken::Range(None, Some(digit))))
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn range(&mut self, num: isize) -> ParseResult<Node> {
|
||||
debug!("#range");
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_isize(val, || self.tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(self.node(ParseToken::Range(Some(num), Some(digit))))
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn filter(&mut self) -> ParseResult<Node> {
|
||||
debug!("#filter");
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::OpenParenthesis(_)) => {
|
||||
let ret = self.exprs()?;
|
||||
self.eat_whitespace();
|
||||
self.close_token(ret, Token::CloseParenthesis(DUMMY))
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
Ok(self.node(ParseToken::Eof))
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn exprs(&mut self) -> ParseResult<Node> {
|
||||
self.eat_whitespace();
|
||||
debug!("#exprs");
|
||||
let node = match self.tokenizer.peek_token() {
|
||||
Ok(Token::OpenParenthesis(_)) => {
|
||||
self.eat_token();
|
||||
trace!("\t-exprs - open_parenthesis");
|
||||
let ret = self.exprs()?;
|
||||
self.eat_whitespace();
|
||||
self.close_token(ret, Token::CloseParenthesis(DUMMY))?
|
||||
}
|
||||
_ => {
|
||||
trace!("\t-exprs - else");
|
||||
self.expr()?
|
||||
}
|
||||
};
|
||||
self.eat_whitespace();
|
||||
self.condition_expr(node)
|
||||
}
|
||||
|
||||
fn condition_expr(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#condition_expr");
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::And(_)) => {
|
||||
self.eat_token();
|
||||
Ok(Node {
|
||||
token: ParseToken::Filter(FilterToken::And),
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.exprs()?)),
|
||||
})
|
||||
}
|
||||
Ok(Token::Or(_)) => {
|
||||
self.eat_token();
|
||||
Ok(Node {
|
||||
token: ParseToken::Filter(FilterToken::Or),
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.exprs()?)),
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
Ok(prev)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn expr(&mut self) -> ParseResult<Node> {
|
||||
debug!("#expr");
|
||||
|
||||
let has_prop_candidate = match self.tokenizer.peek_token() {
|
||||
Ok(Token::At(_)) => true,
|
||||
_ => false
|
||||
};
|
||||
|
||||
let node = self.term()?;
|
||||
self.eat_whitespace();
|
||||
|
||||
if match self.tokenizer.peek_token() {
|
||||
Ok(Token::Equal(_))
|
||||
| Ok(Token::NotEqual(_))
|
||||
| Ok(Token::Little(_))
|
||||
| Ok(Token::LittleOrEqual(_))
|
||||
| Ok(Token::Greater(_))
|
||||
| Ok(Token::GreaterOrEqual(_)) => true,
|
||||
_ => false
|
||||
} {
|
||||
self.op(node)
|
||||
} else if has_prop_candidate {
|
||||
Ok(node)
|
||||
} else {
|
||||
return Err(self.tokenizer.err_msg());
|
||||
}
|
||||
}
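// Illustrative note (not in the original source): `has_prop_candidate` is what
// lets a bare existence filter such as `[?(@.isbn)]` pass without a comparison
// operator, while a bare literal like `[?(1)]` is rejected here.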
|
||||
|
||||
fn term_num(&mut self) -> ParseResult<Node> {
|
||||
debug!("#term_num");
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, val)) => {
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => {
|
||||
self.term_num_float(val.as_str())
|
||||
}
|
||||
_ => {
|
||||
let number = utils::string_to_f64(&val, || self.tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(self.node(ParseToken::Number(number)))
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
Ok(self.node(ParseToken::Eof))
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn term_num_float(&mut self, mut num: &str) -> ParseResult<Node> {
|
||||
debug!("#term_num_float");
|
||||
self.eat_token();
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, frac)) => {
|
||||
let mut f = String::new();
|
||||
f.push_str(&mut num);
|
||||
f.push('.');
|
||||
f.push_str(frac.as_str());
|
||||
let number = utils::string_to_f64(&f, || self.tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(self.node(ParseToken::Number(number)))
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
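// Example (sketch, not in the original source): for a numeric literal such as
// `8.95` inside a filter, the tokenizer emits Key("8"), Dot, Key("95");
// `term_num` reads "8", sees the Dot, and `term_num_float` joins the two parts
// into Number(8.95).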
|
||||
|
||||
fn term(&mut self) -> ParseResult<Node> {
|
||||
debug!("#term");
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::At(_)) => {
|
||||
self.eat_token();
|
||||
let node = self.node(ParseToken::Relative);
|
||||
|
||||
match self.tokenizer.peek_token() {
|
||||
Ok(Token::Whitespace(_, _)) => {
|
||||
self.eat_whitespace();
|
||||
Ok(node)
|
||||
}
|
||||
_ => {
|
||||
self.paths(node)
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Token::Absolute(_)) => {
|
||||
self.json_path()
|
||||
}
|
||||
Ok(Token::DoubleQuoted(_, _))
|
||||
| Ok(Token::SingleQuoted(_, _)) => {
|
||||
self.array_quota_value()
|
||||
}
|
||||
Ok(Token::Key(_, _)) => {
|
||||
self.term_num()
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn op(&mut self, prev: Node) -> ParseResult<Node> {
|
||||
debug!("#op");
|
||||
let token = match self.tokenizer.next_token() {
|
||||
Ok(Token::Equal(_)) => {
|
||||
ParseToken::Filter(FilterToken::Equal)
|
||||
}
|
||||
Ok(Token::NotEqual(_)) => {
|
||||
ParseToken::Filter(FilterToken::NotEqual)
|
||||
}
|
||||
Ok(Token::Little(_)) => {
|
||||
ParseToken::Filter(FilterToken::Little)
|
||||
}
|
||||
Ok(Token::LittleOrEqual(_)) => {
|
||||
ParseToken::Filter(FilterToken::LittleOrEqual)
|
||||
}
|
||||
Ok(Token::Greater(_)) => {
|
||||
ParseToken::Filter(FilterToken::Greater)
|
||||
}
|
||||
Ok(Token::GreaterOrEqual(_)) => {
|
||||
ParseToken::Filter(FilterToken::GreaterOrEqual)
|
||||
}
|
||||
Err(TokenError::Eof) => {
|
||||
ParseToken::Eof
|
||||
}
|
||||
_ => {
|
||||
return Err(self.tokenizer.err_msg());
|
||||
}
|
||||
};
|
||||
|
||||
self.eat_whitespace();
|
||||
|
||||
Ok(Node {
|
||||
token,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(self.term()?)),
|
||||
})
|
||||
}
|
||||
|
||||
fn eat_whitespace(&mut self) {
|
||||
while let Ok(Token::Whitespace(_, _)) = self.tokenizer.peek_token() {
|
||||
let _ = self.tokenizer.next_token();
|
||||
}
|
||||
}
|
||||
|
||||
fn eat_token(&mut self) {
|
||||
let _ = self.tokenizer.next_token();
|
||||
}
|
||||
|
||||
fn node(&mut self, token: ParseToken) -> Node {
|
||||
Node { left: None, right: None, token: token }
|
||||
}
|
||||
|
||||
fn close_token(&mut self, ret: Node, token: Token) -> ParseResult<Node> {
|
||||
debug!("#close_token");
|
||||
match self.tokenizer.next_token() {
|
||||
Ok(ref t) if t.partial_eq(token) => {
|
||||
Ok(ret)
|
||||
}
|
||||
_ => {
|
||||
Err(self.tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait NodeVisitor {
|
||||
fn visit(&mut self, node: Node) {
|
||||
match node.token {
|
||||
ParseToken::Absolute
|
||||
| ParseToken::Relative
|
||||
| ParseToken::All
|
||||
| ParseToken::Key(_) => {
|
||||
self.visit_token(node.token);
|
||||
}
|
||||
ParseToken::In
|
||||
| ParseToken::Leaves => {
|
||||
node.left.map(|n| self.visit(*n));
|
||||
self.visit_token(node.token);
|
||||
node.right.map(|n| self.visit(*n));
|
||||
}
|
||||
| ParseToken::Range(_, _)
|
||||
| ParseToken::Union(_)
|
||||
| ParseToken::Number(_) => {
|
||||
self.visit_token(node.token);
|
||||
}
|
||||
|
||||
| ParseToken::Array => {
|
||||
node.left.map(|n| self.visit(*n));
|
||||
self.visit_token(node.token);
|
||||
node.right.map(|n| self.visit(*n));
|
||||
self.visit_token(ParseToken::ArrayEof);
|
||||
}
|
||||
ParseToken::Filter(FilterToken::And)
|
||||
| ParseToken::Filter(FilterToken::Or) => {
|
||||
node.left.map(|n| self.visit(*n));
|
||||
node.right.map(|n| self.visit(*n));
|
||||
self.visit_token(node.token);
|
||||
}
|
||||
ParseToken::Filter(_) => {
|
||||
node.left.map(|n| self.visit(*n));
|
||||
self.end_term();
|
||||
node.right.map(|n| self.visit(*n));
|
||||
self.end_term();
|
||||
self.visit_token(node.token);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_token(&mut self, token: ParseToken);
|
||||
fn end_term(&mut self) {}
|
||||
}
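// A minimal sketch (illustrative only, written against this now-removed parser
// API): a NodeVisitor that simply records the tokens in visit order.
struct TokenCollector {
    tokens: Vec<ParseToken>,
}

impl NodeVisitor for TokenCollector {
    fn visit_token(&mut self, token: ParseToken) {
        self.tokens.push(token);
    }
}

// Usage sketch: `Parser::new("$.a").parse(&mut collector)` fills
// `collector.tokens` with Absolute, In, Key("a").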
|
@ -12,10 +12,7 @@ pub struct PathReader<'a> {
|
||||
|
||||
impl<'a> PathReader<'a> {
|
||||
pub fn new(input: &'a str) -> Self {
|
||||
PathReader {
|
||||
input,
|
||||
pos: 0,
|
||||
}
|
||||
PathReader { input, pos: 0 }
|
||||
}
|
||||
|
||||
pub fn peek_char(&self) -> Result<(usize, char), ReaderError> {
|
||||
@ -24,8 +21,8 @@ impl<'a> PathReader<'a> {
|
||||
}
|
||||
|
||||
pub fn take_while<F>(&mut self, fun: F) -> Result<(usize, String), ReaderError>
|
||||
where
|
||||
F: Fn(&char) -> bool
|
||||
where
|
||||
F: Fn(&char) -> bool,
|
||||
{
|
||||
let mut char_len: usize = 0;
|
||||
let mut ret = String::new();
|
||||
@ -53,4 +50,4 @@ impl<'a> PathReader<'a> {
|
||||
pub fn current_pos(&self) -> usize {
|
||||
self.pos
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,32 +1,7 @@
|
||||
use std::io::Write;
|
||||
use std::result::Result;
|
||||
|
||||
use super::path_reader::{PathReader, ReaderError};
|
||||
|
||||
const ABSOLUTE: &'static str = "$";
|
||||
const DOT: &'static str = ".";
|
||||
const AT: &'static str = "@";
|
||||
const OPEN_ARRAY: &'static str = "[";
|
||||
const CLOSE_ARRAY: &'static str = "]";
|
||||
const ASTERISK: &'static str = "*";
|
||||
const QUESTION: &'static str = "?";
|
||||
const COMMA: &'static str = ",";
|
||||
const SPLIT: &'static str = ":";
|
||||
const OPEN_PARENTHESIS: &'static str = "(";
|
||||
const CLOSE_PARENTHESIS: &'static str = ")";
|
||||
const KEY: &'static str = "Key";
|
||||
const DOUBLE_QUOTA: &'static str = "\"";
|
||||
const SINGLE_QUOTA: &'static str = "'";
|
||||
const EQUAL: &'static str = "==";
|
||||
const GREATER_OR_EQUAL: &'static str = ">=";
|
||||
const GREATER: &'static str = ">";
|
||||
const LITTLE: &'static str = "<";
|
||||
const LITTLE_OR_EQUAL: &'static str = "<=";
|
||||
const NOT_EQUAL: &'static str = "!=";
|
||||
const AND: &'static str = "&&";
|
||||
const OR: &'static str = "||";
|
||||
const WHITESPACE: &'static str = " ";
|
||||
|
||||
const CH_DOLLA: char = '$';
|
||||
const CH_DOT: char = '.';
|
||||
const CH_ASTERISK: char = '*';
|
||||
@ -44,8 +19,8 @@ const CH_PIPE: char = '|';
|
||||
const CH_LITTLE: char = '<';
|
||||
const CH_GREATER: char = '>';
|
||||
const CH_EXCLAMATION: char = '!';
|
||||
const CH_SINGLE_QUOTA: char = '\'';
|
||||
const CH_DOUBLE_QUOTA: char = '"';
|
||||
const CH_SINGLE_QUOTE: char = '\'';
|
||||
const CH_DOUBLE_QUOTE: char = '"';
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum TokenError {
|
||||
@ -55,7 +30,7 @@ pub enum TokenError {
|
||||
|
||||
fn to_token_error(read_err: ReaderError) -> TokenError {
|
||||
match read_err {
|
||||
ReaderError::Eof => TokenError::Eof
|
||||
ReaderError::Eof => TokenError::Eof,
|
||||
}
|
||||
}
|
||||
|
||||
@ -87,76 +62,177 @@ pub enum Token {
|
||||
}
|
||||
|
||||
impl Token {
|
||||
pub fn partial_eq(&self, other: Token) -> bool {
|
||||
self.to_simple() == other.to_simple()
|
||||
}
|
||||
|
||||
fn to_simple(&self) -> &'static str {
|
||||
pub fn is_match_token_type(&self, other: Token) -> bool {
|
||||
match self {
|
||||
Token::Absolute(_) => ABSOLUTE,
|
||||
Token::Dot(_) => DOT,
|
||||
Token::At(_) => AT,
|
||||
Token::OpenArray(_) => OPEN_ARRAY,
|
||||
Token::CloseArray(_) => CLOSE_ARRAY,
|
||||
Token::Asterisk(_) => ASTERISK,
|
||||
Token::Question(_) => QUESTION,
|
||||
Token::Comma(_) => COMMA,
|
||||
Token::Split(_) => SPLIT,
|
||||
Token::OpenParenthesis(_) => OPEN_PARENTHESIS,
|
||||
Token::CloseParenthesis(_) => CLOSE_PARENTHESIS,
|
||||
Token::Key(_, _) => KEY,
|
||||
Token::DoubleQuoted(_, _) => DOUBLE_QUOTA,
|
||||
Token::SingleQuoted(_, _) => SINGLE_QUOTA,
|
||||
Token::Equal(_) => EQUAL,
|
||||
Token::GreaterOrEqual(_) => GREATER_OR_EQUAL,
|
||||
Token::Greater(_) => GREATER,
|
||||
Token::Little(_) => LITTLE,
|
||||
Token::LittleOrEqual(_) => LITTLE_OR_EQUAL,
|
||||
Token::NotEqual(_) => NOT_EQUAL,
|
||||
Token::And(_) => AND,
|
||||
Token::Or(_) => OR,
|
||||
Token::Whitespace(_, _) => WHITESPACE
|
||||
Token::Absolute(_) => match other {
|
||||
Token::Absolute(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Dot(_) => match other {
|
||||
Token::Dot(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::At(_) => match other {
|
||||
Token::At(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::OpenArray(_) => match other {
|
||||
Token::OpenArray(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::CloseArray(_) => match other {
|
||||
Token::CloseArray(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Asterisk(_) => match other {
|
||||
Token::Asterisk(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Question(_) => match other {
|
||||
Token::Question(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Comma(_) => match other {
|
||||
Token::Comma(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Split(_) => match other {
|
||||
Token::Split(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::OpenParenthesis(_) => match other {
|
||||
Token::OpenParenthesis(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::CloseParenthesis(_) => match other {
|
||||
Token::CloseParenthesis(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Key(_, _) => match other {
|
||||
Token::Key(_, _) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::DoubleQuoted(_, _) => match other {
|
||||
Token::DoubleQuoted(_, _) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::SingleQuoted(_, _) => match other {
|
||||
Token::SingleQuoted(_, _) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Equal(_) => match other {
|
||||
Token::Equal(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::GreaterOrEqual(_) => match other {
|
||||
Token::GreaterOrEqual(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Greater(_) => match other {
|
||||
Token::Greater(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Little(_) => match other {
|
||||
Token::Little(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::LittleOrEqual(_) => match other {
|
||||
Token::LittleOrEqual(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::NotEqual(_) => match other {
|
||||
Token::NotEqual(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::And(_) => match other {
|
||||
Token::And(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Or(_) => match other {
|
||||
Token::Or(_) => true,
|
||||
_ => false
|
||||
},
|
||||
Token::Whitespace(_, _) => match other {
|
||||
Token::Whitespace(_, _) => true,
|
||||
_ => false
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn simple_matched_token(ch: char, pos: usize) -> Option<Token> {
|
||||
match ch {
|
||||
CH_DOLLA => Some(Token::Absolute(pos)),
|
||||
CH_DOT => Some(Token::Dot(pos)),
|
||||
CH_ASTERISK => Some(Token::Asterisk(pos)),
|
||||
CH_LARRAY => Some(Token::OpenArray(pos)),
|
||||
CH_RARRAY => Some(Token::CloseArray(pos)),
|
||||
CH_LPAREN => Some(Token::OpenParenthesis(pos)),
|
||||
CH_RPAREN => Some(Token::CloseParenthesis(pos)),
|
||||
CH_AT => Some(Token::At(pos)),
|
||||
CH_QUESTION => Some(Token::Question(pos)),
|
||||
CH_COMMA => Some(Token::Comma(pos)),
|
||||
CH_SEMICOLON => Some(Token::Split(pos)),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Tokenizer<'a> {
|
||||
input: PathReader<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Tokenizer<'a> {
|
||||
pub fn new(input: &'a str) -> Self {
|
||||
trace!("input: {}", input);
|
||||
Tokenizer {
|
||||
input: PathReader::new(input),
|
||||
}
|
||||
}
|
||||
|
||||
fn single_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
fn dolla(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let fun = |c: &char| match c {
|
||||
&CH_DOT
|
||||
| &CH_ASTERISK
|
||||
| &CH_LARRAY
|
||||
| &CH_RARRAY
|
||||
| &CH_LPAREN
|
||||
| &CH_RPAREN
|
||||
| &CH_AT
|
||||
| &CH_QUESTION
|
||||
| &CH_COMMA
|
||||
| &CH_SEMICOLON
|
||||
| &CH_LITTLE
|
||||
| &CH_GREATER
|
||||
| &CH_EQUAL
|
||||
| &CH_AMPERSAND
|
||||
| &CH_PIPE
|
||||
| &CH_EXCLAMATION
|
||||
=> false,
|
||||
_ => !c.is_whitespace(),
|
||||
};
|
||||
let (_, mut vec) = self.input.take_while(fun).map_err(to_token_error)?;
|
||||
vec.insert(0, ch);
|
||||
|
||||
if vec.len() == 1 {
|
||||
Ok(Token::Absolute(pos))
|
||||
} else {
|
||||
Ok(Token::Key(pos, vec))
|
||||
}
|
||||
}
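// Example (sketch, not in the original source): a lone `$` becomes Absolute,
// while input like `$var` is swallowed whole and becomes Key(pos, "$var").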
|
||||
|
||||
fn quote(&mut self, ch: char) -> Result<String, TokenError> {
|
||||
let (_, mut val) = self
|
||||
.input
|
||||
.take_while(|c| *c != ch)
|
||||
.map_err(to_token_error)?;
|
||||
|
||||
if let Some('\\') = val.chars().last() {
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
let _ = val.pop();
|
||||
let (_, val_remain) = self
|
||||
.input
|
||||
.take_while(|c| *c != ch)
|
||||
.map_err(to_token_error)?;
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
val.push(ch);
|
||||
val.push_str(val_remain.as_str());
|
||||
} else {
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
}
|
||||
|
||||
Ok(val)
|
||||
}
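// Example (sketch, not in the original source): for the input `'a\'b'` the scan
// stops at the escaped quote, notices the trailing backslash, drops it, re-scans
// up to the real closing quote, and returns the string `a'b`. Only a single
// escaped quote of the same kind is handled this way.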
|
||||
|
||||
fn single_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let val = self.quote(ch)?;
|
||||
Ok(Token::SingleQuoted(pos, val))
|
||||
}
|
||||
|
||||
fn double_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
fn double_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let val = self.quote(ch)?;
|
||||
Ok(Token::DoubleQuoted(pos, val))
|
||||
}
|
||||
|
||||
@ -167,7 +243,7 @@ impl<'a> Tokenizer<'a> {
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
Ok(Token::Equal(pos))
|
||||
}
|
||||
_ => Err(TokenError::Position(pos))
|
||||
_ => Err(TokenError::Position(pos)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -178,7 +254,7 @@ impl<'a> Tokenizer<'a> {
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
Ok(Token::NotEqual(pos))
|
||||
}
|
||||
_ => Err(TokenError::Position(pos))
|
||||
_ => Err(TokenError::Position(pos)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -211,7 +287,7 @@ impl<'a> Tokenizer<'a> {
|
||||
let _ = self.input.next_char().map_err(to_token_error);
|
||||
Ok(Token::And(pos))
|
||||
}
|
||||
_ => Err(TokenError::Position(pos))
|
||||
_ => Err(TokenError::Position(pos)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -222,27 +298,39 @@ impl<'a> Tokenizer<'a> {
|
||||
self.input.next_char().map_err(to_token_error)?;
|
||||
Ok(Token::Or(pos))
|
||||
}
|
||||
_ => Err(TokenError::Position(pos))
|
||||
_ => Err(TokenError::Position(pos)),
|
||||
}
|
||||
}
|
||||
|
||||
fn whitespace(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
|
||||
let (_, vec) = self.input.take_while(|c| c.is_whitespace()).map_err(to_token_error)?;
|
||||
let (_, vec) = self
|
||||
.input
|
||||
.take_while(|c| c.is_whitespace())
|
||||
.map_err(to_token_error)?;
|
||||
Ok(Token::Whitespace(pos, vec.len()))
|
||||
}
|
||||
|
||||
fn other(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
|
||||
let fun = |c: &char| {
|
||||
match simple_matched_token(*c, pos) {
|
||||
Some(_) => false,
|
||||
_ if c == &CH_LITTLE
|
||||
|| c == &CH_GREATER
|
||||
|| c == &CH_EQUAL
|
||||
|| c == &CH_AMPERSAND
|
||||
|| c == &CH_PIPE
|
||||
|| c == &CH_EXCLAMATION => false,
|
||||
_ => !c.is_whitespace()
|
||||
}
|
||||
let fun = |c: &char| match c {
|
||||
&CH_DOLLA
|
||||
| &CH_DOT
|
||||
| &CH_ASTERISK
|
||||
| &CH_LARRAY
|
||||
| &CH_RARRAY
|
||||
| &CH_LPAREN
|
||||
| &CH_RPAREN
|
||||
| &CH_AT
|
||||
| &CH_QUESTION
|
||||
| &CH_COMMA
|
||||
| &CH_SEMICOLON
|
||||
| &CH_LITTLE
|
||||
| &CH_GREATER
|
||||
| &CH_EQUAL
|
||||
| &CH_AMPERSAND
|
||||
| &CH_PIPE
|
||||
| &CH_EXCLAMATION
|
||||
=> false,
|
||||
_ => !c.is_whitespace(),
|
||||
};
|
||||
let (_, mut vec) = self.input.take_while(fun).map_err(to_token_error)?;
|
||||
vec.insert(0, ch);
|
||||
@ -251,22 +339,28 @@ impl<'a> Tokenizer<'a> {
|
||||
|
||||
pub fn next_token(&mut self) -> Result<Token, TokenError> {
|
||||
let (pos, ch) = self.input.next_char().map_err(to_token_error)?;
|
||||
match simple_matched_token(ch, pos) {
|
||||
Some(t) => Ok(t),
|
||||
None => {
|
||||
match ch {
|
||||
CH_SINGLE_QUOTA => self.single_quota(pos, ch),
|
||||
CH_DOUBLE_QUOTA => self.double_quota(pos, ch),
|
||||
CH_EQUAL => self.equal(pos, ch),
|
||||
CH_GREATER => self.greater(pos, ch),
|
||||
CH_LITTLE => self.little(pos, ch),
|
||||
CH_AMPERSAND => self.and(pos, ch),
|
||||
CH_PIPE => self.or(pos, ch),
|
||||
CH_EXCLAMATION => self.not_equal(pos, ch),
|
||||
_ if ch.is_whitespace() => self.whitespace(pos, ch),
|
||||
_ => self.other(pos, ch),
|
||||
}
|
||||
}
|
||||
match ch {
|
||||
CH_DOLLA => self.dolla(pos, ch),
|
||||
CH_DOT => Ok(Token::Dot(pos)),
|
||||
CH_ASTERISK => Ok(Token::Asterisk(pos)),
|
||||
CH_LARRAY => Ok(Token::OpenArray(pos)),
|
||||
CH_RARRAY => Ok(Token::CloseArray(pos)),
|
||||
CH_LPAREN => Ok(Token::OpenParenthesis(pos)),
|
||||
CH_RPAREN => Ok(Token::CloseParenthesis(pos)),
|
||||
CH_AT => Ok(Token::At(pos)),
|
||||
CH_QUESTION => Ok(Token::Question(pos)),
|
||||
CH_COMMA => Ok(Token::Comma(pos)),
|
||||
CH_SEMICOLON => Ok(Token::Split(pos)),
|
||||
CH_SINGLE_QUOTE => self.single_quote(pos, ch),
|
||||
CH_DOUBLE_QUOTE => self.double_quote(pos, ch),
|
||||
CH_EQUAL => self.equal(pos, ch),
|
||||
CH_GREATER => self.greater(pos, ch),
|
||||
CH_LITTLE => self.little(pos, ch),
|
||||
CH_AMPERSAND => self.and(pos, ch),
|
||||
CH_PIPE => self.or(pos, ch),
|
||||
CH_EXCLAMATION => self.not_equal(pos, ch),
|
||||
_ if ch.is_whitespace() => self.whitespace(pos, ch),
|
||||
_ => self.other(pos, ch),
|
||||
}
|
||||
}
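// Illustrative note (not in the original source): for the input `$.a[?(@.b>1)]`
// this dispatch yields the token stream
//   Absolute, Dot, Key("a"), OpenArray, Question, OpenParenthesis,
//   At, Dot, Key("b"), Greater, Key("1"), CloseParenthesis, CloseArray
// - note that bare numbers are tokenized as Key and only become Number during
//   parsing.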
|
||||
|
||||
@ -275,7 +369,7 @@ impl<'a> Tokenizer<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PreloadedTokenizer<'a> {
|
||||
pub struct TokenReader<'a> {
|
||||
origin_input: &'a str,
|
||||
err: TokenError,
|
||||
err_pos: usize,
|
||||
@ -283,7 +377,7 @@ pub struct PreloadedTokenizer<'a> {
|
||||
curr_pos: Option<usize>,
|
||||
}
|
||||
|
||||
impl<'a> PreloadedTokenizer<'a> {
|
||||
impl<'a> TokenReader<'a> {
|
||||
pub fn new(input: &'a str) -> Self {
|
||||
let mut tokenizer = Tokenizer::new(input);
|
||||
let mut tokens = vec![];
|
||||
@ -293,8 +387,8 @@ impl<'a> PreloadedTokenizer<'a> {
|
||||
tokens.insert(0, (tokenizer.current_pos(), t));
|
||||
}
|
||||
Err(e) => {
|
||||
return PreloadedTokenizer {
|
||||
origin_input: input.clone(),
|
||||
return TokenReader {
|
||||
origin_input: input,
|
||||
err: e,
|
||||
err_pos: tokenizer.current_pos(),
|
||||
tokens,
|
||||
@ -333,23 +427,13 @@ impl<'a> PreloadedTokenizer<'a> {
|
||||
}
|
||||
|
||||
pub fn err_msg_with_pos(&self, pos: usize) -> String {
|
||||
let mut w = Vec::new();
|
||||
writeln!(&mut w, "{}", self.origin_input).unwrap();
|
||||
writeln!(&mut w, "{}", "^".repeat(pos)).unwrap();
|
||||
match std::str::from_utf8(&w[..]) {
|
||||
Ok(s) => s.to_owned(),
|
||||
Err(_) => panic!("Invalid UTF-8")
|
||||
}
|
||||
format!("{}\n{}", self.origin_input, "^".repeat(pos))
|
||||
}
|
||||
|
||||
pub fn err_msg(&self) -> String {
|
||||
match self.curr_pos {
|
||||
Some(pos) => {
|
||||
self.err_msg_with_pos(pos)
|
||||
}
|
||||
_ => {
|
||||
self.err_msg_with_pos(self.err_pos)
|
||||
}
|
||||
Some(pos) => self.err_msg_with_pos(pos),
|
||||
_ => self.err_msg_with_pos(self.err_pos),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
1058 src/ref_value/de.rs
File diff suppressed because it is too large
@ -1,4 +0,0 @@
|
||||
pub mod model;
|
||||
pub mod de;
|
||||
pub mod ser;
|
||||
pub mod serde_error;
|
@ -1,264 +0,0 @@
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::ops::Deref;
|
||||
use std::sync::Arc;
|
||||
|
||||
use indexmap::map::IndexMap;
|
||||
use serde::ser::Serialize;
|
||||
use serde_json::{Number, Value};
|
||||
|
||||
type TypeRefValue = Arc<Box<RefValue>>;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub struct RefValueWrapper {
|
||||
data: TypeRefValue
|
||||
}
|
||||
|
||||
impl Eq for RefValueWrapper {}
|
||||
|
||||
impl Deref for RefValueWrapper {
|
||||
type Target = RefValue;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&(**self.data)
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for RefValueWrapper {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.deref().hash(state)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for RefValueWrapper {
|
||||
fn clone(&self) -> Self {
|
||||
RefValueWrapper {
|
||||
data: self.data.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
///
|
||||
/// see serde_json::Value
|
||||
///
|
||||
pub trait RefIndex {
|
||||
fn index_into<'v>(&self, v: &'v RefValue) -> Option<&'v RefValueWrapper>;
|
||||
fn index_into_mut<'v>(&self, v: &'v mut RefValue) -> Option<&'v mut RefValueWrapper>;
|
||||
fn index_or_insert<'v>(&self, v: &'v mut RefValue) -> &'v mut RefValueWrapper;
|
||||
}
|
||||
|
||||
impl RefIndex for usize {
|
||||
fn index_into<'v>(&self, v: &'v RefValue) -> Option<&'v RefValueWrapper> {
|
||||
match *v {
|
||||
RefValue::Array(ref vec) => vec.get(*self),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
fn index_into_mut<'v>(&self, v: &'v mut RefValue) -> Option<&'v mut RefValueWrapper> {
|
||||
match *v {
|
||||
RefValue::Array(ref mut vec) => vec.get_mut(*self),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
fn index_or_insert<'v>(&self, v: &'v mut RefValue) -> &'v mut RefValueWrapper {
|
||||
match *v {
|
||||
RefValue::Array(ref mut vec) => {
|
||||
let len = vec.len();
|
||||
vec.get_mut(*self).unwrap_or_else(|| {
|
||||
panic!(
|
||||
"cannot access index {} of JSON array of length {}",
|
||||
self, len
|
||||
)
|
||||
})
|
||||
}
|
||||
_ => panic!("cannot access index {} of JSON {:?}", self, v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RefIndex for str {
|
||||
fn index_into<'v>(&self, v: &'v RefValue) -> Option<&'v RefValueWrapper> {
|
||||
match *v {
|
||||
RefValue::Object(ref map) => map.get(self),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
fn index_into_mut<'v>(&self, v: &'v mut RefValue) -> Option<&'v mut RefValueWrapper> {
|
||||
match *v {
|
||||
RefValue::Object(ref mut map) => map.get_mut(self),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
fn index_or_insert<'v>(&self, v: &'v mut RefValue) -> &'v mut RefValueWrapper {
|
||||
if let RefValue::Null = *v {
|
||||
*v = RefValue::Object(IndexMap::new());
|
||||
}
|
||||
match *v {
|
||||
RefValue::Object(ref mut map) => {
|
||||
map.entry(self.to_owned()).or_insert(RefValue::Null.into())
|
||||
}
|
||||
_ => panic!("cannot access key {:?} in JSON {:?}", self, v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RefIndex for String {
|
||||
fn index_into<'v>(&self, v: &'v RefValue) -> Option<&'v RefValueWrapper> {
|
||||
self[..].index_into(v)
|
||||
}
|
||||
fn index_into_mut<'v>(&self, v: &'v mut RefValue) -> Option<&'v mut RefValueWrapper> {
|
||||
self[..].index_into_mut(v)
|
||||
}
|
||||
fn index_or_insert<'v>(&self, v: &'v mut RefValue) -> &'v mut RefValueWrapper {
|
||||
self[..].index_or_insert(v)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum RefValue {
|
||||
Null,
|
||||
Bool(bool),
|
||||
Number(Number),
|
||||
String(String),
|
||||
Array(Vec<RefValueWrapper>),
|
||||
Object(IndexMap<String, RefValueWrapper>),
|
||||
}
|
||||
|
||||
static REF_VALUE_NULL: &'static str = "$jsonpath::ref_value::model::RefValue::Null";
|
||||
|
||||
impl Hash for RefValue {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
match self {
|
||||
RefValue::Null => {
|
||||
REF_VALUE_NULL.hash(state)
|
||||
}
|
||||
RefValue::Bool(b) => {
|
||||
b.hash(state)
|
||||
}
|
||||
RefValue::Number(n) => {
|
||||
if n.is_f64() {
|
||||
n.as_f64().unwrap().to_string().hash(state)
|
||||
} else if n.is_i64() {
|
||||
n.as_i64().unwrap().hash(state);
|
||||
} else {
|
||||
n.as_u64().unwrap().hash(state);
|
||||
}
|
||||
}
|
||||
RefValue::String(s) => {
|
||||
s.hash(state)
|
||||
}
|
||||
RefValue::Object(map) => {
|
||||
for (_, v) in map {
|
||||
v.hash(state);
|
||||
}
|
||||
}
|
||||
RefValue::Array(v) => {
|
||||
for i in v {
|
||||
i.hash(state);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for RefValue {}
|
||||
|
||||
impl RefValue {
|
||||
pub fn get<I: RefIndex>(&self, index: I) -> Option<&RefValueWrapper> {
|
||||
index.index_into(self)
|
||||
}
|
||||
|
||||
pub fn is_object(&self) -> bool {
|
||||
self.as_object().is_some()
|
||||
}
|
||||
|
||||
pub fn as_object(&self) -> Option<&IndexMap<String, RefValueWrapper>> {
|
||||
match *self {
|
||||
RefValue::Object(ref map) => Some(map),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_array(&self) -> bool {
|
||||
self.as_array().is_some()
|
||||
}
|
||||
|
||||
pub fn as_array(&self) -> Option<&Vec<RefValueWrapper>> {
|
||||
match *self {
|
||||
RefValue::Array(ref array) => Some(&*array),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_string(&self) -> bool {
|
||||
self.as_str().is_some()
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> Option<&str> {
|
||||
match *self {
|
||||
RefValue::String(ref s) => Some(s),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_number(&self) -> bool {
|
||||
match *self {
|
||||
RefValue::Number(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_number(&self) -> Option<Number> {
|
||||
match *self {
|
||||
RefValue::Number(ref n) => Some(n.clone()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_boolean(&self) -> bool {
|
||||
self.as_bool().is_some()
|
||||
}
|
||||
|
||||
pub fn as_bool(&self) -> Option<bool> {
|
||||
match *self {
|
||||
RefValue::Bool(b) => Some(b),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_null(&self) -> bool {
|
||||
self.as_null().is_some()
|
||||
}
|
||||
|
||||
pub fn as_null(&self) -> Option<()> {
|
||||
match *self {
|
||||
RefValue::Null => Some(()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<RefValueWrapper> for RefValue {
|
||||
fn into(self) -> RefValueWrapper {
|
||||
RefValueWrapper {
|
||||
data: Arc::new(Box::new(self))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<RefValueWrapper> for &Value {
|
||||
fn into(self) -> RefValueWrapper {
|
||||
match self.serialize(super::ser::Serializer) {
|
||||
Ok(v) => v.into(),
|
||||
Err(e) => panic!("Error Value into RefValue: {:?}", e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<Value> for &RefValueWrapper {
|
||||
fn into(self) -> Value {
|
||||
match serde_json::to_value(self.deref()) {
|
||||
Ok(v) => v,
|
||||
Err(e) => panic!("Error RefValueWrapper into Value: {:?}", e)
|
||||
}
|
||||
}
|
||||
}
|
@ -1,624 +0,0 @@
|
||||
use std::result::Result;
|
||||
|
||||
use indexmap::IndexMap;
|
||||
use serde::{self, Serialize};
|
||||
use serde::ser::Impossible;
|
||||
|
||||
use ref_value::model::{RefValue, RefValueWrapper};
|
||||
|
||||
use super::serde_error::SerdeError;
|
||||
|
||||
///
|
||||
/// see `serde_json/value/ser.rs`
|
||||
///
|
||||
impl Serialize for RefValue {
|
||||
#[inline]
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: ::serde::Serializer,
|
||||
{
|
||||
match *self {
|
||||
RefValue::Null => serializer.serialize_unit(),
|
||||
RefValue::Bool(b) => serializer.serialize_bool(b),
|
||||
RefValue::Number(ref n) => n.serialize(serializer),
|
||||
RefValue::String(ref s) => serializer.serialize_str(s),
|
||||
RefValue::Array(ref v) => {
|
||||
use std::ops::Deref;
|
||||
let v: Vec<&RefValue> = v.iter().map(|v| v.deref()).collect();
|
||||
v.serialize(serializer)
|
||||
}
|
||||
RefValue::Object(ref m) => {
|
||||
use serde::ser::SerializeMap;
|
||||
use std::ops::Deref;
|
||||
let mut map = try!(serializer.serialize_map(Some(m.len())));
|
||||
for (k, v) in m {
|
||||
try!(map.serialize_key(k));
|
||||
try!(map.serialize_value(v.deref()));
|
||||
}
|
||||
map.end()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Serializer;
|
||||
|
||||
impl serde::Serializer for Serializer {
|
||||
type Ok = RefValue;
|
||||
type Error = SerdeError;
|
||||
|
||||
type SerializeSeq = SerializeVec;
|
||||
type SerializeTuple = SerializeVec;
|
||||
type SerializeTupleStruct = SerializeVec;
|
||||
type SerializeTupleVariant = SerializeTupleVariant;
|
||||
type SerializeMap = SerializeMap;
|
||||
type SerializeStruct = SerializeMap;
|
||||
type SerializeStructVariant = SerializeStructVariant;
|
||||
|
||||
#[inline]
|
||||
fn serialize_bool(self, value: bool) -> Result<RefValue, Self::Error> {
|
||||
Ok(RefValue::Bool(value))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_i8(self, value: i8) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_i64(value as i64)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_i16(self, value: i16) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_i64(value as i64)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_i32(self, value: i32) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_i64(value as i64)
|
||||
}
|
||||
|
||||
fn serialize_i64(self, value: i64) -> Result<RefValue, Self::Error> {
|
||||
Ok(RefValue::Number(value.into()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_u8(self, value: u8) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_u64(value as u64)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_u16(self, value: u16) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_u64(value as u64)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_u32(self, value: u32) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_u64(value as u64)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_u64(self, value: u64) -> Result<RefValue, Self::Error> {
|
||||
Ok(RefValue::Number(value.into()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_f32(self, value: f32) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_f64(value as f64)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_f64(self, value: f64) -> Result<RefValue, Self::Error> {
|
||||
Ok(serde_json::Number::from_f64(value).map_or(RefValue::Null, RefValue::Number))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_char(self, value: char) -> Result<RefValue, Self::Error> {
|
||||
let mut s = String::new();
|
||||
s.push(value);
|
||||
self.serialize_str(&s)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_str(self, value: &str) -> Result<RefValue, Self::Error> {
|
||||
Ok(RefValue::String(value.to_owned()))
|
||||
}
|
||||
|
||||
fn serialize_bytes(self, value: &[u8]) -> Result<RefValue, Self::Error> {
|
||||
let vec = value.iter().map(|&b| RefValue::Number(b.into()).into()).collect();
|
||||
Ok(RefValue::Array(vec))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_unit(self) -> Result<RefValue, Self::Error> {
|
||||
Ok(RefValue::Null)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_unit_struct(self, _name: &'static str) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_unit()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_unit_variant(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
variant: &'static str,
|
||||
) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_str(variant)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_newtype_struct<T: ?Sized>(
|
||||
self,
|
||||
_name: &'static str,
|
||||
value: &T,
|
||||
) -> Result<RefValue, Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
value.serialize(self)
|
||||
}
|
||||
|
||||
fn serialize_newtype_variant<T: ?Sized>(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
variant: &'static str,
|
||||
value: &T,
|
||||
) -> Result<RefValue, Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
let mut values: IndexMap<String, RefValueWrapper> = IndexMap::new();
|
||||
values.insert(String::from(variant), {
|
||||
value.serialize(Serializer)?.into()
|
||||
});
|
||||
Ok(RefValue::Object(values))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_none(self) -> Result<RefValue, Self::Error> {
|
||||
self.serialize_unit()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_some<T: ?Sized>(self, value: &T) -> Result<RefValue, Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
value.serialize(self)
|
||||
}
|
||||
|
||||
fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
|
||||
Ok(SerializeVec {
|
||||
vec: Vec::with_capacity(len.unwrap_or(0)),
|
||||
})
|
||||
}
|
||||
|
||||
fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
|
||||
self.serialize_seq(Some(len))
|
||||
}
|
||||
|
||||
fn serialize_tuple_struct(
|
||||
self,
|
||||
_name: &'static str,
|
||||
len: usize,
|
||||
) -> Result<Self::SerializeTupleStruct, Self::Error> {
|
||||
self.serialize_seq(Some(len))
|
||||
}
|
||||
|
||||
fn serialize_tuple_variant(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
variant: &'static str,
|
||||
len: usize,
|
||||
) -> Result<Self::SerializeTupleVariant, Self::Error> {
|
||||
Ok(SerializeTupleVariant {
|
||||
name: String::from(variant),
|
||||
vec: Vec::with_capacity(len),
|
||||
})
|
||||
}
|
||||
|
||||
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
|
||||
Ok(SerializeMap::Map {
|
||||
map: IndexMap::new(),
|
||||
next_key: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn serialize_struct(
|
||||
self,
|
||||
name: &'static str,
|
||||
len: usize,
|
||||
) -> Result<Self::SerializeStruct, Self::Error> {
|
||||
match name {
|
||||
_ => self.serialize_map(Some(len)),
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_struct_variant(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
variant: &'static str,
|
||||
_len: usize,
|
||||
) -> Result<Self::SerializeStructVariant, Self::Error> {
|
||||
Ok(SerializeStructVariant {
|
||||
name: String::from(variant),
|
||||
map: IndexMap::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SerializeVec {
|
||||
vec: Vec<RefValueWrapper>,
|
||||
}
|
||||
|
||||
pub struct SerializeTupleVariant {
|
||||
name: String,
|
||||
vec: Vec<RefValueWrapper>,
|
||||
}
|
||||
|
||||
pub enum SerializeMap {
|
||||
Map {
|
||||
map: IndexMap<String, RefValueWrapper>,
|
||||
next_key: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
pub struct SerializeStructVariant {
|
||||
name: String,
|
||||
map: IndexMap<String, RefValueWrapper>,
|
||||
}
|
||||
|
||||
impl serde::ser::SerializeSeq for SerializeVec {
|
||||
type Ok = RefValue;
|
||||
type Error = SerdeError;
|
||||
|
||||
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
self.vec.push({
|
||||
value.serialize(Serializer)?.into()
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<RefValue, Self::Error> {
|
||||
Ok(RefValue::Array(self.vec))
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::ser::SerializeTuple for SerializeVec {
|
||||
type Ok = RefValue;
|
||||
type Error = SerdeError;
|
||||
|
||||
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
serde::ser::SerializeSeq::serialize_element(self, value)
|
||||
}
|
||||
|
||||
fn end(self) -> Result<RefValue, Self::Error> {
|
||||
serde::ser::SerializeSeq::end(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::ser::SerializeTupleStruct for SerializeVec {
|
||||
type Ok = RefValue;
|
||||
type Error = SerdeError;
|
||||
|
||||
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
serde::ser::SerializeSeq::serialize_element(self, value)
|
||||
}
|
||||
|
||||
fn end(self) -> Result<RefValue, Self::Error> {
|
||||
serde::ser::SerializeSeq::end(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::ser::SerializeTupleVariant for SerializeTupleVariant {
|
||||
type Ok = RefValue;
|
||||
type Error = SerdeError;
|
||||
|
||||
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
self.vec.push({
|
||||
let a: RefValue = value.serialize(Serializer)?;
|
||||
a.into()
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<RefValue, Self::Error> {
|
||||
let mut object: IndexMap<String, RefValueWrapper> = IndexMap::new();
|
||||
|
||||
object.insert(self.name, RefValue::Array(self.vec).into());
|
||||
|
||||
Ok(RefValue::Object(object))
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::ser::SerializeMap for SerializeMap {
|
||||
type Ok = RefValue;
|
||||
type Error = SerdeError;
|
||||
|
||||
fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
match *self {
|
||||
SerializeMap::Map {
|
||||
ref mut next_key, ..
|
||||
} => {
|
||||
*next_key = Some(key.serialize(MapKeySerializer)?);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
match *self {
|
||||
SerializeMap::Map {
|
||||
ref mut map,
|
||||
ref mut next_key,
|
||||
} => {
|
||||
let key = next_key.take();
|
||||
// Panic because this indicates a bug in the program rather than an
|
||||
// expected failure.
|
||||
let key = key.expect("serialize_value called before serialize_key");
|
||||
map.insert(key, {
|
||||
let a: RefValue = value.serialize(Serializer)?;
|
||||
a.into()
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn end(self) -> Result<RefValue, Self::Error> {
|
||||
match self {
|
||||
SerializeMap::Map { map, .. } => Ok(RefValue::Object(map)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct MapKeySerializer;
|
||||
|
||||
fn key_must_be_a_string() -> SerdeError {
|
||||
SerdeError::from_str("key must be string")
|
||||
}
|
||||
|
||||
impl serde::Serializer for MapKeySerializer {
|
||||
type Ok = String;
|
||||
type Error = SerdeError;
|
||||
|
||||
type SerializeSeq = Impossible<String, Self::Error>;
|
||||
type SerializeTuple = Impossible<String, Self::Error>;
|
||||
type SerializeTupleStruct = Impossible<String, Self::Error>;
|
||||
type SerializeTupleVariant = Impossible<String, Self::Error>;
|
||||
type SerializeMap = Impossible<String, Self::Error>;
|
||||
type SerializeStruct = Impossible<String, Self::Error>;
|
||||
type SerializeStructVariant = Impossible<String, Self::Error>;
|
||||
|
||||
#[inline]
|
||||
fn serialize_unit_variant(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
variant: &'static str,
|
||||
) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(variant.to_owned())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_newtype_struct<T: ?Sized>(
|
||||
self,
|
||||
_name: &'static str,
|
||||
value: &T,
|
||||
) -> Result<Self::Ok, Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
value.serialize(self)
|
||||
}
|
||||
|
||||
fn serialize_bool(self, _value: bool) -> Result<Self::Ok, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_i8(self, value: i8) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(value.to_string())
|
||||
}
|
||||
|
||||
fn serialize_i16(self, value: i16) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(value.to_string())
|
||||
}
|
||||
|
||||
fn serialize_i32(self, value: i32) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(value.to_string())
|
||||
}
|
||||
|
||||
fn serialize_i64(self, value: i64) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(value.to_string())
|
||||
}
|
||||
|
||||
fn serialize_u8(self, value: u8) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(value.to_string())
|
||||
}
|
||||
|
||||
fn serialize_u16(self, value: u16) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(value.to_string())
|
||||
}
|
||||
|
||||
fn serialize_u32(self, value: u32) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(value.to_string())
|
||||
}
|
||||
|
||||
fn serialize_u64(self, value: u64) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(value.to_string())
|
||||
}
|
||||
|
||||
fn serialize_f32(self, _value: f32) -> Result<Self::Ok, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_f64(self, _value: f64) -> Result<Self::Ok, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_char(self, value: char) -> Result<Self::Ok, Self::Error> {
|
||||
Ok({
|
||||
let mut s = String::new();
|
||||
s.push(value);
|
||||
s
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn serialize_str(self, value: &str) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(value.to_owned())
|
||||
}
|
||||
|
||||
fn serialize_bytes(self, _value: &[u8]) -> Result<Self::Ok, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_newtype_variant<T: ?Sized>(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
_variant: &'static str,
|
||||
_value: &T,
|
||||
) -> Result<Self::Ok, Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_some<T: ?Sized>(self, _value: &T) -> Result<Self::Ok, Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_tuple_struct(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_len: usize,
|
||||
) -> Result<Self::SerializeTupleStruct, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_tuple_variant(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
_variant: &'static str,
|
||||
_len: usize,
|
||||
) -> Result<Self::SerializeTupleVariant, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_struct(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_len: usize,
|
||||
) -> Result<Self::SerializeStruct, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
|
||||
fn serialize_struct_variant(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
_variant: &'static str,
|
||||
_len: usize,
|
||||
) -> Result<Self::SerializeStructVariant, Self::Error> {
|
||||
Err(key_must_be_a_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::ser::SerializeStruct for SerializeMap {
|
||||
type Ok = RefValue;
|
||||
type Error = SerdeError;
|
||||
|
||||
fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
match *self {
|
||||
SerializeMap::Map { .. } => {
|
||||
serde::ser::SerializeMap::serialize_key(self, key)?;
|
||||
serde::ser::SerializeMap::serialize_value(self, value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn end(self) -> Result<RefValue, Self::Error> {
|
||||
match self {
|
||||
SerializeMap::Map { .. } => serde::ser::SerializeMap::end(self),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::ser::SerializeStructVariant for SerializeStructVariant {
|
||||
type Ok = RefValue;
|
||||
type Error = SerdeError;
|
||||
|
||||
fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
self.map.insert(String::from(key), {
|
||||
let a: RefValue = value.serialize(Serializer)?;
|
||||
a.into()
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<RefValue, Self::Error> {
|
||||
let mut object: IndexMap<String, RefValueWrapper> = IndexMap::new();
|
||||
|
||||
object.insert(self.name, RefValue::Object(self.map).into());
|
||||
|
||||
Ok(RefValue::Object(object))
|
||||
}
|
||||
}
|
@ -1,38 +0,0 @@
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct SerdeError {
|
||||
msg: String,
|
||||
}
|
||||
|
||||
impl<'a> SerdeError {
|
||||
pub fn new(msg: String) -> Self {
|
||||
SerdeError { msg: msg }
|
||||
}
|
||||
|
||||
pub fn from_str(msg: &str) -> Self {
|
||||
SerdeError { msg: msg.to_string() }
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::de::Error for SerdeError {
|
||||
#[cold]
|
||||
fn custom<T: fmt::Display>(msg: T) -> SerdeError {
|
||||
SerdeError { msg: msg.to_string() }
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::ser::Error for SerdeError {
|
||||
#[cold]
|
||||
fn custom<T: fmt::Display>(msg: T) -> SerdeError {
|
||||
SerdeError { msg: msg.to_string() }
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for SerdeError {}
|
||||
|
||||
impl fmt::Display for SerdeError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.msg)
|
||||
}
|
||||
}
|
335
src/select/cmp.rs
Normal file
335
src/select/cmp.rs
Normal file
@ -0,0 +1,335 @@
|
||||
use array_tool::vec::{Intersect, Union};
|
||||
use serde_json::Value;
|
||||
|
||||
pub(super) trait Cmp {
|
||||
fn cmp_bool(&self, v1: bool, v2: bool) -> bool;
|
||||
|
||||
fn cmp_f64(&self, v1: f64, v2: f64) -> bool;
|
||||
|
||||
fn cmp_string(&self, v1: &str, v2: &str) -> bool;
|
||||
|
||||
fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value>;
|
||||
|
||||
fn default(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct CmpEq;
|
||||
|
||||
impl Cmp for CmpEq {
|
||||
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
|
||||
v1 == v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
|
||||
(v1 - v2).abs() == 0_f64
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
|
||||
v1 == v2
|
||||
}
|
||||
|
||||
fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
|
||||
v1.to_vec().intersect(v2.to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct CmpNe;
|
||||
|
||||
impl Cmp for CmpNe {
|
||||
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
|
||||
v1 != v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
|
||||
(v1 - v2).abs() != 0_f64
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
|
||||
v1 != v2
|
||||
}
|
||||
|
||||
fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
|
||||
v1.to_vec().intersect_if(v2.to_vec(), |a, b| a != b)
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct CmpGt;
|
||||
|
||||
impl Cmp for CmpGt {
|
||||
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
|
||||
v1 & !v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
|
||||
v1 > v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
|
||||
v1 > v2
|
||||
}
|
||||
|
||||
fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct CmpGe;
|
||||
|
||||
impl Cmp for CmpGe {
|
||||
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
|
||||
v1 >= v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
|
||||
v1 >= v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
|
||||
v1 >= v2
|
||||
}
|
||||
|
||||
fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct CmpLt;
|
||||
|
||||
impl Cmp for CmpLt {
|
||||
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
|
||||
!v1 & v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
|
||||
v1 < v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
|
||||
v1 < v2
|
||||
}
|
||||
|
||||
fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct CmpLe;
|
||||
|
||||
impl Cmp for CmpLe {
|
||||
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
|
||||
v1 <= v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
|
||||
v1 <= v2
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
|
||||
v1 <= v2
|
||||
}
|
||||
|
||||
fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct CmpAnd;
|
||||
|
||||
impl Cmp for CmpAnd {
|
||||
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
|
||||
v1 && v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, _v1: f64, _v2: f64) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
|
||||
!v1.is_empty() && !v2.is_empty()
|
||||
}
|
||||
|
||||
fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
|
||||
v1.to_vec().intersect(v2.to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct CmpOr;
|
||||
|
||||
impl Cmp for CmpOr {
|
||||
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
|
||||
v1 || v2
|
||||
}
|
||||
|
||||
fn cmp_f64(&self, _v1: f64, _v2: f64) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn cmp_string(&self, v1: &str, v2: &str) -> bool {
|
||||
!v1.is_empty() || !v2.is_empty()
|
||||
}
|
||||
|
||||
fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
|
||||
v1.to_vec().union(v2.to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod cmp_inner_tests {
|
||||
use serde_json::Value;
|
||||
|
||||
use select::cmp::*;
|
||||
|
||||
#[test]
|
||||
fn cmp_eq() {
|
||||
let cmp_fn = CmpEq;
|
||||
assert_eq!(cmp_fn.default(), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, false), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, true), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), false);
|
||||
assert_eq!(cmp_fn.cmp_string("1", "1"), true);
|
||||
assert_eq!(cmp_fn.cmp_string("1", "2"), false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cmp_ne() {
|
||||
let cmp_fn = CmpNe;
|
||||
assert_eq!(cmp_fn.default(), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, false), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, true), false);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), false);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), true);
|
||||
assert_eq!(cmp_fn.cmp_string("1", "1"), false);
|
||||
assert_eq!(cmp_fn.cmp_string("1", "2"), true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cmp_gt() {
|
||||
let cmp_fn = CmpGt;
|
||||
assert_eq!(cmp_fn.default(), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, false), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, true), false);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), false);
|
||||
assert_eq!(cmp_fn.cmp_string("a", "a"), false);
|
||||
assert_eq!(cmp_fn.cmp_string("b", "a"), true);
|
||||
assert_eq!(cmp_fn.cmp_string("1", "2"), false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cmp_ge() {
|
||||
let cmp_fn = CmpGe;
|
||||
assert_eq!(cmp_fn.default(), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, false), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, true), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), false);
|
||||
assert_eq!(cmp_fn.cmp_string("1", "1"), true);
|
||||
assert_eq!(cmp_fn.cmp_string("ab", "a"), true);
|
||||
assert_eq!(cmp_fn.cmp_string("1", "2"), false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cmp_lt() {
|
||||
let cmp_fn = CmpLt;
|
||||
assert_eq!(cmp_fn.default(), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, false), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(false, true), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, true), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(false, false), false);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), false);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), false);
|
||||
assert_eq!(cmp_fn.cmp_string("a", "a"), false);
|
||||
assert_eq!(cmp_fn.cmp_string("ab", "b"), true);
|
||||
assert_eq!(cmp_fn.cmp_string("1", "2"), true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cmp_le() {
|
||||
let cmp_fn = CmpLe;
|
||||
assert_eq!(cmp_fn.default(), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, false), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(false, true), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, true), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(false, false), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), true);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), false);
|
||||
assert_eq!(cmp_fn.cmp_string("a", "a"), true);
|
||||
assert_eq!(cmp_fn.cmp_string("ab", "b"), true);
|
||||
assert_eq!(cmp_fn.cmp_string("abd", "abc"), false);
|
||||
assert_eq!(cmp_fn.cmp_string("1", "2"), true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cmp_and() {
|
||||
let cmp_fn = CmpAnd;
|
||||
assert_eq!(cmp_fn.default(), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, false), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(false, true), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, true), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(false, false), false);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.0, 0.0), true);
|
||||
assert_eq!(cmp_fn.cmp_string("a", "a"), true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cmp_or() {
|
||||
let cmp_fn = CmpOr;
|
||||
assert_eq!(cmp_fn.default(), false);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, false), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(false, true), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(true, true), true);
|
||||
assert_eq!(cmp_fn.cmp_bool(false, false), false);
|
||||
assert_eq!(cmp_fn.cmp_f64(0.0, 0.0), true);
|
||||
assert_eq!(cmp_fn.cmp_string("a", "a"), true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cmp_json() {
|
||||
let v1 = Value::Bool(true);
|
||||
let v2 = Value::String("1".to_string());
|
||||
let left = [&v1, &v2];
|
||||
let right = [&v1, &v2];
|
||||
let empty: Vec<&Value> = Vec::new();
|
||||
|
||||
assert_eq!(CmpEq.cmp_json(&left, &right), left.to_vec());
|
||||
assert_eq!(CmpNe.cmp_json(&left, &right), left.to_vec());
|
||||
assert_eq!(CmpGt.cmp_json(&left, &right), empty);
|
||||
assert_eq!(CmpGe.cmp_json(&left, &right), empty);
|
||||
assert_eq!(CmpLt.cmp_json(&left, &right), empty);
|
||||
assert_eq!(CmpLe.cmp_json(&left, &right), empty);
|
||||
assert_eq!(CmpAnd.cmp_json(&left, &right), left.to_vec());
|
||||
assert_eq!(CmpOr.cmp_json(&left, &right), left.to_vec());
|
||||
|
||||
assert_eq!(
|
||||
CmpEq.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(true)]),
|
||||
vec![&Value::Bool(true)]
|
||||
);
|
||||
assert_eq!(
|
||||
CmpEq.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(false)]),
|
||||
empty
|
||||
);
|
||||
assert_eq!(
|
||||
CmpNe.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(true)]),
|
||||
empty
|
||||
);
|
||||
assert_eq!(
|
||||
CmpNe.cmp_json(&[&Value::Bool(false)], &[&Value::Bool(true)]),
|
||||
vec![&Value::Bool(false)]
|
||||
);
|
||||
assert_eq!(
|
||||
CmpAnd.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(true)]),
|
||||
vec![&Value::Bool(true)]
|
||||
);
|
||||
assert_eq!(
|
||||
CmpOr.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(false)]),
|
||||
vec![&Value::Bool(true), &Value::Bool(false)]
|
||||
);
|
||||
}
|
||||
}
|
227
src/select/expr_term.rs
Normal file
227
src/select/expr_term.rs
Normal file
@ -0,0 +1,227 @@
|
||||
use serde_json::{Number, Value};
|
||||
use select::cmp::*;
|
||||
use select::{FilterKey, to_f64};
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub(super) enum ExprTerm<'a> {
|
||||
String(String),
|
||||
Number(Number),
|
||||
Bool(bool),
|
||||
Json(Option<Vec<&'a Value>>, Option<FilterKey>, Vec<&'a Value>),
|
||||
}
|
||||
|
||||
impl<'a> ExprTerm<'a> {
|
||||
fn cmp<C1: Cmp, C2: Cmp>(
|
||||
&self,
|
||||
other: &Self,
|
||||
cmp_fn: &C1,
|
||||
reverse_cmp_fn: &C2,
|
||||
) -> ExprTerm<'a> {
|
||||
match &self {
|
||||
ExprTerm::String(s1) => match &other {
|
||||
ExprTerm::String(s2) => ExprTerm::Bool(cmp_fn.cmp_string(s1, s2)),
|
||||
ExprTerm::Json(_, _, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
|
||||
_ => ExprTerm::Bool(cmp_fn.default()),
|
||||
},
|
||||
ExprTerm::Number(n1) => match &other {
|
||||
ExprTerm::Number(n2) => ExprTerm::Bool(cmp_fn.cmp_f64(to_f64(n1), to_f64(n2))),
|
||||
ExprTerm::Json(_, _, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
|
||||
_ => ExprTerm::Bool(cmp_fn.default()),
|
||||
},
|
||||
ExprTerm::Bool(b1) => match &other {
|
||||
ExprTerm::Bool(b2) => ExprTerm::Bool(cmp_fn.cmp_bool(*b1, *b2)),
|
||||
ExprTerm::Json(_, _, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
|
||||
_ => ExprTerm::Bool(cmp_fn.default()),
|
||||
},
|
||||
ExprTerm::Json(rel, fk1, vec1) => {
|
||||
let ret: Vec<&Value> = match &other {
|
||||
ExprTerm::String(s2) => vec1
|
||||
.iter()
|
||||
.filter(|v1| match v1 {
|
||||
Value::String(s1) => cmp_fn.cmp_string(s1, s2),
|
||||
Value::Object(map1) => {
|
||||
if let Some(FilterKey::String(k)) = fk1 {
|
||||
if let Some(Value::String(s1)) = map1.get(k) {
|
||||
return cmp_fn.cmp_string(s1, s2);
|
||||
}
|
||||
}
|
||||
cmp_fn.default()
|
||||
}
|
||||
_ => cmp_fn.default(),
|
||||
})
|
||||
.cloned()
|
||||
.collect(),
|
||||
ExprTerm::Number(n2) => vec1
|
||||
.iter()
|
||||
.filter(|v1| match v1 {
|
||||
Value::Number(n1) => cmp_fn.cmp_f64(to_f64(n1), to_f64(n2)),
|
||||
Value::Object(map1) => {
|
||||
if let Some(FilterKey::String(k)) = fk1 {
|
||||
if let Some(Value::Number(n1)) = map1.get(k) {
|
||||
return cmp_fn.cmp_f64(to_f64(n1), to_f64(n2));
|
||||
}
|
||||
}
|
||||
cmp_fn.default()
|
||||
}
|
||||
_ => cmp_fn.default(),
|
||||
})
|
||||
.cloned()
|
||||
.collect(),
|
||||
ExprTerm::Bool(b2) => vec1
|
||||
.iter()
|
||||
.filter(|v1| match v1 {
|
||||
Value::Bool(b1) => cmp_fn.cmp_bool(*b1, *b2),
|
||||
Value::Object(map1) => {
|
||||
if let Some(FilterKey::String(k)) = fk1 {
|
||||
if let Some(Value::Bool(b1)) = map1.get(k) {
|
||||
return cmp_fn.cmp_bool(*b1, *b2);
|
||||
}
|
||||
}
|
||||
cmp_fn.default()
|
||||
}
|
||||
_ => cmp_fn.default(),
|
||||
})
|
||||
.cloned()
|
||||
.collect(),
|
||||
ExprTerm::Json(parent, _, vec2) => {
|
||||
if let Some(vec1) = rel {
|
||||
cmp_fn.cmp_json(vec1, vec2)
|
||||
} else if let Some(vec2) = parent {
|
||||
cmp_fn.cmp_json(vec1, vec2)
|
||||
} else {
|
||||
cmp_fn.cmp_json(vec1, vec2)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if ret.is_empty() {
|
||||
ExprTerm::Bool(cmp_fn.default())
|
||||
} else if let Some(rel) = rel {
|
||||
if let ExprTerm::Json(_, _, _) = &other {
|
||||
ExprTerm::Json(Some(rel.to_vec()), None, ret)
|
||||
} else {
|
||||
let mut tmp = Vec::new();
|
||||
for rel_value in rel {
|
||||
if let Value::Object(map) = rel_value {
|
||||
for map_value in map.values() {
|
||||
for result_value in &ret {
|
||||
if map_value.eq(*result_value) {
|
||||
tmp.push(*rel_value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ExprTerm::Json(Some(tmp), None, ret)
|
||||
}
|
||||
} else {
|
||||
ExprTerm::Json(None, None, ret)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn eq(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("eq - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpEq, &CmpEq);
|
||||
debug!("eq = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn ne(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("ne - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpNe, &CmpNe);
|
||||
debug!("ne = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn gt(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("gt - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpGt, &CmpLt);
|
||||
debug!("gt = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn ge(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("ge - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpGe, &CmpLe);
|
||||
debug!("ge = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn lt(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("lt - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpLt, &CmpGt);
|
||||
debug!("lt = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn le(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("le - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpLe, &CmpGe);
|
||||
debug!("le = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn and(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("and - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpAnd, &CmpAnd);
|
||||
debug!("and = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn or(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("or - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpOr, &CmpOr);
|
||||
debug!("or = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Into<ExprTerm<'a>> for &Vec<&'a Value> {
|
||||
fn into(self) -> ExprTerm<'a> {
|
||||
if self.len() == 1 {
|
||||
match &self[0] {
|
||||
Value::Number(v) => return ExprTerm::Number(v.clone()),
|
||||
Value::String(v) => return ExprTerm::String(v.clone()),
|
||||
Value::Bool(v) => return ExprTerm::Bool(*v),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
ExprTerm::Json(None, None, self.to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod expr_term_inner_tests {
|
||||
use serde_json::{Number, Value};
|
||||
use select::expr_term::ExprTerm;
|
||||
|
||||
#[test]
|
||||
fn value_vec_into() {
|
||||
let v = Value::Bool(true);
|
||||
let vec = &vec![&v];
|
||||
let term: ExprTerm = vec.into();
|
||||
assert_eq!(term, ExprTerm::Bool(true));
|
||||
|
||||
let v = Value::String("a".to_string());
|
||||
let vec = &vec![&v];
|
||||
let term: ExprTerm = vec.into();
|
||||
assert_eq!(term, ExprTerm::String("a".to_string()));
|
||||
|
||||
let v = serde_json::from_str("1.0").unwrap();
|
||||
let vec = &vec![&v];
|
||||
let term: ExprTerm = vec.into();
|
||||
assert_eq!(term, ExprTerm::Number(Number::from_f64(1.0).unwrap()));
|
||||
}
|
||||
}
|
1001
src/select/mod.rs
Normal file
1001
src/select/mod.rs
Normal file
File diff suppressed because it is too large
Load Diff
99
src/select/value_walker.rs
Normal file
99
src/select/value_walker.rs
Normal file
@ -0,0 +1,99 @@
|
||||
use serde_json::Value;
|
||||
use std::collections::HashSet;
|
||||
|
||||
pub(super) struct ValueWalker;
|
||||
|
||||
impl<'a> ValueWalker {
|
||||
pub fn all_with_num(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, index: f64) {
|
||||
Self::walk(vec, tmp, &|v| if v.is_array() {
|
||||
if let Some(item) = v.get(index as usize) {
|
||||
Some(vec![item])
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
});
|
||||
}
|
||||
|
||||
pub fn all_with_str(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, key: &str, is_filter: bool) {
|
||||
if is_filter {
|
||||
Self::walk(vec, tmp, &|v| match v {
|
||||
Value::Object(map) if map.contains_key(key) => Some(vec![v]),
|
||||
_ => None,
|
||||
});
|
||||
} else {
|
||||
Self::walk(vec, tmp, &|v| match v {
|
||||
Value::Object(map) => match map.get(key) {
|
||||
Some(v) => Some(vec![v]),
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pub fn all(vec: &[&'a Value], tmp: &mut Vec<&'a Value>) {
|
||||
Self::walk(vec, tmp, &|v| match v {
|
||||
Value::Array(vec) => Some(vec.iter().collect()),
|
||||
Value::Object(map) => {
|
||||
let mut tmp = Vec::new();
|
||||
for (_, v) in map {
|
||||
tmp.push(v);
|
||||
}
|
||||
Some(tmp)
|
||||
}
|
||||
_ => None,
|
||||
});
|
||||
}
|
||||
|
||||
fn walk<F>(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, fun: &F) where F: Fn(&Value) -> Option<Vec<&Value>> {
|
||||
for v in vec {
|
||||
Self::_walk(v, tmp, fun);
|
||||
}
|
||||
}
|
||||
|
||||
fn _walk<F>(v: &'a Value, tmp: &mut Vec<&'a Value>, fun: &F) where F: Fn(&Value) -> Option<Vec<&Value>> {
|
||||
if let Some(mut ret) = fun(v) {
|
||||
tmp.append(&mut ret);
|
||||
}
|
||||
|
||||
match v {
|
||||
Value::Array(vec) => {
|
||||
for v in vec {
|
||||
Self::_walk(v, tmp, fun);
|
||||
}
|
||||
}
|
||||
Value::Object(map) => {
|
||||
for (_, v) in map {
|
||||
Self::_walk(&v, tmp, fun);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_dedup(v: &'a Value,
|
||||
tmp: &mut Vec<&'a Value>,
|
||||
key: &str,
|
||||
visited: &mut HashSet<*const Value>, ) {
|
||||
match v {
|
||||
Value::Object(map) => {
|
||||
if map.contains_key(key) {
|
||||
let ptr = v as *const Value;
|
||||
if !visited.contains(&ptr) {
|
||||
visited.insert(ptr);
|
||||
tmp.push(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
Value::Array(vec) => {
|
||||
for v in vec {
|
||||
Self::walk_dedup(v, tmp, key, visited);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
253
tests/array_filter.rs
Normal file
253
tests/array_filter.rs
Normal file
@ -0,0 +1,253 @@
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use common::{read_json, select_and_then_compare, setup};
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn array_range_default() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[1, 2]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_all() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[ : ]",
|
||||
json!(["first", "second"]),
|
||||
json!(["first", "second"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_all() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[::]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_only_step_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[::2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "third", "fifth"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_only_start_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[1::]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second", "third", "forth", "fifth"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_empty_step_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[1:2:]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_empty_end_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[1::2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second", "forth"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_by_1() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[0:3:1]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "second", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_by_2() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[0:3:2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_only_negative_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[-4:]",
|
||||
json!(["first", "second", "third"]),
|
||||
json!(["first", "second", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_only_end_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[:4]",
|
||||
json!(["first", "second", "third"]),
|
||||
json!(["first", "second", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_only_from_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[1: ]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_only_nagative_end_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[:-2]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
{"id": 0, "name": "Millicent Norman"}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..friends[2].name",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!(["Gray Berry", "Gray Berry"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_all_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..friends[*].name",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
"Vincent Cannon",
|
||||
"Gray Berry",
|
||||
"Millicent Norman",
|
||||
"Vincent Cannon",
|
||||
"Gray Berry"
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_all_and_then_key() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$['school']['friends'][*].['name']",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!(["Millicent Norman", "Vincent Cannon", "Gray Berry"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_index_and_then_key() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$['school']['friends'][0].['name']",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!(["Millicent Norman"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_multiple_key() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.["eyeColor", "name"]"#,
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!(["blue", "Leonor Herman"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bugs40_bracket_notation_after_recursive_descent() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[0]",
|
||||
json!([
|
||||
"first",
|
||||
{
|
||||
"key": [
|
||||
"first nested",
|
||||
{
|
||||
"more": [
|
||||
{"nested": ["deepest", "second"]},
|
||||
["more", "values"]
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]),
|
||||
json!([
|
||||
"first",
|
||||
"first nested",
|
||||
{
|
||||
"nested" : [
|
||||
"deepest",
|
||||
"second"
|
||||
]
|
||||
},
|
||||
"deepest",
|
||||
"more"
|
||||
]),
|
||||
);
|
||||
}
|
56
tests/common.rs
Normal file
56
tests/common.rs
Normal file
@ -0,0 +1,56 @@
|
||||
extern crate env_logger;
|
||||
extern crate jsonpath_lib as jsonpath;
|
||||
extern crate serde_json;
|
||||
|
||||
use std::io::Read;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
use self::jsonpath::Selector;
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn setup() {
|
||||
let _ = env_logger::try_init();
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn read_json(path: &str) -> Value {
|
||||
let mut f = std::fs::File::open(path).unwrap();
|
||||
let mut contents = String::new();
|
||||
f.read_to_string(&mut contents).unwrap();
|
||||
serde_json::from_str(&contents).unwrap()
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn read_contents(path: &str) -> String {
|
||||
let mut f = std::fs::File::open(path).unwrap();
|
||||
let mut contents = String::new();
|
||||
f.read_to_string(&mut contents).unwrap();
|
||||
contents
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn select_and_then_compare(path: &str, json: Value, target: Value) {
|
||||
let mut selector = Selector::default();
|
||||
let result = selector
|
||||
.str_path(path)
|
||||
.unwrap()
|
||||
.value(&json)
|
||||
.select_as::<Value>()
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
result,
|
||||
match target {
|
||||
Value::Array(vec) => vec,
|
||||
_ => panic!("Give me the Array!"),
|
||||
},
|
||||
"{}",
|
||||
path
|
||||
);
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn compare_result(result: Vec<&Value>, target: Value) {
|
||||
let result = serde_json::to_value(result).unwrap();
|
||||
assert_eq!(result, target);
|
||||
}
|
613
tests/filter.rs
613
tests/filter.rs
@ -1,421 +1,280 @@
|
||||
extern crate env_logger;
|
||||
extern crate jsonpath_lib as jsonpath;
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use std::io::Read;
|
||||
use common::{read_json, select_and_then_compare, setup};
|
||||
|
||||
use serde_json::Value;
|
||||
use jsonpath::filter::value_filter::{ValueFilter, JsonValueFilter};
|
||||
use jsonpath::parser::parser::Parser;
|
||||
mod common;
|
||||
|
||||
fn setup() {
|
||||
let _ = env_logger::try_init();
|
||||
}
|
||||
#[test]
|
||||
fn quote() {
|
||||
setup();
|
||||
|
||||
fn new_value_filter(file: &str) -> ValueFilter {
|
||||
let string = read_json(file);
|
||||
let json: Value = serde_json::from_str(string.as_str()).unwrap();
|
||||
ValueFilter::new((&json).into(), false, false)
|
||||
}
|
||||
|
||||
fn do_filter(path: &str, file: &str) -> JsonValueFilter {
|
||||
let string = read_json(file);
|
||||
let mut jf = JsonValueFilter::new(string.as_str()).unwrap();
|
||||
let mut parser = Parser::new(path);
|
||||
parser.parse(&mut jf).unwrap();
|
||||
jf
|
||||
}
|
||||
|
||||
fn read_json(path: &str) -> String {
|
||||
let mut f = std::fs::File::open(path).unwrap();
|
||||
let mut contents = String::new();
|
||||
f.read_to_string(&mut contents).unwrap();
|
||||
contents
|
||||
select_and_then_compare(
|
||||
r#"$['single\'quote']"#,
|
||||
json!({"single'quote":"value"}),
|
||||
json!(["value"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$["double\"quote"]"#,
|
||||
json!({"double\"quote":"value"}),
|
||||
json!(["value"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn step_in() {
|
||||
fn filter_next_all() {
|
||||
setup();
|
||||
|
||||
let mut jf = new_value_filter("./benches/data_obj.json");
|
||||
{
|
||||
let current = jf.step_in_str("friends");
|
||||
assert_eq!(current.is_array(), true);
|
||||
}
|
||||
|
||||
let mut jf = new_value_filter("./benches/data_array.json");
|
||||
{
|
||||
let current = jf.step_in_num(&1.0);
|
||||
assert_eq!(current.get_val().is_object(), true);
|
||||
}
|
||||
{
|
||||
let current = jf.step_in_str("friends");
|
||||
assert_eq!(current.is_array(), true);
|
||||
}
|
||||
let mut jf = new_value_filter("./benches/data_obj.json");
|
||||
{
|
||||
jf.step_in_str("school");
|
||||
jf.step_in_str("friends");
|
||||
jf.step_in_all();
|
||||
let current = jf.step_in_str("name");
|
||||
let friends = json!([
|
||||
"Millicent Norman",
|
||||
"Vincent Cannon",
|
||||
"Gray Berry"
|
||||
]);
|
||||
|
||||
assert_eq!(friends, current.into_value());
|
||||
}
|
||||
let mut jf = new_value_filter("./benches/data_obj.json");
|
||||
{
|
||||
let current = jf.step_leaves_str("name");
|
||||
let names = json!([
|
||||
"Leonor Herman",
|
||||
"Millicent Norman",
|
||||
"Vincent Cannon",
|
||||
"Gray Berry",
|
||||
"Vincent Cannon",
|
||||
"Gray Berry"
|
||||
]);
|
||||
assert_eq!(names, current.into_value());
|
||||
for path in &[r#"$.*"#, r#"$[*]"#] {
|
||||
select_and_then_compare(
|
||||
path,
|
||||
json!(["string", 42, { "key": "value" }, [0, 1]]),
|
||||
json!(["string", 42, { "key": "value" }, [0, 1]]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array() {
|
||||
fn filter_all() {
|
||||
setup();
|
||||
|
||||
let friends = json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]);
|
||||
|
||||
let jf = do_filter("$.school.friends[1, 2]", "./benches/data_obj.json");
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.school.friends[1:]", "./benches/data_obj.json");
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.school.friends[:-2]", "./benches/data_obj.json");
|
||||
let friends = json!([
|
||||
{"id": 0, "name": "Millicent Norman"}
|
||||
]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$..friends[2].name", "./benches/data_obj.json");
|
||||
let friends = json!(["Gray Berry", "Gray Berry"]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$..friends[*].name", "./benches/data_obj.json");
|
||||
let friends = json!(["Vincent Cannon","Gray Berry","Millicent Norman","Vincent Cannon","Gray Berry"]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$['school']['friends'][*].['name']", "./benches/data_obj.json");
|
||||
let friends = json!(["Millicent Norman","Vincent Cannon","Gray Berry"]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$['school']['friends'][0].['name']", "./benches/data_obj.json");
|
||||
let friends = json!("Millicent Norman");
|
||||
assert_eq!(friends, jf.into_value());
|
||||
for path in &[r#"$..*"#, r#"$..[*]"#] {
|
||||
select_and_then_compare(
|
||||
path,
|
||||
json!(["string", 42, { "key": "value" }, [0, 1]]),
|
||||
json!([ "string", 42, { "key" : "value" }, [ 0, 1 ], "value", 0, 1 ]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type() {
|
||||
fn filter_array_next_all() {
|
||||
setup();
|
||||
|
||||
let friends = json!({
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
});
|
||||
|
||||
let jf = do_filter("$.school", "./benches/data_obj.json");
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.school[?(@.friends[0])]", "./benches/data_obj.json");
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.school[?(@.friends[10])]", "./benches/data_obj.json");
|
||||
assert_eq!(Value::Null, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.school[?(1==1)]", "./benches/data_obj.json");
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.school.friends[?(1==1)]", "./benches/data_obj.json");
|
||||
let friends = json!([
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
for path in &[r#"$.*.*"#, r#"$[*].*"#, r#"$.*[*]"#, r#"$[*][*]"#] {
|
||||
select_and_then_compare(
|
||||
path,
|
||||
json!(["string", 42, { "key": "value" }, [0, 1]]),
|
||||
json!(["value", 0, 1]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_default() {
|
||||
fn filter_all_complex() {
|
||||
setup();
|
||||
|
||||
let jf = do_filter("$.school[?(@.friends == @.friends)]", "./benches/data_obj.json");
|
||||
let friends = json!({
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
});
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.friends[?(@.name)]", "./benches/data_obj.json");
|
||||
let friends = json!([
|
||||
{ "id" : 1, "name" : "Vincent Cannon" },
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.friends[?(@.id >= 2)]", "./benches/data_obj.json");
|
||||
let friends = json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.friends[?(@.id >= 2 || @.id == 1)]", "./benches/data_obj.json");
|
||||
let friends = json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" },
|
||||
{ "id" : 1, "name" : "Vincent Cannon" }
|
||||
]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$.friends[?( (@.id >= 2 || @.id == 1) && @.id == 0)]", "./benches/data_obj.json");
|
||||
assert_eq!(Value::Null, jf.into_value());
|
||||
|
||||
let jf = do_filter("$..friends[?(@.id == $.index)].id", "./benches/data_obj.json");
|
||||
let friends = json!([0, 0]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$..book[?($.store.bicycle.price < @.price)].price", "./benches/example.json");
|
||||
let friends = json!([22.99]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
|
||||
let jf = do_filter("$..book[?( (@.price == 12.99 || @.category == 'reference') && @.price > 10)].price", "./benches/example.json");
|
||||
let friends = json!([12.99]);
|
||||
assert_eq!(friends, jf.into_value());
|
||||
for path in &[r#"$..friends.*"#, r#"$[*].friends.*"#] {
|
||||
select_and_then_compare(
|
||||
path,
|
||||
read_json("./benchmark/data_array.json"),
|
||||
json!([
|
||||
{ "id" : 0, "name" : "Millicent Norman" },
|
||||
{ "id" : 1, "name" : "Vincent Cannon" },
|
||||
{ "id" : 2, "name" : "Gray Berry" },
|
||||
{ "id" : 0, "name" : "Tillman Mckay" },
|
||||
{ "id" : 1, "name" : "Rivera Berg" },
|
||||
{ "id" : 2, "name" : "Rosetta Erickson" }
|
||||
]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_number() {
|
||||
fn filter_parent_with_matched_child() {
|
||||
setup();
|
||||
|
||||
let json = json!({ "a": 1 });
|
||||
let ret = jsonpath::select(&json, "$.[?(@.a == 1)]").unwrap();
|
||||
assert_eq!(json, ret);
|
||||
let ret = jsonpath::select(&json, "$.[?(@.a != 2)]").unwrap();
|
||||
assert_eq!(json, ret);
|
||||
let ret = jsonpath::select(&json, "$.[?(@.a < 2)]").unwrap();
|
||||
assert_eq!(json, ret);
|
||||
let ret = jsonpath::select(&json, "$.[?(@.a <= 1)]").unwrap();
|
||||
assert_eq!(json, ret);
|
||||
let ret = jsonpath::select(&json, "$.[?(@.a > 0)]").unwrap();
|
||||
assert_eq!(json, ret);
|
||||
let ret = jsonpath::select(&json, "$.[?(@.a >= 0)]").unwrap();
|
||||
assert_eq!(json, ret);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_string() {
|
||||
setup();
|
||||
|
||||
let json = json!({ "a": "b" });
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a == "b")]"#).unwrap();
|
||||
assert_eq!(json!({ "a": "b" }), ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a != "c")]"#).unwrap();
|
||||
assert_eq!(json!({ "a": "b" }), ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a < "b")]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a <= "b")]"#).unwrap();
|
||||
assert_eq!(json!({ "a": "b" }), ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a > "b")]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a >= "b")]"#).unwrap();
|
||||
assert_eq!(json!({ "a": "b" }), ret);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_object() {
|
||||
setup();
|
||||
|
||||
let json = json!({
|
||||
"a": { "1": 1 },
|
||||
"b": { "2": 2 },
|
||||
"c": { "1": 1 },
|
||||
});
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a == @.c)]"#).unwrap();
|
||||
assert_eq!(json, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a != @.c)]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a < @.c)]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a <= @.c)]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a > @.c)]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a >= @.c)]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_complex() {
|
||||
setup();
|
||||
|
||||
let json = json!({ "a": { "b": 1 } });
|
||||
let ret = jsonpath::select(&json, r#"$.[?(1 == @.a)]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?("1" != @.a)]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a <= 1)]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
let ret = jsonpath::select(&json, r#"$.[?(@.a > "1")]"#).unwrap();
|
||||
assert_eq!(Value::Null, ret);
|
||||
}
|
||||
#[test]
fn example() {
    setup();

    let jf = do_filter("$.store.book[*].author", "./benches/example.json");
    let ret = json!(["Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$..author", "./benches/example.json");
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$.store.*", "./benches/example.json");
    let ret = json!([
        [
            {"category" : "reference", "author" : "Nigel Rees","title" : "Sayings of the Century", "price" : 8.95},
            {"category" : "fiction", "author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99},
            {"category" : "fiction", "author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
            {"category" : "fiction", "author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
        ],
        {"color" : "red","price" : 19.95},
    ]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$.store..price", "./benches/example.json");
    let ret = json!([8.95, 12.99, 8.99, 22.99, 19.95]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$..book[2]", "./benches/example.json");
    let ret = json!([{
        "category" : "fiction",
        "author" : "Herman Melville",
        "title" : "Moby Dick",
        "isbn" : "0-553-21311-3",
        "price" : 8.99
    }]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$..book[-2]", "./benches/example.json");
    let ret = json!([{
        "category" : "fiction",
        "author" : "Herman Melville",
        "title" : "Moby Dick",
        "isbn" : "0-553-21311-3",
        "price" : 8.99
    }]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$..book[0,1]", "./benches/example.json");
    let ret = json!([
        {
            "category" : "reference",
            "author" : "Nigel Rees",
            "title" : "Sayings of the Century",
            "price" : 8.95
        },
        {
            "category" : "fiction",
            "author" : "Evelyn Waugh",
            "title" : "Sword of Honour",
            "price" : 12.99
        }
    ]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$..book[:2]", "./benches/example.json");
    let ret = json!([
        {
            "category" : "reference",
            "author" : "Nigel Rees",
            "title" : "Sayings of the Century",
            "price" : 8.95
        },
        {
            "category" : "fiction",
            "author" : "Evelyn Waugh",
            "title" : "Sword of Honour",
            "price" : 12.99
        }
    ]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$..book[2:]", "./benches/example.json");
    let ret = json!([
        {
            "category" : "fiction",
            "author" : "Herman Melville",
            "title" : "Moby Dick",
            "isbn" : "0-553-21311-3",
            "price" : 8.99
        },
        {
            "category" : "fiction",
            "author" : "J. R. R. Tolkien",
            "title" : "The Lord of the Rings",
            "isbn" : "0-395-19395-8",
            "price" : 22.99
        }
    ]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$..book[?(@.isbn)]", "./benches/example.json");
    let ret = json!([
        {
            "category" : "fiction",
            "author" : "Herman Melville",
            "title" : "Moby Dick",
            "isbn" : "0-553-21311-3",
            "price" : 8.99
        },
        {
            "category" : "fiction",
            "author" : "J. R. R. Tolkien",
            "title" : "The Lord of the Rings",
            "isbn" : "0-395-19395-8",
            "price" : 22.99
        }
    ]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$.store.book[?(@.price < 10)]", "./benches/example.json");
    let ret = json!([
        {
            "category" : "reference",
            "author" : "Nigel Rees",
            "title" : "Sayings of the Century",
            "price" : 8.95
        },
        {
            "category" : "fiction",
            "author" : "Herman Melville",
            "title" : "Moby Dick",
            "isbn" : "0-553-21311-3",
            "price" : 8.99
        }
    ]);
    assert_eq!(ret, jf.into_value());

    let jf = do_filter("$..*", "./benches/example.json");
    let json: Value = serde_json::from_str(read_json("./benches/giveme_every_thing_result.json").as_str()).unwrap();
    assert_eq!(json, jf.into_value());
}

    select_and_then_compare(
        "$.a[?(@.b.c == 1)]",
        json!({
            "a": {
                "b": {
                    "c": 1
                }
            }
        }),
        json!([
            {
                "b" : {
                    "c" : 1
                }
            }
        ]),
    );
}

#[test]
fn filter_parent_exist_child() {
    setup();

    select_and_then_compare(
        "$.a[?(@.b.c)]",
        json!({
            "a": {
                "b": {
                    "c": 1
                }
            }
        }),
        json!([
            {
                "b" : {
                    "c" : 1
                }
            }
        ]),
    );
}

#[test]
fn filter_parent_paths() {
    setup();

    select_and_then_compare(
        "$[?(@.key.subKey == 'subKey2')]",
        json!([
            {"key": {"seq": 1, "subKey": "subKey1"}},
            {"key": {"seq": 2, "subKey": "subKey2"}},
            {"key": 42},
            {"some": "value"}
        ]),
        json!([{"key": {"seq": 2, "subKey": "subKey2"}}]),
    );
}

#[test]
fn bugs33_exist_in_all() {
    setup();

    select_and_then_compare(
        "$..[?(@.first.second)]",
        json!({
            "foo": {
                "first": { "second": "value" }
            },
            "foo2": {
                "first": {}
            },
            "foo3": {
            }
        }),
        json!([
            {
                "first": {
                    "second": "value"
                }
            }
        ]),
    );
}

#[test]
fn bugs33_exist_left_in_all_with_and_condition() {
    setup();

    select_and_then_compare(
        "$..[?(@.first && @.first.second)]",
        json!({
            "foo": {
                "first": { "second": "value" }
            },
            "foo2": {
                "first": {}
            },
            "foo3": {
            }
        }),
        json!([
            {
                "first": {
                    "second": "value"
                }
            }
        ]),
    );
}

#[test]
fn bugs33_exist_right_in_all_with_and_condition() {
    setup();

    select_and_then_compare(
        "$..[?(@.b.c.d && @.b)]",
        json!({
            "a": {
                "b": {
                    "c": {
                        "d" : {
                            "e" : 1
                        }
                    }
                }
            }
        }),
        json!([
            {
                "b" : {
                    "c" : {
                        "d" : {
                            "e" : 1
                        }
                    }
                }
            }
        ]),
    );
}

#[test]
fn bugs38_array_notation_in_filter() {
    setup();

    select_and_then_compare(
        "$[?(@['key']==42)]",
        json!([
            {"key": 0},
            {"key": 42},
            {"key": -1},
            {"key": 41},
            {"key": 43},
            {"key": 42.0001},
            {"key": 41.9999},
            {"key": 100},
            {"some": "value"}
        ]),
        json!([{"key": 42}]),
    );

    select_and_then_compare(
        "$[?(@['key'].subKey == 'subKey2')]",
        json!([
            {"key": {"seq": 1, "subKey": "subKey1"}},
            {"key": {"seq": 2, "subKey": "subKey2"}},
            {"key": 42},
            {"some": "value"}
        ]),
        json!([{"key": {"seq": 2, "subKey": "subKey2"}}]),
    );

    select_and_then_compare(
        "$[?(@['key']['subKey'] == 'subKey2')]",
        json!([
            {"key": {"seq": 1, "subKey": "subKey1"}},
            {"key": {"seq": 2, "subKey": "subKey2"}},
            {"key": 42},
            {"some": "value"}
        ]),
        json!([{"key": {"seq": 2, "subKey": "subKey2"}}]),
    );

    select_and_then_compare(
        "$..key[?(@['subKey'] == 'subKey2')]",
        json!([
            {"key": {"seq": 1, "subKey": "subKey1"}},
            {"key": {"seq": 2, "subKey": "subKey2"}},
            {"key": 42},
            {"some": "value"}
        ]),
        json!([{"seq": 2, "subKey": "subKey2"}]),
    );
}
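The `do_filter` helper that the removed `example()` test above relies on is defined in a shared test module that this diff does not include. The following is only a rough sketch of the pattern it follows (read a fixture file, run the JSONPath query, expose the matches through `into_value()`); it assumes the crate is imported as `jsonpath_lib` and that `select` returns the matched values, and every other name here is an illustrative assumption rather than the repository's actual helper.

use serde_json::Value;
use std::fs;

// Hypothetical stand-in for the unshown `do_filter` test helper.
struct Filtered(Value);

impl Filtered {
    fn into_value(self) -> Value {
        self.0
    }
}

fn do_filter(path: &str, file: &str) -> Filtered {
    // Load the fixture file and parse it into a serde_json::Value.
    let json: Value = serde_json::from_str(&fs::read_to_string(file).expect("readable fixture"))
        .expect("fixture should be valid JSON");
    // Run the JSONPath query (assumes the crate is available as `jsonpath_lib`
    // and that `select` returns the matched values).
    let matches = jsonpath_lib::select(&json, path).expect("path should parse");
    // Re-serialize the borrowed matches into an owned array so tests can compare with json!(...).
    Filtered(serde_json::to_value(matches).expect("matches should serialize"))
}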
242 tests/jsonpath_examples.rs Normal file
@@ -0,0 +1,242 @@
#[macro_use]
extern crate serde_json;

use common::{read_json, select_and_then_compare, setup};

mod common;

#[test]
fn example_authros_of_all_books() {
    setup();

    select_and_then_compare(
        r#"$.store.book[*].author"#,
        read_json("./benchmark/example.json"),
        json!([
            "Nigel Rees",
            "Evelyn Waugh",
            "Herman Melville",
            "J. R. R. Tolkien"
        ]),
    );
}

#[test]
fn all_authors() {
    setup();

    select_and_then_compare(
        r#"$..author"#,
        read_json("./benchmark/example.json"),
        json!([
            "Nigel Rees",
            "Evelyn Waugh",
            "Herman Melville",
            "J. R. R. Tolkien"
        ]),
    );
}

#[test]
fn all_things_both_books_and_bicycles() {
    setup();

    select_and_then_compare(
        r#"$.store.*"#,
        read_json("./benchmark/example.json"),
        json!([
            [
                {"category" : "reference", "author" : "Nigel Rees","title" : "Sayings of the Century", "price" : 8.95},
                {"category" : "fiction", "author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99},
                {"category" : "fiction", "author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
                {"category" : "fiction", "author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
            ],
            {"color" : "red","price" : 19.95},
        ]),
    );
}

#[test]
fn the_price_of_everything() {
    setup();

    select_and_then_compare(
        r#"$.store..price"#,
        read_json("./benchmark/example.json"),
        json!([8.95, 12.99, 8.99, 22.99, 19.95]),
    );
}

#[test]
fn the_third_book() {
    setup();

    select_and_then_compare(
        r#"$..book[2]"#,
        read_json("./benchmark/example.json"),
        json!([
            {
                "category" : "fiction",
                "author" : "Herman Melville",
                "title" : "Moby Dick",
                "isbn" : "0-553-21311-3",
                "price" : 8.99
            }
        ]),
    );
}

#[test]
fn the_second_to_last_book() {
    setup();

    select_and_then_compare(
        r#"$..book[-2]"#,
        read_json("./benchmark/example.json"),
        json!([
            {
                "category" : "fiction",
                "author" : "Herman Melville",
                "title" : "Moby Dick",
                "isbn" : "0-553-21311-3",
                "price" : 8.99
            }
        ]),
    );
}

#[test]
fn the_first_two_books() {
    setup();

    select_and_then_compare(
        r#"$..book[0, 1]"#,
        read_json("./benchmark/example.json"),
        json!([
            {
                "category" : "reference",
                "author" : "Nigel Rees",
                "title" : "Sayings of the Century",
                "price" : 8.95
            },
            {
                "category" : "fiction",
                "author" : "Evelyn Waugh",
                "title" : "Sword of Honour",
                "price" : 12.99
            }
        ]),
    );
}

#[test]
fn all_books_from_index_0_inclusive_until_index_2_exclusive() {
    setup();

    select_and_then_compare(
        r#"$..book[:2]"#,
        read_json("./benchmark/example.json"),
        json!([
            {
                "category" : "reference",
                "author" : "Nigel Rees",
                "title" : "Sayings of the Century",
                "price" : 8.95
            },
            {
                "category" : "fiction",
                "author" : "Evelyn Waugh",
                "title" : "Sword of Honour",
                "price" : 12.99
            }
        ]),
    );
}

#[test]
fn all_books_from_index_1_inclusive_until_index_2_exclusive() {
    setup();

    select_and_then_compare(
        r#"$..book[2:]"#,
        read_json("./benchmark/example.json"),
        json!([
            {
                "category" : "fiction",
                "author" : "Herman Melville",
                "title" : "Moby Dick",
                "isbn" : "0-553-21311-3",
                "price" : 8.99
            },
            {
                "category" : "fiction",
                "author" : "J. R. R. Tolkien",
                "title" : "The Lord of the Rings",
                "isbn" : "0-395-19395-8",
                "price" : 22.99
            }
        ]),
    );
}

#[test]
fn all_books_with_an_isbn_number() {
    setup();

    select_and_then_compare(
        r#"$..book[?(@.isbn)]"#,
        read_json("./benchmark/example.json"),
        json!([
            {
                "category" : "fiction",
                "author" : "Herman Melville",
                "title" : "Moby Dick",
                "isbn" : "0-553-21311-3",
                "price" : 8.99
            },
            {
                "category" : "fiction",
                "author" : "J. R. R. Tolkien",
                "title" : "The Lord of the Rings",
                "isbn" : "0-395-19395-8",
                "price" : 22.99
            }
        ]),
    );
}

#[test]
fn all_books_in_store_cheaper_than_10() {
    setup();

    select_and_then_compare(
        r#"$.store.book[?(@.price < 10)]"#,
        read_json("./benchmark/example.json"),
        json!([
            {
                "category" : "reference",
                "author" : "Nigel Rees",
                "title" : "Sayings of the Century",
                "price" : 8.95
            },
            {
                "category" : "fiction",
                "author" : "Herman Melville",
                "title" : "Moby Dick",
                "isbn" : "0-553-21311-3",
                "price" : 8.99
            }
        ]),
    );
}

#[test]
fn give_me_every_thing() {
    setup();

    select_and_then_compare(
        r#"$..*"#,
        read_json("./benchmark/example.json"),
        read_json("./benchmark/giveme_every_thing_result.json"),
    );
}
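The example tests above pull `read_json`, `select_and_then_compare`, and `setup` from the shared `common` test module, which is not part of this diff. Below is a minimal sketch of what the comparison helper might look like, assuming the crate is imported as `jsonpath_lib` and that `select` yields the list of matched values; both are assumptions, and the repository's real helper may differ.

use serde_json::Value;

// Hypothetical stand-in for the unshown `select_and_then_compare` helper in tests/common.
fn select_and_then_compare(path: &str, json: Value, expected: Value) {
    // Evaluate the JSONPath expression against the input document...
    let selected = jsonpath_lib::select(&json, path).expect("path should parse and select");
    // ...then compare the matches, re-serialized as a JSON array, against the expected literal.
    assert_eq!(
        serde_json::to_value(selected).expect("matches should serialize"),
        expected,
        "unexpected result for `{}`",
        path
    );
}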
Some files were not shown because too many files have changed in this diff.