Mirror of https://github.com/fluencelabs/jsonpath (synced 2025-07-10 13:11:43 +00:00)
Compare commits
116 Commits
49179464f6
81bbf47545
55e8088111
c1d4c78da9
75c28fc73d
1512e9b72f
a168c46c04
83a170eb08
710f7ad544
74e79e6330
ec92c95a69
da7cf723de
c99879cfbb
7991bba51d
b8c82a9126
ba31b48c73
e9f69cd2b9
241a7f482d
67cc6447a8
9bc4b81f33
0d4d82fd49
de46a66148
ef52c63bc8
d618e60e13
a6f742e3ad
6f20013076
218321a4bd
d855c17899
4919a03c6b
7a07bc7744
32eef3f7c7
b736c962d2
accd08139f
25cffed2d7
bab2ff38f7
ffd87cfbe4
7597325f59
2ba3930a8c
844a96b2d1
3b4d2b4ffc
c2a6f3b319
8e10128826
ad39c9e668
636618e4ac
9fa0f74ce9
cc5ce6f65f
1152af6c68
1a54a36cd3
5b878d7ba7
d84d0d845c
c3ac7e40e8
c8ab8ad107
8c24411c3f
422a23ee57
b5c5d6b88e
ea7599c012
9276c0aa02
2b875c8a7e
6a89553c33
fe376c4483
5a52ded4ca
773ea3a3b3
17a8608392
e0db04aed9
d5364ad74a
67991df1f2
15e6c6065b
fab07adc5a
4b62fcd376
2daef2c938
43d092cb35
19f0878b5a
b2e27b423a
88e6320cf7
8b85ec9d61
f5e46882da
53c4711d1a
31612315b8
2efb019155
d9b9e9d8bd
c0a5296451
6c983ced99
d0f93bde49
d7423e35da
a23ab7a519
d4140c8ed5
595a2d8528
e8e9dbfe1c
b644c5b0a6
5c29b54e3a
dde0d5dc2e
a553b4b06b
3d33e8bd08
8f01598e05
cab5177811
9a28faf058
319186b1d9
2f0d29d644
b910ed35f9
f5717d6d26
f1fda2af13
6ad4432737
13816df970
030dccc8cc
28ad9c903f
be29571670
59dad2ea02
967df1b787
6d38c67e90
053be432f2
d384079842
5f832e8fe7
4390feb807
f536391b71
c19c75dac5
ad47444b7a
7
.idea/runConfigurations/all.xml
generated
@@ -4,9 +4,12 @@
<option name="command" value="test --package jsonpath_lib" />
<option name="allFeatures" value="false" />
<option name="nocapture" value="true" />
<option name="backtrace" value="SHORT" />
<option name="emulateTerminal" value="false" />
<option name="backtrace" value="NO" />
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
<envs />
<method v="2" />
<method v="2">
<option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
</method>
</configuration>
</component>
12
.idea/runConfigurations/filter.xml
generated
@ -1,12 +0,0 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="filter" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
|
||||
<option name="channel" value="DEFAULT" />
|
||||
<option name="command" value="test --package jsonpath_lib --test filter """ />
|
||||
<option name="allFeatures" value="false" />
|
||||
<option name="nocapture" value="false" />
|
||||
<option name="backtrace" value="SHORT" />
|
||||
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
|
||||
<envs />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
12
.idea/runConfigurations/lib.xml
generated
@ -1,12 +0,0 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="lib" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
|
||||
<option name="channel" value="DEFAULT" />
|
||||
<option name="command" value="test --package jsonpath_lib --test lib """ />
|
||||
<option name="allFeatures" value="false" />
|
||||
<option name="nocapture" value="false" />
|
||||
<option name="backtrace" value="SHORT" />
|
||||
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
|
||||
<envs />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
12
.idea/runConfigurations/mutable.xml
generated
@ -1,12 +0,0 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="mutable" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
|
||||
<option name="channel" value="DEFAULT" />
|
||||
<option name="command" value="test --package jsonpath_lib --test mutable """ />
|
||||
<option name="allFeatures" value="false" />
|
||||
<option name="nocapture" value="false" />
|
||||
<option name="backtrace" value="SHORT" />
|
||||
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
|
||||
<envs />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
12
.idea/runConfigurations/readme.xml
generated
@ -1,12 +0,0 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="readme" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
|
||||
<option name="channel" value="DEFAULT" />
|
||||
<option name="command" value="test --package jsonpath_lib --test readme """ />
|
||||
<option name="allFeatures" value="false" />
|
||||
<option name="nocapture" value="false" />
|
||||
<option name="backtrace" value="SHORT" />
|
||||
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
|
||||
<envs />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
49
.travis.yml
@ -1,5 +1,9 @@
|
||||
language: rust
|
||||
sudo: false
|
||||
sudo: required
|
||||
addons:
|
||||
apt:
|
||||
packages:
|
||||
- libssl-dev
|
||||
|
||||
cache: cargo
|
||||
|
||||
@ -13,27 +17,22 @@ matrix:
|
||||
- rust: stable
|
||||
os: linux
|
||||
env: RUST_BACKTRACE=1
|
||||
addons:
|
||||
chrome: stable
|
||||
before_cache: |
|
||||
if [[ "$TRAVIS_RUST_VERSION" == stable ]]; then
|
||||
cargo install cargo-tarpaulin -f
|
||||
fi
|
||||
before_script:
|
||||
- (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
|
||||
- (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
|
||||
- cargo install-update -a
|
||||
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
|
||||
- rustup component add clippy
|
||||
script:
|
||||
- cargo build --verbose --all
|
||||
- cargo test --verbose --all
|
||||
- ./clippy.sh
|
||||
after_success: |
|
||||
cargo tarpaulin --exclude-files wasm parser/mod.rs --out Xml
|
||||
bash <(curl -s https://codecov.io/bash)
|
||||
- rust: stable
|
||||
os: osx
|
||||
env: RUST_BACKTRACE=1
|
||||
addons:
|
||||
chrome: stable
|
||||
before_script:
|
||||
- (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
|
||||
- (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
|
||||
- cargo install-update -a
|
||||
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
|
||||
script:
|
||||
- cargo clean
|
||||
- cargo build --verbose --all
|
||||
- cargo test --verbose --all
|
||||
- language: node_js
|
||||
@ -48,13 +47,10 @@ matrix:
|
||||
- sh /tmp/rustup.sh -y
|
||||
- export PATH="$HOME/.cargo/bin:$PATH"
|
||||
- source "$HOME/.cargo/env"
|
||||
- npm install -g neon-cli
|
||||
- cd nodejs
|
||||
- node -v
|
||||
- npm -v
|
||||
- npm install
|
||||
before_script:
|
||||
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
|
||||
script:
|
||||
- npm test
|
||||
- bash ./build-wasm.sh
|
||||
- language: node_js
|
||||
os: osx
|
||||
node_js:
|
||||
@ -67,10 +63,7 @@ matrix:
|
||||
- sh /tmp/rustup.sh -y
|
||||
- export PATH="$HOME/.cargo/bin:$PATH"
|
||||
- source "$HOME/.cargo/env"
|
||||
- npm install -g neon-cli
|
||||
- cd nodejs
|
||||
- node -v
|
||||
- npm -v
|
||||
- npm install
|
||||
before_script:
|
||||
- curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
|
||||
script:
|
||||
- npm test
|
||||
- bash ./build-wasm.sh
|
24
Cargo.toml
@@ -1,37 +1,33 @@
[package]
name = "jsonpath_lib"
version = "0.2.2"
version = "0.2.5"
authors = ["Changseok Han <freestrings@gmail.com>"]

description = "It is JsonPath engine written in Rust. it provide a similar API interface in Webassembly and Javascript also. - Webassembly Demo: https://freestrings.github.io/jsonpath"
description = "It is JsonPath engine written in Rust. it provide a similar API interface in Webassembly and Javascript too. - Webassembly Demo: https://freestrings.github.io/jsonpath"
readme = "README.md"

keywords = ["jsonpath", "json", "webassembly", "nodejs", "javascript"]
keywords = ["jsonpath", "json", "webassembly", "lua", "query"]
categories = ['wasm', "parser-implementations", "api-bindings"]

repository = "https://github.com/freestrings/jsonpath"
documentation = "https://docs.rs/jsonpath_lib/0.1.0/jsonpath_lib"
documentation = "https://docs.rs/jsonpath_lib/0.2.5/jsonpath_lib"
license = "MIT"

categories = ["parsing"]

[badges]
travis-ci = { repository = "freestrings/jsonpath", branch = "master" }

[dependencies]
log = "0.4"
env_logger = "0.6.0"
env_logger = "0.7"
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["preserve_order"] }
indexmap = "1.0.2"
array_tool = "~1.0.3"

[dev-dependencies]
bencher = "0.1.5"
array_tool = "1.0.3"

[lib]
name = "jsonpath_lib"
path = "src/lib.rs"
crate-type = ["cdylib", "rlib"]

[profile.release]
#[profile.release]
#debug = true
#lto = false
#lto = false
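The Cargo.toml diff above bumps the crate from 0.2.2 to 0.2.5, points the documentation link at the 0.2.5 docs, and moves env_logger from 0.6.0 to 0.7. As a quick orientation, a minimal, hedged sketch of using the crate at this version follows; the `jsonpath_lib::select` call and the sample JSON mirror the README examples later in this compare and are assumptions, not part of the diff itself.

```rust
// Sketch only (not part of this diff): basic selection with jsonpath_lib 0.2.x.
// `jsonpath_lib::select(&Value, path)` is assumed from the crate README shown below.
use serde_json::json;

fn main() {
    let obj = json!({
        "school": { "friends": [ {"name": "friend1", "age": 20} ] },
        "friends": [ {"name": "friend3", "age": 30} ]
    });

    // Select the first element of every `friends` array; returns a Result.
    let hits = jsonpath_lib::select(&obj, "$..friends[0]").unwrap();
    println!("{:?}", hits);
}
```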
59
README.md
@ -2,16 +2,15 @@
|
||||
|
||||
[](https://travis-ci.org/freestrings/jsonpath)
|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
`Rust` 버전 [JsonPath](https://goessner.net/articles/JsonPath/) 구현이다. `Webassembly`와 `Javascript`에서도 유사한 API 인터페이스를 제공 한다.
|
||||
`Rust` 버전 [JsonPath](https://goessner.net/articles/JsonPath/) 구현으로 `Webassembly`와 `Javascript`에서도 유사한 API 인터페이스를 제공 한다.
|
||||
|
||||
It is JsonPath [JsonPath](https://goessner.net/articles/JsonPath/) engine written in `Rust`. it provide a similar API interface in `Webassembly` and` Javascript` also.
|
||||
It is JsonPath [JsonPath](https://goessner.net/articles/JsonPath/) engine written in `Rust`. it provide a similar API interface in `Webassembly` and` Javascript` too.
|
||||
|
||||
- [Webassembly Demo](https://freestrings.github.io/jsonpath/)
|
||||
- [NPM jsonpath-wasm - webassembly](https://www.npmjs.com/package/jsonpath-wasm)
|
||||
- [NPM jsonpath-rs - native addon](https://www.npmjs.com/package/jsonpath-rs)
|
||||
|
||||
## Rust API
|
||||
|
||||
@ -91,7 +90,7 @@ let result = selector_mut
|
||||
0
|
||||
};
|
||||
|
||||
json!(age)
|
||||
Some(json!(age))
|
||||
}).unwrap()
|
||||
.take().unwrap();
|
||||
|
||||
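Both replace_with hunks in this README diff, the SelectorMut form above and the function form below, change the replacement closure to return Option<Value> (`Some(json!(age))`) rather than a bare Value. A minimal sketch of calling code after that change, using the function form; the closure body here is illustrative and not taken from the diff:

```rust
// Sketch only: the replace closure now returns Option<Value>.
use serde_json::{json, Value};

fn set_matched_ages(json_obj: Value) -> Value {
    jsonpath_lib::replace_with(json_obj, "$..[?(@.age == 20)].age", &mut |_v| {
        Some(json!(30)) // previously a bare `json!(30)` would have been returned
    })
    .unwrap()
}
```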
@ -352,7 +351,7 @@ let ret = jsonpath::replace_with(json_obj, "$..[?(@.age == 20)].age", &mut |v| {
|
||||
0
|
||||
};
|
||||
|
||||
json!(age)
|
||||
Some(json!(age))
|
||||
}).unwrap();
|
||||
|
||||
assert_eq!(ret, json!({
|
||||
@ -387,18 +386,6 @@ import * as jsonpath from "jsonpath-wasm";
|
||||
const jsonpath = require('jsonpath-wasm');
|
||||
```
|
||||
|
||||
##### jsonpath-rs (NodeJS only)
|
||||
|
||||
Goto [`jsonpath-rs` npmjs.org](https://www.npmjs.com/package/jsonpath-rs)
|
||||
|
||||
```javascript
|
||||
const jsonpath = require('jsonpath-rs');
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.Selector class</b></summary>
|
||||
|
||||
##### jsonpath-wasm
|
||||
`wasm-bindgen` 리턴 타입 제약 때문에 빌더 패턴은 지원하지 않는다.
|
||||
|
||||
@ -434,42 +421,6 @@ console.log(JSON.stringify(ret) == JSON.stringify(retObj));
|
||||
// => true
|
||||
```
|
||||
|
||||
##### jsonpath-rs
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let selector = new jsonpath.Selector()
|
||||
.path('$..friends[0]')
|
||||
.value(jsonObj);
|
||||
|
||||
let retObj = selector.select();
|
||||
|
||||
console.log(JSON.stringify(ret) == JSON.stringify(retObj));
|
||||
|
||||
// => true
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.SelectorMut class</b></summary>
|
||||
|
||||
빌더 패턴 제약은 `Selector class`와 동일하다.
|
||||
|
||||
```javascript
|
||||
|
9
bench.sh
Executable file
@@ -0,0 +1,9 @@
#!/bin/bash

set -e

#
# rustup default nightly
#

cargo bench --manifest-path ./benchmark/Cargo.toml
@@ -1,44 +0,0 @@
#!/bin/bash

set -e
DIR="$(pwd)"

cd "${DIR}"/bench_bin && cargo build --release

ITER=100000

printf "\n\n$..book[?(@.price<30 && @.category=="fiction")] (loop ${ITER})"
printf "\n\n"

__default () {
echo "Rust - select: " && time ./bench.sh select ${ITER}
printf "\n"
sleep 1
cd "${DIR}"/javascript && echo "NodeJs - jsonpath - query: " && time ./bench.sh jsonpath ${ITER}
printf "\n"
sleep 1
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-rs - select:" && time ./bench.sh nativeSelect ${ITER}
}

__extra () {
echo "Rust - selector: " && time ./bench.sh selector ${ITER}
printf "\n"
sleep 1
echo "Rust - compile: " && time ./bench.sh compile ${ITER}
printf "\n"
sleep 1
cd "${DIR}"/javascript && echo "NodeJs - jsonpath - query: " && time ./bench.sh jsonpath ${ITER}
printf "\n"
sleep 1
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-rs - selector: " && time ./bench.sh nativeSelector ${ITER}
printf "\n"
sleep 1
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-rs - compile: " && time ./bench.sh nativeCompile ${ITER}
printf "\n"
}

if [ "$1" = "extra" ]; then
__extra
else
__default
fi
222
benches/javascript/package-lock.json
generated
@ -1,222 +0,0 @@
|
||||
{
|
||||
"name": "jsonpath-benches",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
"JSONSelect": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/JSONSelect/-/JSONSelect-0.4.0.tgz",
|
||||
"integrity": "sha1-oI7cxn6z/L6Z7WMIVTRKDPKCu40="
|
||||
},
|
||||
"cjson": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/cjson/-/cjson-0.2.1.tgz",
|
||||
"integrity": "sha1-c82KrWXZ4VBfmvF0TTt5wVJ2gqU="
|
||||
},
|
||||
"colors": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/colors/-/colors-0.5.1.tgz",
|
||||
"integrity": "sha1-fQAj6usVTo7p/Oddy5I9DtFmd3Q="
|
||||
},
|
||||
"deep-is": {
|
||||
"version": "0.1.3",
|
||||
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz",
|
||||
"integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ="
|
||||
},
|
||||
"ebnf-parser": {
|
||||
"version": "0.1.10",
|
||||
"resolved": "https://registry.npmjs.org/ebnf-parser/-/ebnf-parser-0.1.10.tgz",
|
||||
"integrity": "sha1-zR9rpHfFY4xAyX7ZtXLbW6tdgzE="
|
||||
},
|
||||
"escodegen": {
|
||||
"version": "0.0.21",
|
||||
"resolved": "https://registry.npmjs.org/escodegen/-/escodegen-0.0.21.tgz",
|
||||
"integrity": "sha1-U9ZSz6EDA4gnlFilJmxf/HCcY8M=",
|
||||
"requires": {
|
||||
"esprima": "~1.0.2",
|
||||
"estraverse": "~0.0.4",
|
||||
"source-map": ">= 0.1.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"esprima": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz",
|
||||
"integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0="
|
||||
}
|
||||
}
|
||||
},
|
||||
"esprima": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz",
|
||||
"integrity": "sha1-dqD9Zvz+FU/SkmZ9wmQBl1CxZXs="
|
||||
},
|
||||
"estraverse": {
|
||||
"version": "0.0.4",
|
||||
"resolved": "https://registry.npmjs.org/estraverse/-/estraverse-0.0.4.tgz",
|
||||
"integrity": "sha1-AaCTLf7ldGhKWYr1pnw7+bZCjbI="
|
||||
},
|
||||
"esutils": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz",
|
||||
"integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs="
|
||||
},
|
||||
"fast-levenshtein": {
|
||||
"version": "2.0.6",
|
||||
"resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
|
||||
"integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc="
|
||||
},
|
||||
"jison": {
|
||||
"version": "0.4.13",
|
||||
"resolved": "https://registry.npmjs.org/jison/-/jison-0.4.13.tgz",
|
||||
"integrity": "sha1-kEFwfWIkE2f1iDRTK58ZwsNvrHg=",
|
||||
"requires": {
|
||||
"JSONSelect": "0.4.0",
|
||||
"cjson": "~0.2.1",
|
||||
"ebnf-parser": "~0.1.9",
|
||||
"escodegen": "0.0.21",
|
||||
"esprima": "1.0.x",
|
||||
"jison-lex": "0.2.x",
|
||||
"lex-parser": "~0.1.3",
|
||||
"nomnom": "1.5.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"esprima": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz",
|
||||
"integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0="
|
||||
}
|
||||
}
|
||||
},
|
||||
"jison-lex": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/jison-lex/-/jison-lex-0.2.1.tgz",
|
||||
"integrity": "sha1-rEuBXozOUTLrErXfz+jXB7iETf4=",
|
||||
"requires": {
|
||||
"lex-parser": "0.1.x",
|
||||
"nomnom": "1.5.2"
|
||||
}
|
||||
},
|
||||
"jsonpath": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/jsonpath/-/jsonpath-1.0.1.tgz",
|
||||
"integrity": "sha512-HY5kSg82LHIs0r0h9gYBwpNc1w1qGY0qJ7al01W1bJltsN2lp+mjjA/a79gXWuvD6Xf8oPkD2d5uKMZQXTGzqA==",
|
||||
"requires": {
|
||||
"esprima": "1.2.2",
|
||||
"jison": "0.4.13",
|
||||
"static-eval": "2.0.2",
|
||||
"underscore": "1.7.0"
|
||||
}
|
||||
},
|
||||
"levn": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
|
||||
"integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=",
|
||||
"requires": {
|
||||
"prelude-ls": "~1.1.2",
|
||||
"type-check": "~0.3.2"
|
||||
}
|
||||
},
|
||||
"lex-parser": {
|
||||
"version": "0.1.4",
|
||||
"resolved": "https://registry.npmjs.org/lex-parser/-/lex-parser-0.1.4.tgz",
|
||||
"integrity": "sha1-ZMTwJfF/1Tv7RXY/rrFvAVp0dVA="
|
||||
},
|
||||
"nomnom": {
|
||||
"version": "1.5.2",
|
||||
"resolved": "https://registry.npmjs.org/nomnom/-/nomnom-1.5.2.tgz",
|
||||
"integrity": "sha1-9DRUSKhTz71cDSYyDyR3qwUm/i8=",
|
||||
"requires": {
|
||||
"colors": "0.5.x",
|
||||
"underscore": "1.1.x"
|
||||
},
|
||||
"dependencies": {
|
||||
"underscore": {
|
||||
"version": "1.1.7",
|
||||
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.1.7.tgz",
|
||||
"integrity": "sha1-QLq4S60Z0jAJbo1u9ii/8FXYPbA="
|
||||
}
|
||||
}
|
||||
},
|
||||
"optionator": {
|
||||
"version": "0.8.2",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz",
|
||||
"integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=",
|
||||
"requires": {
|
||||
"deep-is": "~0.1.3",
|
||||
"fast-levenshtein": "~2.0.4",
|
||||
"levn": "~0.3.0",
|
||||
"prelude-ls": "~1.1.2",
|
||||
"type-check": "~0.3.2",
|
||||
"wordwrap": "~1.0.0"
|
||||
}
|
||||
},
|
||||
"prelude-ls": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz",
|
||||
"integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ="
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.7.3",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
|
||||
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
|
||||
"optional": true
|
||||
},
|
||||
"static-eval": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.0.2.tgz",
|
||||
"integrity": "sha512-N/D219Hcr2bPjLxPiV+TQE++Tsmrady7TqAJugLy7Xk1EumfDWS/f5dtBbkRCGE7wKKXuYockQoj8Rm2/pVKyg==",
|
||||
"requires": {
|
||||
"escodegen": "^1.8.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"escodegen": {
|
||||
"version": "1.11.1",
|
||||
"resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.11.1.tgz",
|
||||
"integrity": "sha512-JwiqFD9KdGVVpeuRa68yU3zZnBEOcPs0nKW7wZzXky8Z7tffdYUHbe11bPCV5jYlK6DVdKLWLm0f5I/QlL0Kmw==",
|
||||
"requires": {
|
||||
"esprima": "^3.1.3",
|
||||
"estraverse": "^4.2.0",
|
||||
"esutils": "^2.0.2",
|
||||
"optionator": "^0.8.1",
|
||||
"source-map": "~0.6.1"
|
||||
}
|
||||
},
|
||||
"esprima": {
|
||||
"version": "3.1.3",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-3.1.3.tgz",
|
||||
"integrity": "sha1-/cpRzuYTOJXjyI1TXOSdv/YqRjM="
|
||||
},
|
||||
"estraverse": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz",
|
||||
"integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM="
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"type-check": {
|
||||
"version": "0.3.2",
|
||||
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz",
|
||||
"integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=",
|
||||
"requires": {
|
||||
"prelude-ls": "~1.1.2"
|
||||
}
|
||||
},
|
||||
"underscore": {
|
||||
"version": "1.7.0",
|
||||
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz",
|
||||
"integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk="
|
||||
},
|
||||
"wordwrap": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
|
||||
"integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus="
|
||||
}
|
||||
}
|
||||
}
|
3
benches/package-lock.json
generated
@@ -1,3 +0,0 @@
{
"lockfileVersion": 1
}
4
benchmark/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
.idea/*
.vscode
/target/
Cargo.lock
17
benchmark/Cargo.toml
Normal file
@@ -0,0 +1,17 @@
[package]
name = "jsonpath_lib_benches"
version = "0.1.0"
authors = ["Changseok Han <freestrings@gmail.com>"]
description = "jsonpath_lib benchmark"
license = "MIT"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["preserve_order"] }
jsonpath_lib = { path = "../" }

[dev-dependencies]
bencher = "0.1.5"

[[bin]]
name = "jsonpath_lib_benches"
path = "src/main.rs"
@@ -22,7 +22,7 @@ fn read_json(path: &str) -> String {
}

fn get_string() -> String {
read_json("./benches/example.json")
read_json("./example.json")
}

fn get_json() -> Value {
@@ -109,7 +109,7 @@ fn bench_select_as(b: &mut Bencher) {
#[bench]
fn bench_delete(b: &mut Bencher) {
let json = get_json();
let mut selector = SelectorMut::new();
let mut selector = SelectorMut::default();
let _ = selector.str_path(get_path());

b.iter(move || {
@@ -123,13 +123,13 @@ fn bench_delete(b: &mut Bencher) {
fn bench_select_to_compare_with_delete(b: &mut Bencher) {
let json = &get_json();

let mut selector = Selector::new();
let mut selector = Selector::default();
let _ = selector.str_path(get_path());

b.iter(move || {
for _ in 1..100 {
let json = json.clone();
let mut s = Selector::new();
let mut s = Selector::default();
let _ = s.compiled_path(selector.node_ref().unwrap()).value(&json);
let _ = s.select();
}
@@ -19,7 +19,7 @@ fn read_json(path: &str) -> String {
}

fn get_string() -> String {
read_json("./benches/example.json")
read_json("./example.json")
}

fn get_json() -> Value {
@@ -53,7 +53,7 @@ fn _selector(b: &mut Bencher, index: usize) {
let json = get_json();
b.iter(move || {
for _ in 1..100 {
let mut selector = jsonpath::Selector::new();
let mut selector = jsonpath::Selector::default();
let _ = selector.str_path(get_path(index));
selector.value(&json);
let r = selector.select();
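The benchmark hunks above swap Selector::new() / SelectorMut::new() for ::default() and read example.json from the benchmark directory. A short sketch of the construction pattern the updated benches use; the sample JSON is illustrative, and the Selector method names are taken from the hunks themselves:

```rust
// Sketch only: building a selector via Default, as in the updated benches.
use jsonpath_lib::Selector;
use serde_json::json;

fn main() {
    let json = json!({"friends": [ {"name": "a", "age": 20} ]});

    let mut selector = Selector::default(); // was Selector::new()
    let _ = selector.str_path("$..friends[0]");
    selector.value(&json);
    let result = selector.select(); // Result<Vec<&Value>, _>
    println!("{:?}", result);
}
```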
4501
benchmark/big_example.json
Normal file
File diff suppressed because it is too large
@ -34,4 +34,4 @@
|
||||
}
|
||||
},
|
||||
"expensive": 10
|
||||
}
|
||||
}
|
@ -43,7 +43,6 @@ function getJson() {
|
||||
const path = '$..book[?(@.price<30 && @.category=="fiction")]';
|
||||
const jp = require('jsonpath');
|
||||
const jpw = require('jsonpath-wasm');
|
||||
const jpwRs = require('jsonpath-rs');
|
||||
|
||||
function jsonpath() {
|
||||
for (var i = 0; i < iter; i++) {
|
||||
@ -51,26 +50,6 @@ function jsonpath() {
|
||||
}
|
||||
}
|
||||
|
||||
function nativeCompile() {
|
||||
let template = jpwRs.compile(path);
|
||||
for (var i = 0; i < iter; i++) {
|
||||
let _ = template(JSON.stringify(json));
|
||||
}
|
||||
}
|
||||
|
||||
function nativeSelector() {
|
||||
let selector = jpwRs.selector(getJson());
|
||||
for (var i = 0; i < iter; i++) {
|
||||
let _ = selector(path);
|
||||
}
|
||||
}
|
||||
|
||||
function nativeSelect() {
|
||||
for (var i = 0; i < iter; i++) {
|
||||
let _ = jpwRs.select(JSON.stringify(json), path);
|
||||
}
|
||||
}
|
||||
|
||||
function wasmSelector() {
|
||||
let selector = jpw.selector(getJson());
|
||||
for (var i = 0; i < iter; i++) {
|
1
benchmark/src/main.rs
Normal file
@@ -0,0 +1 @@
fn main() {}
113
build-wasm.sh
@ -7,9 +7,8 @@ DIR="$(pwd)"
|
||||
WASM="${DIR}"/wasm
|
||||
WASM_WWW="${WASM}"/www
|
||||
WASM_WWW_BENCH="${WASM}"/www_bench
|
||||
WASM_BROWSER_PKG="${WASM}"/browser_pkg
|
||||
WASM_NODEJS_PKG="${WASM}"/nodejs_pkg
|
||||
WASM_ALL_PKG="${WASM}"/all_pkg
|
||||
WASM_WEB_PKG="${WASM}"/web_pkg
|
||||
WASM_TEST="${WASM}"/tests
|
||||
DOCS="${DIR}"/docs
|
||||
DOCS_BENCH="${DOCS}"/bench
|
||||
@ -19,6 +18,8 @@ __msg () {
|
||||
}
|
||||
|
||||
__cargo_clean () {
|
||||
rm -f "${DIR}"/Cargo.lock
|
||||
rm -f "${WASM}"/Cargo.lock
|
||||
cd "${WASM}" && cargo clean && \
|
||||
cd "${DIR}" && cargo clean
|
||||
}
|
||||
@ -27,78 +28,64 @@ echo
|
||||
__msg "clean wasm"
|
||||
rm -rf \
|
||||
"${WASM_NODEJS_PKG}" \
|
||||
"${WASM_BROWSER_PKG}" \
|
||||
"${WASM_ALL_PKG}" \
|
||||
"${WASM_WWW}"/node_modules \
|
||||
"${WASM_WWW_BENCH}"/node_modules \
|
||||
"${WASM_WEB_PKG}" \
|
||||
"${WASM_WWW}"/dist \
|
||||
"${WASM_WWW}"/node_modules \
|
||||
"${WASM_WWW}"/package-lock.json \
|
||||
"${WASM_WWW_BENCH}"/dist \
|
||||
"${WASM_TEST}"/node_modules
|
||||
"${WASM_WWW_BENCH}"/node_modules \
|
||||
"${WASM_WWW_BENCH}"/package-lock.json \
|
||||
"${WASM_TEST}"/node_modules \
|
||||
"${WASM_TEST}"/package-lock.json
|
||||
|
||||
if [ "$1" = "all" ]; then
|
||||
__msg "clean all wasm"
|
||||
__cargo_clean
|
||||
fi
|
||||
__msg "clean cargo clean"
|
||||
__cargo_clean
|
||||
|
||||
echo
|
||||
wasm_pack_version=$(wasm-pack -V)
|
||||
__msg "wasm-pack: ${wasm_pack_version}"
|
||||
|
||||
echo
|
||||
__msg "wasm-pack nodejs"
|
||||
cd "${WASM}" && wasm-pack build --release --target "nodejs" --out-dir "${WASM_NODEJS_PKG}"
|
||||
|
||||
__msg "npm install: wasm"
|
||||
cd "${WASM_WWW}" && npm install
|
||||
__msg "npm install: wasm_bench"
|
||||
cd "${WASM_WWW_BENCH}" && npm install
|
||||
__msg "npm install: wasm test"
|
||||
cd "${WASM_TEST}" && npm install
|
||||
cd "${WASM_TEST}" && npm install "${WASM_NODEJS_PKG}" && npm install
|
||||
|
||||
echo
|
||||
echo
|
||||
__msg "wasm-pack"
|
||||
cd "${WASM}" && \
|
||||
wasm-pack build --release --target=nodejs --out-dir "${WASM_NODEJS_PKG}"
|
||||
|
||||
cd "${WASM}" && \
|
||||
wasm-pack build --release --target=browser --out-dir "${WASM_BROWSER_PKG}"
|
||||
# && \
|
||||
# wasm-pack test --chrome --firefox --headless
|
||||
|
||||
__msg "wasm npm packaging"
|
||||
cp -r "${WASM_BROWSER_PKG}" "${WASM_ALL_PKG}/" && \
|
||||
sed "s/require[\(]'\.\/jsonpath_wasm_bg/require\('\.\/jsonpath_wasm_nodejs/" "${WASM_NODEJS_PKG}/jsonpath_wasm.js" \
|
||||
> "${WASM_ALL_PKG}/jsonpath_wasm_main.js" && \
|
||||
sed "s/require[\(]'\.\/jsonpath_wasm/require\('\.\/jsonpath_wasm_main/" "${WASM_NODEJS_PKG}/jsonpath_wasm_bg.js" \
|
||||
> "${WASM_ALL_PKG}/jsonpath_wasm_nodejs.js" && \
|
||||
jq ".files += [\"jsonpath_wasm_nodejs.js\"]" ${WASM_ALL_PKG}/package.json \
|
||||
| jq ".main = \"jsonpath_wasm_main.js\"" \
|
||||
| jq ".keywords += [\"jsonpath\", \"json\", \"webassembly\", \"parsing\", \"rust\"]" \
|
||||
> ${WASM_ALL_PKG}/temp.json && \
|
||||
mv -v "${WASM_ALL_PKG}/temp.json" "${WASM_ALL_PKG}/package.json" && \
|
||||
cd "${WASM_ALL_PKG}" && npm link
|
||||
|
||||
echo
|
||||
__msg "link"
|
||||
cd "${WASM_WWW}" && \
|
||||
npm link jsonpath-wasm
|
||||
|
||||
cd "${WASM_WWW_BENCH}" && \
|
||||
npm link jsonpath-wasm
|
||||
|
||||
cd "${WASM_TEST}" && \
|
||||
npm link jsonpath-wasm
|
||||
|
||||
echo
|
||||
echo
|
||||
__msg "wasm test"
|
||||
cd "${WASM_TEST}" && npm test
|
||||
|
||||
if [ "$1" = "all" ] || [ "$1" = "docs" ]; then
|
||||
echo
|
||||
__msg "docs"
|
||||
cd "${WASM_WWW}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS}"/*.js "${DOCS}"/*.wasm "${DOCS}"/*.html && \
|
||||
cp "${WASM_WWW}"/dist/*.* "${DOCS}"/
|
||||
if [ "$1" = "docs" ]; then
|
||||
echo
|
||||
__msg "wasm-pack web"
|
||||
cd "${WASM}" && wasm-pack build --release --out-dir "${WASM_WEB_PKG}"
|
||||
|
||||
cd "${WASM_WWW_BENCH}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS_BENCH}"/*.js "${DOCS_BENCH}"/*.wasm "${DOCS_BENCH}"/*.html && \
|
||||
cp "${WASM_WWW_BENCH}"/dist/*.* "${DOCS_BENCH}"/
|
||||
echo
|
||||
__msg "jsonpath-wasm npm link"
|
||||
cd "${WASM_WEB_PKG}" && npm link
|
||||
|
||||
__msg "npm install: wasm"
|
||||
cd "${WASM_WWW}" && npm install
|
||||
__msg "npm install: wasm_bench"
|
||||
cd "${WASM_WWW_BENCH}" && npm install
|
||||
|
||||
echo
|
||||
__msg "link"
|
||||
cd "${WASM_WWW}" && npm link jsonpath-wasm
|
||||
cd "${WASM_WWW_BENCH}" && npm link jsonpath-wasm
|
||||
|
||||
echo
|
||||
__msg "docs"
|
||||
cd "${WASM_WWW}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS}"/*.js "${DOCS}"/*.wasm "${DOCS}"/*.html && \
|
||||
cp "${WASM_WWW}"/dist/*.* "${DOCS}"/
|
||||
|
||||
cd "${WASM_WWW_BENCH}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS_BENCH}"/*.js "${DOCS_BENCH}"/*.wasm "${DOCS_BENCH}"/*.html && \
|
||||
cp "${WASM_WWW_BENCH}"/dist/*.* "${DOCS_BENCH}"/
|
||||
fi
|
||||
|
||||
__msg "wasm done"
|
123
build.sh
@ -1,123 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
# project_root
|
||||
DIR="$(pwd)"
|
||||
WASM="${DIR}"/wasm
|
||||
WASM_WWW="${WASM}"/www
|
||||
WASM_WWW_BENCH="${WASM}"/www_bench
|
||||
WASM_BROWSER_PKG="${WASM}"/browser_pkg
|
||||
WASM_NODEJS_PKG="${WASM}"/nodejs_pkg
|
||||
WASM_ALL_PKG="${WASM}"/all_pkg
|
||||
WASM_TEST="${WASM}"/tests
|
||||
BENCHES="${DIR}"/benches
|
||||
BENCHES_JS="${BENCHES}"/javascript
|
||||
NODEJS="${DIR}"/nodejs
|
||||
DOCS="${DIR}"/docs
|
||||
DOCS_BENCH="${DOCS}"/bench
|
||||
|
||||
__msg () {
|
||||
echo ">>>>>>>>>>$1<<<<<<<<<<"
|
||||
}
|
||||
|
||||
__cargo_clean () {
|
||||
cd "${BENCHES}"/bench_bin && cargo clean && \
|
||||
cd "${NODEJS}"/native && cargo clean && \
|
||||
cd "${WASM}" && cargo clean && \
|
||||
cd "${DIR}" && cargo clean
|
||||
}
|
||||
|
||||
echo
|
||||
__msg "clean"
|
||||
rm -rf \
|
||||
"${WASM_NODEJS_PKG}" \
|
||||
"${WASM_BROWSER_PKG}" \
|
||||
"${WASM_ALL_PKG}" \
|
||||
"${BENCHES_JS}"/node_modules \
|
||||
"${NODEJS}"/node_modules \
|
||||
"${WASM_WWW}"/node_modules \
|
||||
"${WASM_WWW_BENCH}"/node_modules \
|
||||
"${WASM_WWW}"/dist \
|
||||
"${WASM_WWW_BENCH}"/dist \
|
||||
"${WASM_TEST}"/node_modules
|
||||
|
||||
if [ "$1" = "all" ]; then
|
||||
__msg "clean targets"
|
||||
__cargo_clean
|
||||
fi
|
||||
|
||||
__msg "npm install: wasm"
|
||||
cd "${WASM_WWW}" && npm install
|
||||
__msg "npm install: wasm_bench"
|
||||
cd "${WASM_WWW_BENCH}" && npm install
|
||||
__msg "npm install: nodejs"
|
||||
cd "${NODEJS}" && npm install
|
||||
__msg "npm install: benches_js"
|
||||
cd "${BENCHES_JS}" && npm install
|
||||
__msg "npm install: wasm test"
|
||||
cd "${WASM_TEST}" && npm install
|
||||
|
||||
echo
|
||||
echo
|
||||
__msg "nodejs test"
|
||||
cd "${NODEJS}" && npm test
|
||||
|
||||
echo
|
||||
echo
|
||||
__msg "wasm-pack"
|
||||
cd "${WASM}" && \
|
||||
wasm-pack build --release --target=nodejs --out-dir "${WASM_NODEJS_PKG}"
|
||||
|
||||
cd "${WASM}" && \
|
||||
wasm-pack build --release --target=browser --out-dir "${WASM_BROWSER_PKG}"
|
||||
# && \
|
||||
# wasm-pack test --chrome --firefox --headless
|
||||
|
||||
__msg "wasm npm packaging"
|
||||
cp -r "${WASM_BROWSER_PKG}" "${WASM_ALL_PKG}/" && \
|
||||
sed "s/require[\(]'\.\/jsonpath_wasm_bg/require\('\.\/jsonpath_wasm_nodejs/" "${WASM_NODEJS_PKG}/jsonpath_wasm.js" \
|
||||
> "${WASM_ALL_PKG}/jsonpath_wasm_main.js" && \
|
||||
sed "s/require[\(]'\.\/jsonpath_wasm/require\('\.\/jsonpath_wasm_main/" "${WASM_NODEJS_PKG}/jsonpath_wasm_bg.js" \
|
||||
> "${WASM_ALL_PKG}/jsonpath_wasm_nodejs.js" && \
|
||||
jq ".files += [\"jsonpath_wasm_nodejs.js\"]" ${WASM_ALL_PKG}/package.json \
|
||||
| jq ".main = \"jsonpath_wasm_main.js\"" \
|
||||
| jq ".keywords += [\"jsonpath\", \"json\", \"webassembly\", \"parsing\", \"rust\"]" \
|
||||
> ${WASM_ALL_PKG}/temp.json && \
|
||||
mv -v "${WASM_ALL_PKG}/temp.json" "${WASM_ALL_PKG}/package.json" && \
|
||||
cd "${WASM_ALL_PKG}" && npm link
|
||||
|
||||
echo
|
||||
__msg "link"
|
||||
cd "${WASM_WWW}" && \
|
||||
npm link jsonpath-wasm
|
||||
|
||||
cd "${WASM_WWW_BENCH}" && \
|
||||
npm link jsonpath-wasm
|
||||
|
||||
cd "${BENCHES_JS}" && \
|
||||
npm link jsonpath-wasm && \
|
||||
npm link jsonpath-rs
|
||||
|
||||
cd "${WASM_TEST}" && \
|
||||
npm link jsonpath-wasm
|
||||
|
||||
echo
|
||||
echo
|
||||
__msg "wasm test"
|
||||
cd "${WASM_TEST}" && npm test
|
||||
|
||||
|
||||
echo
|
||||
__msg "docs"
|
||||
cd "${WASM_WWW}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS}"/*.js "${DOCS}"/*.wasm "${DOCS}"/*.html && \
|
||||
cp "${WASM_WWW}"/dist/*.* "${DOCS}"/
|
||||
|
||||
cd "${WASM_WWW_BENCH}" && \
|
||||
npm run build &&
|
||||
rm -f "${DOCS_BENCH}"/*.js "${DOCS_BENCH}"/*.wasm "${DOCS_BENCH}"/*.html && \
|
||||
cp "${WASM_WWW_BENCH}"/dist/*.* "${DOCS_BENCH}"/
|
||||
|
||||
__msg "done"
|
11
clippy.sh
Executable file
@@ -0,0 +1,11 @@
#!/usr/bin/env bash

set -e

cargo clean
cargo clippy -- -D warnings
cargo build --verbose --all
cargo clippy --all-targets --all-features -- -D warnings -A clippy::cognitive_complexity
cargo test --verbose --all
cd wasm && cargo clippy -- -D warnings -A clippy::suspicious_else_formatting
cd ../
9
coverage.sh
Executable file
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

#
# cargo install cargo-tarpaulin
#

set -e

cargo tarpaulin --exclude-files nodejs wasm parser/mod.rs -v --all
File diff suppressed because one or more lines are too long
38
docs/1.bootstrap.js
Normal file
File diff suppressed because one or more lines are too long
BIN
docs/a9530753c3f0aa3c5ead.module.wasm
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
docs/bench/a9530753c3f0aa3c5ead.module.wasm
Normal file
Binary file not shown.
59
docs/bench/bootstrap.js
vendored
@ -10,7 +10,7 @@
|
||||
/******/ var moduleId, chunkId, i = 0, resolves = [];
|
||||
/******/ for(;i < chunkIds.length; i++) {
|
||||
/******/ chunkId = chunkIds[i];
|
||||
/******/ if(installedChunks[chunkId]) {
|
||||
/******/ if(Object.prototype.hasOwnProperty.call(installedChunks, chunkId) && installedChunks[chunkId]) {
|
||||
/******/ resolves.push(installedChunks[chunkId][0]);
|
||||
/******/ }
|
||||
/******/ installedChunks[chunkId] = 0;
|
||||
@ -52,53 +52,53 @@
|
||||
/******/ function promiseResolve() { return Promise.resolve(); }
|
||||
/******/
|
||||
/******/ var wasmImportObjects = {
|
||||
/******/ "../all_pkg/jsonpath_wasm_bg.wasm": function() {
|
||||
/******/ "../web_pkg/jsonpath_wasm_bg.wasm": function() {
|
||||
/******/ return {
|
||||
/******/ "./jsonpath_wasm": {
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "./jsonpath_wasm.js": {
|
||||
/******/ "__wbindgen_json_parse": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_json_serialize": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_error_8015049cb5adfca2": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_error_8015049cb5adfca2"](p0i32,p1i32);
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_error_e7d3e8dbb31828c8": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbg_error_e7d3e8dbb31828c8"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_call_972de3aa550c37b2": function(p0i32,p1i32,p2i32,p3i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_call_972de3aa550c37b2"](p0i32,p1i32,p2i32,p3i32);
|
||||
/******/ "__wbg_call_1ad0eb4a7ab279eb": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbg_call_1ad0eb4a7ab279eb"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_is_string": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_get": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_debug_string": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_debug_string"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_debug_string"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_throw": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_rethrow": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_rethrow"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_rethrow"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper18": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper18"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper28": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper28"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper20": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper20"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper26": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper26"](p0i32,p1i32,p2i32);
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ };
|
||||
@ -161,6 +161,8 @@
|
||||
/******/ }
|
||||
/******/ script.src = jsonpScriptSrc(chunkId);
|
||||
/******/
|
||||
/******/ // create error before stack unwound to get useful stacktrace later
|
||||
/******/ var error = new Error();
|
||||
/******/ onScriptComplete = function (event) {
|
||||
/******/ // avoid mem leaks in IE.
|
||||
/******/ script.onerror = script.onload = null;
|
||||
@ -170,7 +172,8 @@
|
||||
/******/ if(chunk) {
|
||||
/******/ var errorType = event && (event.type === 'load' ? 'missing' : event.type);
|
||||
/******/ var realSrc = event && event.target && event.target.src;
|
||||
/******/ var error = new Error('Loading chunk ' + chunkId + ' failed.\n(' + errorType + ': ' + realSrc + ')');
|
||||
/******/ error.message = 'Loading chunk ' + chunkId + ' failed.\n(' + errorType + ': ' + realSrc + ')';
|
||||
/******/ error.name = 'ChunkLoadError';
|
||||
/******/ error.type = errorType;
|
||||
/******/ error.request = realSrc;
|
||||
/******/ chunk[1](error);
|
||||
@ -188,7 +191,7 @@
|
||||
/******/
|
||||
/******/ // Fetch + compile chunk loading for webassembly
|
||||
/******/
|
||||
/******/ var wasmModules = {"0":["../all_pkg/jsonpath_wasm_bg.wasm"]}[chunkId] || [];
|
||||
/******/ var wasmModules = {"1":["../web_pkg/jsonpath_wasm_bg.wasm"]}[chunkId] || [];
|
||||
/******/
|
||||
/******/ wasmModules.forEach(function(wasmModuleId) {
|
||||
/******/ var installedWasmModuleData = installedWasmModules[wasmModuleId];
|
||||
@ -198,7 +201,7 @@
|
||||
/******/ promises.push(installedWasmModuleData);
|
||||
/******/ else {
|
||||
/******/ var importObject = wasmImportObjects[wasmModuleId]();
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../all_pkg/jsonpath_wasm_bg.wasm":"d60993d3a441db221b47"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../web_pkg/jsonpath_wasm_bg.wasm":"a9530753c3f0aa3c5ead"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var promise;
|
||||
/******/ if(importObject instanceof Promise && typeof WebAssembly.compileStreaming === 'function') {
|
||||
/******/ promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {
|
||||
@ -299,7 +302,7 @@
|
||||
/*! no static exports found */
|
||||
/***/ (function(module, exports, __webpack_require__) {
|
||||
|
||||
eval("// A dependency graph that contains any wasm must all be imported\n// asynchronously. This `bootstrap.js` file does the single async import, so\n// that no one else needs to worry about it again.\nPromise.all(/*! import() */[__webpack_require__.e(1), __webpack_require__.e(0)]).then(__webpack_require__.bind(null, /*! ./index.js */ \"./index.js\"))\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n\n//# sourceURL=webpack:///./bootstrap.js?");
|
||||
eval("// A dependency graph that contains any wasm must all be imported\n// asynchronously. This `bootstrap.js` file does the single async import, so\n// that no one else needs to worry about it again.\nPromise.all(/*! import() */[__webpack_require__.e(0), __webpack_require__.e(1)]).then(__webpack_require__.bind(null, /*! ./index.js */ \"./index.js\"))\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n\n//# sourceURL=webpack:///./bootstrap.js?");
|
||||
|
||||
/***/ })
|
||||
|
||||
|
Binary file not shown.
59
docs/bootstrap.js
vendored
@ -10,7 +10,7 @@
|
||||
/******/ var moduleId, chunkId, i = 0, resolves = [];
|
||||
/******/ for(;i < chunkIds.length; i++) {
|
||||
/******/ chunkId = chunkIds[i];
|
||||
/******/ if(installedChunks[chunkId]) {
|
||||
/******/ if(Object.prototype.hasOwnProperty.call(installedChunks, chunkId) && installedChunks[chunkId]) {
|
||||
/******/ resolves.push(installedChunks[chunkId][0]);
|
||||
/******/ }
|
||||
/******/ installedChunks[chunkId] = 0;
|
||||
@ -52,53 +52,53 @@
|
||||
/******/ function promiseResolve() { return Promise.resolve(); }
|
||||
/******/
|
||||
/******/ var wasmImportObjects = {
|
||||
/******/ "../all_pkg/jsonpath_wasm_bg.wasm": function() {
|
||||
/******/ "../web_pkg/jsonpath_wasm_bg.wasm": function() {
|
||||
/******/ return {
|
||||
/******/ "./jsonpath_wasm": {
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "./jsonpath_wasm.js": {
|
||||
/******/ "__wbindgen_json_parse": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_parse"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_json_serialize": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_json_serialize"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_error_8015049cb5adfca2": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_error_8015049cb5adfca2"](p0i32,p1i32);
|
||||
/******/ "__wbindgen_cb_forget": function(p0i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_error_e7d3e8dbb31828c8": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbg_error_e7d3e8dbb31828c8"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_new": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_new"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbg_call_972de3aa550c37b2": function(p0i32,p1i32,p2i32,p3i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbg_call_972de3aa550c37b2"](p0i32,p1i32,p2i32,p3i32);
|
||||
/******/ "__wbg_call_1ad0eb4a7ab279eb": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbg_call_1ad0eb4a7ab279eb"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_is_string": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_is_string"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_string_get": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_debug_string": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_debug_string"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_debug_string"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_throw": function(p0i32,p1i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_rethrow": function(p0i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_rethrow"](p0i32);
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_rethrow"](p0i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper18": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper18"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper28": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper28"](p0i32,p1i32,p2i32);
|
||||
/******/ },
|
||||
/******/ "__wbindgen_closure_wrapper20": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../all_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper20"](p0i32,p1i32,p2i32);
|
||||
/******/ "__wbindgen_closure_wrapper26": function(p0i32,p1i32,p2i32) {
|
||||
/******/ return installedModules["../web_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper26"](p0i32,p1i32,p2i32);
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ };
|
||||
@ -161,6 +161,8 @@
|
||||
/******/ }
|
||||
/******/ script.src = jsonpScriptSrc(chunkId);
|
||||
/******/
|
||||
/******/ // create error before stack unwound to get useful stacktrace later
|
||||
/******/ var error = new Error();
|
||||
/******/ onScriptComplete = function (event) {
|
||||
/******/ // avoid mem leaks in IE.
|
||||
/******/ script.onerror = script.onload = null;
|
||||
@ -170,7 +172,8 @@
|
||||
/******/ if(chunk) {
|
||||
/******/ var errorType = event && (event.type === 'load' ? 'missing' : event.type);
|
||||
/******/ var realSrc = event && event.target && event.target.src;
|
||||
/******/ var error = new Error('Loading chunk ' + chunkId + ' failed.\n(' + errorType + ': ' + realSrc + ')');
|
||||
/******/ error.message = 'Loading chunk ' + chunkId + ' failed.\n(' + errorType + ': ' + realSrc + ')';
|
||||
/******/ error.name = 'ChunkLoadError';
|
||||
/******/ error.type = errorType;
|
||||
/******/ error.request = realSrc;
|
||||
/******/ chunk[1](error);
|
||||
@ -188,7 +191,7 @@
|
||||
/******/
|
||||
/******/ // Fetch + compile chunk loading for webassembly
|
||||
/******/
|
||||
/******/ var wasmModules = {"0":["../all_pkg/jsonpath_wasm_bg.wasm"]}[chunkId] || [];
|
||||
/******/ var wasmModules = {"1":["../web_pkg/jsonpath_wasm_bg.wasm"]}[chunkId] || [];
|
||||
/******/
|
||||
/******/ wasmModules.forEach(function(wasmModuleId) {
|
||||
/******/ var installedWasmModuleData = installedWasmModules[wasmModuleId];
|
||||
@ -198,7 +201,7 @@
|
||||
/******/ promises.push(installedWasmModuleData);
|
||||
/******/ else {
|
||||
/******/ var importObject = wasmImportObjects[wasmModuleId]();
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../all_pkg/jsonpath_wasm_bg.wasm":"d60993d3a441db221b47"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var req = fetch(__webpack_require__.p + "" + {"../web_pkg/jsonpath_wasm_bg.wasm":"a9530753c3f0aa3c5ead"}[wasmModuleId] + ".module.wasm");
|
||||
/******/ var promise;
|
||||
/******/ if(importObject instanceof Promise && typeof WebAssembly.compileStreaming === 'function') {
|
||||
/******/ promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {
|
||||
@ -299,7 +302,7 @@
|
||||
/*! no static exports found */
|
||||
/***/ (function(module, exports, __webpack_require__) {
|
||||
|
||||
eval("// A dependency graph that contains any wasm must all be imported\n// asynchronously. This `bootstrap.js` file does the single async import, so\n// that no one else needs to worry about it again.\n__webpack_require__.e(/*! import() */ 0).then(__webpack_require__.bind(null, /*! ./index.js */ \"./index.js\"))\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n\n\n//# sourceURL=webpack:///./bootstrap.js?");
|
||||
eval("// A dependency graph that contains any wasm must all be imported\n// asynchronously. This `bootstrap.js` file does the single async import, so\n// that no one else needs to worry about it again.\nPromise.all(/*! import() */[__webpack_require__.e(0), __webpack_require__.e(1)]).then(__webpack_require__.bind(null, /*! ./index.js */ \"./index.js\"))\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n\n\n//# sourceURL=webpack:///./bootstrap.js?");
|
||||
|
||||
/***/ })
|
||||
|
||||
|
Binary file not shown.
5324
examples/browser/package-lock.json
generated
File diff suppressed because it is too large
@ -1,31 +0,0 @@
|
||||
const jsonpath = require('jsonpath-rs');
|
||||
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let selector = new jsonpath.Selector();
|
||||
selector.path('$..friends[0]');
|
||||
selector.value(jsonObj);
|
||||
|
||||
let selectToObj = selector.selectTo();
|
||||
let selectToString = selector.selectToStr();
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret) == JSON.stringify(selectToObj),
|
||||
JSON.stringify(ret) == selectToString
|
||||
);
|
658
examples/nodejs-rs/package-lock.json
generated
@ -1,658 +0,0 @@
|
||||
{
|
||||
"name": "jsonpath-rs-example",
|
||||
"requires": true,
|
||||
"lockfileVersion": 1,
|
||||
"dependencies": {
|
||||
"ansi-escape-sequences": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-escape-sequences/-/ansi-escape-sequences-4.1.0.tgz",
|
||||
"integrity": "sha512-dzW9kHxH011uBsidTXd14JXgzye/YLb2LzeKZ4bsgl/Knwx8AtbSFkkGxagdNOoh0DlqHCmfiEjWKBaqjOanVw==",
|
||||
"requires": {
|
||||
"array-back": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"array-back": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz",
|
||||
"integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"ansi-escapes": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz",
|
||||
"integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ=="
|
||||
},
|
||||
"ansi-regex": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
|
||||
"integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg="
|
||||
},
|
||||
"ansi-styles": {
|
||||
"version": "3.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
|
||||
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
|
||||
"requires": {
|
||||
"color-convert": "^1.9.0"
|
||||
}
|
||||
},
|
||||
"array-back": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz",
|
||||
"integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==",
|
||||
"requires": {
|
||||
"typical": "^2.6.1"
|
||||
}
|
||||
},
|
||||
"balanced-match": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
|
||||
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
|
||||
},
|
||||
"brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"requires": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
}
|
||||
},
|
||||
"builtins": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/builtins/-/builtins-1.0.3.tgz",
|
||||
"integrity": "sha1-y5T662HIaWRR2zZTThQi+U8K7og="
|
||||
},
|
||||
"chalk": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.1.0.tgz",
|
||||
"integrity": "sha512-LUHGS/dge4ujbXMJrnihYMcL4AoOweGnw9Tp3kQuqy1Kx5c1qKjqvMJZ6nVJPMWJtKCTN72ZogH3oeSO9g9rXQ==",
|
||||
"requires": {
|
||||
"ansi-styles": "^3.1.0",
|
||||
"escape-string-regexp": "^1.0.5",
|
||||
"supports-color": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"chardet": {
|
||||
"version": "0.4.2",
|
||||
"resolved": "https://registry.npmjs.org/chardet/-/chardet-0.4.2.tgz",
|
||||
"integrity": "sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I="
|
||||
},
|
||||
"cli-cursor": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz",
|
||||
"integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=",
|
||||
"requires": {
|
||||
"restore-cursor": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"cli-width": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz",
|
||||
"integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk="
|
||||
},
|
||||
"color-convert": {
|
||||
"version": "1.9.3",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
|
||||
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
|
||||
"requires": {
|
||||
"color-name": "1.1.3"
|
||||
}
|
||||
},
|
||||
"color-name": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
|
||||
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU="
|
||||
},
|
||||
"command-line-args": {
|
||||
"version": "4.0.7",
|
||||
"resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-4.0.7.tgz",
|
||||
"integrity": "sha512-aUdPvQRAyBvQd2n7jXcsMDz68ckBJELXNzBybCHOibUWEg0mWTnaYCSRU8h9R+aNRSvDihJtssSRCiDRpLaezA==",
|
||||
"requires": {
|
||||
"array-back": "^2.0.0",
|
||||
"find-replace": "^1.0.3",
|
||||
"typical": "^2.6.1"
|
||||
}
|
||||
},
|
||||
"command-line-commands": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/command-line-commands/-/command-line-commands-2.0.1.tgz",
|
||||
"integrity": "sha512-m8c2p1DrNd2ruIAggxd/y6DgygQayf6r8RHwchhXryaLF8I6koYjoYroVP+emeROE9DXN5b9sP1Gh+WtvTTdtQ==",
|
||||
"requires": {
|
||||
"array-back": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"command-line-usage": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/command-line-usage/-/command-line-usage-4.1.0.tgz",
|
||||
"integrity": "sha512-MxS8Ad995KpdAC0Jopo/ovGIroV/m0KHwzKfXxKag6FHOkGsH8/lv5yjgablcRxCJJC0oJeUMuO/gmaq+Wq46g==",
|
||||
"requires": {
|
||||
"ansi-escape-sequences": "^4.0.0",
|
||||
"array-back": "^2.0.0",
|
||||
"table-layout": "^0.4.2",
|
||||
"typical": "^2.6.1"
|
||||
}
|
||||
},
|
||||
"commander": {
|
||||
"version": "2.20.0",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.0.tgz",
|
||||
"integrity": "sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ==",
|
||||
"optional": true
|
||||
},
|
||||
"concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
|
||||
},
|
||||
"deep-extend": {
|
||||
"version": "0.6.0",
|
||||
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
|
||||
"integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="
|
||||
},
|
||||
"escape-string-regexp": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
|
||||
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
|
||||
},
|
||||
"external-editor": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.2.0.tgz",
|
||||
"integrity": "sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A==",
|
||||
"requires": {
|
||||
"chardet": "^0.4.0",
|
||||
"iconv-lite": "^0.4.17",
|
||||
"tmp": "^0.0.33"
|
||||
}
|
||||
},
|
||||
"figures": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz",
|
||||
"integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=",
|
||||
"requires": {
|
||||
"escape-string-regexp": "^1.0.5"
|
||||
}
|
||||
},
|
||||
"find-replace": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/find-replace/-/find-replace-1.0.3.tgz",
|
||||
"integrity": "sha1-uI5zZNLZyVlVnziMZmcNYTBEH6A=",
|
||||
"requires": {
|
||||
"array-back": "^1.0.4",
|
||||
"test-value": "^2.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"array-back": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz",
|
||||
"integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=",
|
||||
"requires": {
|
||||
"typical": "^2.6.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"fs.realpath": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
|
||||
},
|
||||
"git-config": {
|
||||
"version": "0.0.7",
|
||||
"resolved": "https://registry.npmjs.org/git-config/-/git-config-0.0.7.tgz",
|
||||
"integrity": "sha1-qcij7wendsPXImE1bYtye2IgKyg=",
|
||||
"requires": {
|
||||
"iniparser": "~1.0.5"
|
||||
}
|
||||
},
|
||||
"glob": {
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
|
||||
"integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
|
||||
"requires": {
|
||||
"fs.realpath": "^1.0.0",
|
||||
"inflight": "^1.0.4",
|
||||
"inherits": "2",
|
||||
"minimatch": "^3.0.4",
|
||||
"once": "^1.3.0",
|
||||
"path-is-absolute": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"handlebars": {
|
||||
"version": "4.1.2",
|
||||
"resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.1.2.tgz",
|
||||
"integrity": "sha512-nvfrjqvt9xQ8Z/w0ijewdD/vvWDTOweBUm96NTr66Wfvo1mJenBLwcYmPs3TIBP5ruzYGD7Hx/DaM9RmhroGPw==",
|
||||
"requires": {
|
||||
"neo-async": "^2.6.0",
|
||||
"optimist": "^0.6.1",
|
||||
"source-map": "^0.6.1",
|
||||
"uglify-js": "^3.1.4"
|
||||
}
|
||||
},
|
||||
"has-flag": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz",
|
||||
"integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE="
|
||||
},
|
||||
"iconv-lite": {
|
||||
"version": "0.4.24",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
|
||||
"requires": {
|
||||
"safer-buffer": ">= 2.1.2 < 3"
|
||||
}
|
||||
},
|
||||
"inflight": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
|
||||
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
|
||||
"requires": {
|
||||
"once": "^1.3.0",
|
||||
"wrappy": "1"
|
||||
}
|
||||
},
|
||||
"inherits": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
|
||||
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
|
||||
},
|
||||
"iniparser": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/iniparser/-/iniparser-1.0.5.tgz",
|
||||
"integrity": "sha1-g21r7+bfv87gvM8c+fKsxwJ/eD0="
|
||||
},
|
||||
"inquirer": {
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/inquirer/-/inquirer-3.3.0.tgz",
|
||||
"integrity": "sha512-h+xtnyk4EwKvFWHrUYsWErEVR+igKtLdchu+o0Z1RL7VU/jVMFbYir2bp6bAj8efFNxWqHX0dIss6fJQ+/+qeQ==",
|
||||
"requires": {
|
||||
"ansi-escapes": "^3.0.0",
|
||||
"chalk": "^2.0.0",
|
||||
"cli-cursor": "^2.1.0",
|
||||
"cli-width": "^2.0.0",
|
||||
"external-editor": "^2.0.4",
|
||||
"figures": "^2.0.0",
|
||||
"lodash": "^4.3.0",
|
||||
"mute-stream": "0.0.7",
|
||||
"run-async": "^2.2.0",
|
||||
"rx-lite": "^4.0.8",
|
||||
"rx-lite-aggregates": "^4.0.8",
|
||||
"string-width": "^2.1.0",
|
||||
"strip-ansi": "^4.0.0",
|
||||
"through": "^2.3.6"
|
||||
}
|
||||
},
|
||||
"is-fullwidth-code-point": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
|
||||
"integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8="
|
||||
},
|
||||
"is-promise": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz",
|
||||
"integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o="
|
||||
},
|
||||
"jsonpath-rs": {
|
||||
"version": "0.1.7",
|
||||
"resolved": "https://registry.npmjs.org/jsonpath-rs/-/jsonpath-rs-0.1.7.tgz",
|
||||
"integrity": "sha512-BSuCWJK5PaTevsPHmFaLb9kzoc1Wh56+TBm6XH+gObIKA8Z3SQp6gUrgibGlApCYipha4IDo59StrdyVcvVPqA==",
|
||||
"requires": {
|
||||
"neon-cli": "^0.2.0"
|
||||
}
|
||||
},
|
||||
"lodash": {
|
||||
"version": "4.17.11",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz",
|
||||
"integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg=="
|
||||
},
|
||||
"lodash.padend": {
|
||||
"version": "4.6.1",
|
||||
"resolved": "https://registry.npmjs.org/lodash.padend/-/lodash.padend-4.6.1.tgz",
|
||||
"integrity": "sha1-U8y6BH0G4VjTEfRdpiX05J5vFm4="
|
||||
},
|
||||
"mimic-fn": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz",
|
||||
"integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ=="
|
||||
},
|
||||
"minimatch": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
|
||||
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
|
||||
"requires": {
|
||||
"brace-expansion": "^1.1.7"
|
||||
}
|
||||
},
|
||||
"minimist": {
|
||||
"version": "0.0.10",
|
||||
"resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz",
|
||||
"integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8="
|
||||
},
|
||||
"mkdirp": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz",
|
||||
"integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=",
|
||||
"requires": {
|
||||
"minimist": "0.0.8"
|
||||
},
|
||||
"dependencies": {
|
||||
"minimist": {
|
||||
"version": "0.0.8",
|
||||
"resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
|
||||
"integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0="
|
||||
}
|
||||
}
|
||||
},
|
||||
"mute-stream": {
|
||||
"version": "0.0.7",
|
||||
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz",
|
||||
"integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s="
|
||||
},
|
||||
"neo-async": {
|
||||
"version": "2.6.0",
|
||||
"resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.0.tgz",
|
||||
"integrity": "sha512-MFh0d/Wa7vkKO3Y3LlacqAEeHK0mckVqzDieUKTT+KGxi+zIpeVsFxymkIiRpbpDziHc290Xr9A1O4Om7otoRA=="
|
||||
},
|
||||
"neon-cli": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/neon-cli/-/neon-cli-0.2.0.tgz",
|
||||
"integrity": "sha512-IsrxCyUcuAyWiq4Z+JnTXrjurj2SAL2VtWnCXS8iBYGJeIs1NIhFuLaM6fe7+rOyFfDcqUUTWGxZmkvUqwweRA==",
|
||||
"requires": {
|
||||
"chalk": "~2.1.0",
|
||||
"command-line-args": "^4.0.2",
|
||||
"command-line-commands": "^2.0.0",
|
||||
"command-line-usage": "^4.0.0",
|
||||
"git-config": "0.0.7",
|
||||
"handlebars": "^4.0.3",
|
||||
"inquirer": "^3.0.6",
|
||||
"mkdirp": "^0.5.1",
|
||||
"quickly-copy-file": "^1.0.0",
|
||||
"rimraf": "^2.6.1",
|
||||
"rsvp": "^4.6.1",
|
||||
"semver": "^5.1.0",
|
||||
"toml": "^2.3.0",
|
||||
"ts-typed-json": "^0.2.2",
|
||||
"validate-npm-package-license": "^3.0.1",
|
||||
"validate-npm-package-name": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"once": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
||||
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
|
||||
"requires": {
|
||||
"wrappy": "1"
|
||||
}
|
||||
},
|
||||
"onetime": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz",
|
||||
"integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=",
|
||||
"requires": {
|
||||
"mimic-fn": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"optimist": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz",
|
||||
"integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=",
|
||||
"requires": {
|
||||
"minimist": "~0.0.1",
|
||||
"wordwrap": "~0.0.2"
|
||||
}
|
||||
},
|
||||
"os-tmpdir": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
|
||||
"integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ="
|
||||
},
|
||||
"path-is-absolute": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
|
||||
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
|
||||
},
|
||||
"quickly-copy-file": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/quickly-copy-file/-/quickly-copy-file-1.0.0.tgz",
|
||||
"integrity": "sha1-n4/wZiMFEO50IrASFHKwk6hpCFk=",
|
||||
"requires": {
|
||||
"mkdirp": "~0.5.0"
|
||||
}
|
||||
},
|
||||
"reduce-flatten": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-1.0.1.tgz",
|
||||
"integrity": "sha1-JYx479FT3fk8tWEjf2EYTzaW4yc="
|
||||
},
|
||||
"restore-cursor": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz",
|
||||
"integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=",
|
||||
"requires": {
|
||||
"onetime": "^2.0.0",
|
||||
"signal-exit": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"rimraf": {
|
||||
"version": "2.6.3",
|
||||
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz",
|
||||
"integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==",
|
||||
"requires": {
|
||||
"glob": "^7.1.3"
|
||||
}
|
||||
},
|
||||
"rsvp": {
|
||||
"version": "4.8.4",
|
||||
"resolved": "https://registry.npmjs.org/rsvp/-/rsvp-4.8.4.tgz",
|
||||
"integrity": "sha512-6FomvYPfs+Jy9TfXmBpBuMWNH94SgCsZmJKcanySzgNNP6LjWxBvyLTa9KaMfDDM5oxRfrKDB0r/qeRsLwnBfA=="
|
||||
},
|
||||
"run-async": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz",
|
||||
"integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=",
|
||||
"requires": {
|
||||
"is-promise": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"rx-lite": {
|
||||
"version": "4.0.8",
|
||||
"resolved": "https://registry.npmjs.org/rx-lite/-/rx-lite-4.0.8.tgz",
|
||||
"integrity": "sha1-Cx4Rr4vESDbwSmQH6S2kJGe3lEQ="
|
||||
},
|
||||
"rx-lite-aggregates": {
|
||||
"version": "4.0.8",
|
||||
"resolved": "https://registry.npmjs.org/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz",
|
||||
"integrity": "sha1-dTuHqJoRyVRnxKwWJsTvxOBcZ74=",
|
||||
"requires": {
|
||||
"rx-lite": "*"
|
||||
}
|
||||
},
|
||||
"safer-buffer": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
|
||||
},
|
||||
"semver": {
|
||||
"version": "5.7.0",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz",
|
||||
"integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA=="
|
||||
},
|
||||
"signal-exit": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
|
||||
"integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0="
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
|
||||
},
|
||||
"spdx-correct": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz",
|
||||
"integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==",
|
||||
"requires": {
|
||||
"spdx-expression-parse": "^3.0.0",
|
||||
"spdx-license-ids": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"spdx-exceptions": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz",
|
||||
"integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA=="
|
||||
},
|
||||
"spdx-expression-parse": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz",
|
||||
"integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==",
|
||||
"requires": {
|
||||
"spdx-exceptions": "^2.1.0",
|
||||
"spdx-license-ids": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"spdx-license-ids": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz",
|
||||
"integrity": "sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA=="
|
||||
},
|
||||
"string-width": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
|
||||
"integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
|
||||
"requires": {
|
||||
"is-fullwidth-code-point": "^2.0.0",
|
||||
"strip-ansi": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"strip-ansi": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
|
||||
"integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
|
||||
"requires": {
|
||||
"ansi-regex": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"supports-color": {
|
||||
"version": "4.5.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
|
||||
"integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
|
||||
"requires": {
|
||||
"has-flag": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"table-layout": {
|
||||
"version": "0.4.4",
|
||||
"resolved": "https://registry.npmjs.org/table-layout/-/table-layout-0.4.4.tgz",
|
||||
"integrity": "sha512-uNaR3SRMJwfdp9OUr36eyEi6LLsbcTqTO/hfTsNviKsNeyMBPICJCC7QXRF3+07bAP6FRwA8rczJPBqXDc0CkQ==",
|
||||
"requires": {
|
||||
"array-back": "^2.0.0",
|
||||
"deep-extend": "~0.6.0",
|
||||
"lodash.padend": "^4.6.1",
|
||||
"typical": "^2.6.1",
|
||||
"wordwrapjs": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"test-value": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/test-value/-/test-value-2.1.0.tgz",
|
||||
"integrity": "sha1-Edpv9nDzRxpztiXKTz/c97t0gpE=",
|
||||
"requires": {
|
||||
"array-back": "^1.0.3",
|
||||
"typical": "^2.6.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"array-back": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz",
|
||||
"integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=",
|
||||
"requires": {
|
||||
"typical": "^2.6.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"through": {
|
||||
"version": "2.3.8",
|
||||
"resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
|
||||
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
|
||||
},
|
||||
"tmp": {
|
||||
"version": "0.0.33",
|
||||
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
|
||||
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
|
||||
"requires": {
|
||||
"os-tmpdir": "~1.0.2"
|
||||
}
|
||||
},
|
||||
"toml": {
|
||||
"version": "2.3.6",
|
||||
"resolved": "https://registry.npmjs.org/toml/-/toml-2.3.6.tgz",
|
||||
"integrity": "sha512-gVweAectJU3ebq//Ferr2JUY4WKSDe5N+z0FvjDncLGyHmIDoxgY/2Ie4qfEIDm4IS7OA6Rmdm7pdEEdMcV/xQ=="
|
||||
},
|
||||
"ts-typed-json": {
|
||||
"version": "0.2.2",
|
||||
"resolved": "https://registry.npmjs.org/ts-typed-json/-/ts-typed-json-0.2.2.tgz",
|
||||
"integrity": "sha1-UxhL7ok+RZkbc8jEY6OLWeJ81H4=",
|
||||
"requires": {
|
||||
"rsvp": "^3.5.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"rsvp": {
|
||||
"version": "3.6.2",
|
||||
"resolved": "https://registry.npmjs.org/rsvp/-/rsvp-3.6.2.tgz",
|
||||
"integrity": "sha512-OfWGQTb9vnwRjwtA2QwpG2ICclHC3pgXZO5xt8H2EfgDquO0qVdSb5T88L4qJVAEugbS56pAuV4XZM58UX8ulw=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"typical": {
|
||||
"version": "2.6.1",
|
||||
"resolved": "https://registry.npmjs.org/typical/-/typical-2.6.1.tgz",
|
||||
"integrity": "sha1-XAgOXWYcu+OCWdLnCjxyU+hziB0="
|
||||
},
|
||||
"uglify-js": {
|
||||
"version": "3.5.4",
|
||||
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.5.4.tgz",
|
||||
"integrity": "sha512-GpKo28q/7Bm5BcX9vOu4S46FwisbPbAmkkqPnGIpKvKTM96I85N6XHQV+k4I6FA2wxgLhcsSyHoNhzucwCflvA==",
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"commander": "~2.20.0",
|
||||
"source-map": "~0.6.1"
|
||||
}
|
||||
},
|
||||
"validate-npm-package-license": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
|
||||
"integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==",
|
||||
"requires": {
|
||||
"spdx-correct": "^3.0.0",
|
||||
"spdx-expression-parse": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"validate-npm-package-name": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-3.0.0.tgz",
|
||||
"integrity": "sha1-X6kS2B630MdK/BQN5zF/DKffQ34=",
|
||||
"requires": {
|
||||
"builtins": "^1.0.3"
|
||||
}
|
||||
},
|
||||
"wordwrap": {
|
||||
"version": "0.0.3",
|
||||
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz",
|
||||
"integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc="
|
||||
},
|
||||
"wordwrapjs": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-3.0.0.tgz",
|
||||
"integrity": "sha512-mO8XtqyPvykVCsrwj5MlOVWvSnCdT+C+QVbm6blradR7JExAhbkZ7hZ9A+9NUtwzSqrlUo9a67ws0EiILrvRpw==",
|
||||
"requires": {
|
||||
"reduce-flatten": "^1.0.1",
|
||||
"typical": "^2.6.1"
|
||||
}
|
||||
},
|
||||
"wrappy": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
|
||||
}
|
||||
}
|
||||
}
|
@ -1,9 +0,0 @@
{
    "name": "jsonpath-rs-example",
    "scripts": {
        "start": "node index.js"
    },
    "dependencies": {
        "jsonpath-rs": "0"
    }
}
2 examples/nodejs-wasm/.gitignore vendored
@ -1,2 +0,0 @@
node_modules
dist
5 lua/.gitignore vendored Normal file
@ -0,0 +1,5 @@
.idea/*
.vscode
/target/
Cargo.lock
docker_example/ab_results/**
14 lua/Cargo.toml Normal file
@ -0,0 +1,14 @@
[package]
name = "jsonpath_lua"
version = "0.1.0"
authors = ["Changseok Han <freestrings@gmail.com>"]
license = "MIT"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["preserve_order"] }
jsonpath_lib = { path = "../" }

[[bin]]
name = "bench"
path = "bench_lua_vs_rust/example.rs"
22 lua/bench_lua_vs_rust/example.lua Normal file
@ -0,0 +1,22 @@
local jsonpath = require("jsonpath")

local iter;
if arg[1] == nil or arg[1] == '' then
    iter = 5000;
else
    iter = tonumber(arg[1]);
end

print(string.format("%s - %u", "lua iter", iter));

local file = io.open("../../benchmark/example.json", "r");
io.input(file)
local data = io.read("*a");
io.close(file);

jsonpath.init('../target/release/deps/libjsonpath_lib.so')
-- compile() only caches the path; exec() returns the callable template
jsonpath.compile("$..book[?(@.price<30 && @.category==\"fiction\")]");
local template = jsonpath.exec("$..book[?(@.price<30 && @.category==\"fiction\")]");
for i = 0, iter do
    local r = template(data);
    -- print(r);
end
46 lua/bench_lua_vs_rust/example.rs Normal file
@ -0,0 +1,46 @@
extern crate jsonpath_lib as jsonpath;
extern crate serde;
extern crate serde_json;

use std::io::Read;

use serde_json::Value;

fn read_json(path: &str) -> String {
    let mut f = std::fs::File::open(path).unwrap();
    let mut contents = String::new();
    f.read_to_string(&mut contents).unwrap();
    contents
}

fn get_string() -> String {
    read_json("../../benchmark/example.json")
}

fn get_json() -> Value {
    let string = get_string();
    serde_json::from_str(string.as_str()).unwrap()
}

fn get_path() -> &'static str {
    r#"$..book[?(@.price<30 && @.category=="fiction")]"#
}

fn main() {
    let args: Vec<String> = std::env::args().collect();
    let iter = if args.len() < 2 { 5000_usize } else { args[1].as_str().parse::<usize>().unwrap() };

    println!("rust iter - {}", iter);

    let json = get_json();
    for _ in 0..iter {
        let mut selector = jsonpath::Selector::default();
        let _ = selector.str_path(get_path());
        selector.value(&json);
        let r = selector.select();
        if r.is_err() {
            panic!();
        }
        // println!("{:?}", serde_json::to_string(&r.expect("")).unwrap());
    }
}
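One caveat about the benchmark above: the Rust loop re-parses the JsonPath string on every iteration through `str_path`, while the Lua script reuses a template precompiled once via `ffi_path_compile`. The sketch below hoists the compilation out of the loop, assuming the `Parser::compile` and `compiled_path` APIs used by the Node.js binding elsewhere in this compare; the inline JSON literal is illustrative only, and whether this changes the benchmark outcome is untested here.

```rust
// Illustrative only: compile the path once, outside the hot loop.
extern crate jsonpath_lib as jsonpath;
extern crate serde_json;

fn main() {
    let json: serde_json::Value = serde_json::from_str(
        r#"{"store": {"book": [{"category": "fiction", "price": 8.99}]}}"#,
    )
    .unwrap();

    // Parse the path a single time ...
    let node = jsonpath::Parser::compile(r#"$..book[?(@.price<30 && @.category=="fiction")]"#)
        .expect("invalid path");

    // ... and reuse the compiled node for every selection.
    for _ in 0..5000 {
        let mut selector = jsonpath::Selector::new();
        selector.compiled_path(&node);
        selector.value(&json);
        if selector.select().is_err() {
            panic!();
        }
    }
}
```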
27 lua/bench_lua_vs_rust/run.sh Executable file
@ -0,0 +1,27 @@
#!/bin/bash

# cd lua/bench_lua_vs_rust && ./run.sh

set -e

# http://luajit.org/index.html

# cargo clean && \
cargo build --release

export JSONPATH_LIB_PATH="${PWD}/../target/release/deps"
export LUA_PATH="${PWD}/../?.lua;"

echo
time cargo run --release --bin bench -- 1000
echo
time luajit example.lua 1000
echo
time cargo run --release --bin bench -- 5000
echo
time luajit example.lua 5000
echo
time cargo run --release --bin bench -- 10000
echo
time luajit example.lua 10000
107 lua/docker_example/default.conf Normal file
@ -0,0 +1,107 @@
lua_package_path '/etc/jsonpath/?.lua;;';

access_log /var/log/access.log;
error_log /var/log/error.log info;

lua_shared_dict jsonpaths 1m;

init_by_lua_block {
    local pathStrings = {
        "$.store.book[*].author",
        "$..author",
        "$.store.*",
        "$.store..price",
        "$..book[2]",
        "$..book[-2]",
        "$..book[0,1]",
        "$..book[:2]",
        "$..book[1:2]",
        "$..book[-2:]",
        "$..book[2:]",
        "$..book[?(@.isbn)]",
        "$.store.book[?(@.price == 10)]",
        "$..*",
        "$..book[ ?( (@.price < 13 || $.store.bicycle.price < @.price) && @.price <=10 ) ]",
        "$.store.book[?( (@.price < 10 || @.price > 10) && @.price > 10 )]",
        "$..[?(@.originPrice > 1)]",
        "$.pickBanner[?(@.originPrice > 1)]"
    }

    local jp = require("jsonpath")
    jp.init("/etc/jsonpath/libjsonpath_lib.so")
    local jsonpaths = ngx.shared.jsonpaths

    for i, path in ipairs(pathStrings) do
        jsonpaths:set(i, path)
        jp.compile(path)
    end

}

server {
    listen 80;
    server_name localhost;

    gzip on;
    gzip_types text/plain application/json;
    #gzip_comp_level 6;
    #gzip_vary on;

    location / {
        add_header 'Cache-Control' 'no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0';
        expires off;

        default_type 'text/plain';
        root /etc/jsonpath/example;
    }

    location /filter {
        # https://developer.mozilla.org/ko/docs/Web/HTTP/Headers/Accept-Encoding
        proxy_set_header Accept-Encoding "*";

        default_type 'text/plain';

        rewrite /filter/(.*) /$1 break;
        proxy_pass http://localhost;

        header_filter_by_lua_block {
            ngx.header["content-length"] = nil

            local args = ngx.req.get_uri_args()
            local jsonpaths = ngx.shared.jsonpaths
            local path = jsonpaths:get(args['path'])

            if path == nil then
                ngx.exit(ngx.HTTP_BAD_REQUEST)
            end
        }

        body_filter_by_lua_block {
            local chunk, eof = ngx.arg[1], ngx.arg[2]
            local buf = ngx.ctx.buf

            if eof then
                if buf then
                    local args = ngx.req.get_uri_args()
                    local path = ngx.shared.jsonpaths:get(args['path'])
                    local jsonpath = require("jsonpath")
                    local template = jsonpath.exec(path)
                    local json = buf .. chunk
                    local result = template(json)
                    ngx.arg[1] = result
                    return
                end

                return
            end

            if buf then
                ngx.ctx.buf = buf .. chunk
            else
                ngx.ctx.buf = chunk
            end

            ngx.arg[1] = nil
        }
    }
}
3 lua/docker_example/init.lua Normal file
@ -0,0 +1,3 @@
local jsonpath = require("jsonpath")
jsonpath.init("/etc/jsonpath/libjsonpath_lib.so")
ngx.log(ngx.INFO, "loaded libjsonpath_lib.so")
25 lua/docker_example/run.sh Executable file
@ -0,0 +1,25 @@
#!/usr/bin/env bash

# cd lua && cargo build --release && cd docker_example && ./run.sh

set -v

[ "$(docker ps -a | grep jsonpath)" ] && docker kill jsonpath

docker run -d --rm --name jsonpath \
    -v "${PWD}/../../benchmark/example.json":/etc/jsonpath/example/example.json:ro \
    -v "${PWD}/../../benchmark/big_example.json":/etc/jsonpath/example/big_example.json:ro \
    -v "${PWD}/../jsonpath.lua":/etc/jsonpath/jsonpath.lua:ro \
    -v "${PWD}/init.lua":/etc/jsonpath/init.lua:ro \
    -v "${PWD}/../target/release/deps/libjsonpath_lib.so":/etc/jsonpath/libjsonpath_lib.so:ro \
    -v "${PWD}/default.conf":/etc/nginx/conf.d/default.conf \
    -p 8080:80 \
    openresty/openresty:bionic

#for i in {1..16}; do
#    curl http://localhost:8080/filter/example.json?path=${i}
#    echo
#done

#ab -n 1000 -c 10 http://localhost:8080/filter/big_example.json?path=17
#ab -n 1000 -c 10 http://localhost:8080/filter/big_example.json?path=18
60 lua/jsonpath.lua Normal file
@ -0,0 +1,60 @@
local ffi = require('ffi')

ffi.cdef [[
const char* ffi_select(const char *json_str, const char *path);
void *ffi_path_compile(const char *path);
const char* ffi_select_with_compiled_path(void *ptr, const char *json_str);
]]

local jsonpath
local cache = {}
local module = {}

local function existsVariable(var)
    for k, _ in pairs(_G) do
        if k == var then
            return true
        end
    end
end

local _ngx
if existsVariable('ngx') then
    _ngx = ngx
else
    _ngx = {}
    _ngx.log = function(level, msg)
        print('[' .. tostring(level) .. '] ' .. msg)
    end
end

function module.compile(path)
    assert(jsonpath, '"libjsonpath_lib" is not loaded')

    if (cache[path] == nil) then
        cache[path] = jsonpath.ffi_path_compile(path)
        _ngx.log(_ngx.INFO, 'compile : [' .. path .. ']')
    end
end

function module.exec(path)
    local compiledPath = cache[path]

    if (compiledPath == nil) then
        assert(compiledPath, path .. ": is not compiled")
    end

    return function(jsonStr)
        local result = jsonpath.ffi_select_with_compiled_path(compiledPath, jsonStr)
        return ffi.string(result);
    end
end

function module.init(path)
    if jsonpath == nil then
        jsonpath = ffi.load(path)
        _ngx.log(_ngx.INFO, '"' .. path .. '" initialized')
    end
end

return module
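The `ffi.cdef` block above binds three C ABI symbols exported by `libjsonpath_lib.so`. Their Rust implementation is not part of this diff; the sketch below only illustrates what such exports could look like on top of the public `select_as_str`, `Parser::compile`, and `Selector` APIs shown elsewhere in this compare. The helper names and the simplified (leaking) memory handling are assumptions, not the crate's actual FFI layer.

```rust
// Sketch only - not the crate's actual FFI layer. Signatures mirror the
// ffi.cdef block in jsonpath.lua; helpers and error handling are assumptions.
extern crate jsonpath_lib as jsonpath;
extern crate serde_json;

use std::ffi::{CStr, CString};
use std::os::raw::{c_char, c_void};

// Read a C string handed over from Lua.
unsafe fn to_str<'a>(ptr: *const c_char) -> &'a str {
    CStr::from_ptr(ptr).to_str().expect("invalid UTF-8")
}

// Hand a Rust String back to Lua. The CString is leaked here for simplicity;
// a real binding would keep ownership and expose a free function.
fn into_c_string(s: String) -> *const c_char {
    CString::new(s).expect("unexpected NUL byte").into_raw() as *const c_char
}

#[no_mangle]
pub unsafe extern "C" fn ffi_select(json_str: *const c_char, path: *const c_char) -> *const c_char {
    let result = jsonpath::select_as_str(to_str(json_str), to_str(path)).expect("select failed");
    into_c_string(result)
}

#[no_mangle]
pub unsafe extern "C" fn ffi_path_compile(path: *const c_char) -> *mut c_void {
    let node = jsonpath::Parser::compile(to_str(path)).expect("invalid path");
    Box::into_raw(Box::new(node)) as *mut c_void
}

#[no_mangle]
pub unsafe extern "C" fn ffi_select_with_compiled_path(
    ptr: *mut c_void,
    json_str: *const c_char,
) -> *const c_char {
    let node = &*(ptr as *mut jsonpath::Node);
    let json: serde_json::Value = serde_json::from_str(to_str(json_str)).expect("invalid JSON");
    let mut selector = jsonpath::Selector::new();
    selector.compiled_path(node);
    selector.value(&json);
    into_c_string(selector.select_as_str().expect("select failed"))
}
```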
7 nodejs/.gitignore vendored
@ -1,7 +0,0 @@
native/target
native/index.node
native/artifacts.json
**/*~
**/node_modules
.idea
build
334 nodejs/README.md
@ -1,334 +0,0 @@
|
||||
# jsonpath-rs
|
||||
|
||||
[](https://travis-ci.org/freestrings/jsonpath)
|
||||
|
||||
It is native-addon of [jsonpath_lib](https://github.com/freestrings/jsonpath) that is [JsonPath](https://goessner.net/articles/JsonPath/) engine written in Rust.
|
||||
|
||||
## Notice
|
||||
|
||||
Pre-built binaries are not provided; the package is compiled from source at install time, and if Rust is not installed, the latest version is installed automatically.
|
||||
|
||||
> Not yet tested on Windows.

> Supported Node.js versions are below v12.0.
|
||||
|
||||
## APIs
|
||||
|
||||
<details><summary><b>npm package</b></summary>
|
||||
|
||||
```javascript
|
||||
const jsonpath = require('jsonpath-rs');
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.Selector class</b></summary>
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let selector = new jsonpath.Selector()
|
||||
.path('$..friends[0]')
|
||||
.value(jsonObj);
|
||||
|
||||
let retObj = selector.select();
|
||||
|
||||
console.log(JSON.stringify(ret) == JSON.stringify(retObj));
|
||||
|
||||
// => true
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.SelectorMut class</b></summary>
|
||||
|
||||
The builder-pattern constraints are the same as for the `Selector` class.
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
'school': {
|
||||
'friends': [
|
||||
{'name': '친구1', 'age': 20},
|
||||
{'name': '친구2', 'age': 20},
|
||||
],
|
||||
},
|
||||
'friends': [
|
||||
{'name': '친구3', 'age': 30},
|
||||
{'name': '친구4'},
|
||||
],
|
||||
};
|
||||
|
||||
let selector = new jsonpath.SelectorMut();
|
||||
selector.path('$..[?(@.age == 20)]');
|
||||
|
||||
{
|
||||
selector.value(jsonObj);
|
||||
selector.deleteValue();
|
||||
|
||||
let resultObj = {
|
||||
'school': {'friends': [null, null]},
|
||||
'friends': [
|
||||
{'name': '친구3', 'age': 30},
|
||||
{'name': '친구4'},
|
||||
],
|
||||
};
|
||||
console.log(JSON.stringify(selector.take()) !== JSON.stringify(resultObj));
|
||||
|
||||
// => true
|
||||
}
|
||||
|
||||
{
|
||||
selector.value(jsonObj);
|
||||
selector.replaceWith((v) => {
|
||||
v.age = v.age * 2;
|
||||
return v;
|
||||
});
|
||||
|
||||
let resultObj = {
|
||||
'school': {
|
||||
'friends': [
|
||||
{'name': '친구1', 'age': 40},
|
||||
{'name': '친구2', 'age': 40},
|
||||
],
|
||||
},
|
||||
'friends': [
|
||||
{'name': '친구3', 'age': 30},
|
||||
{'name': '친구4'},
|
||||
],
|
||||
};
|
||||
console.log(JSON.stringify(selector.take()) !== JSON.stringify(resultObj));
|
||||
|
||||
// => true
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.select(json: string|object, jsonpath: string)</b></summary>
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
|
||||
let selectAsString = jsonpath.select(JSON.stringify(jsonObj), '$..friends[0]');
|
||||
let selectAsObj = jsonpath.select(jsonObj, '$..friends[0]');
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret) == JSON.stringify(selectAsString),
|
||||
JSON.stringify(ret) == JSON.stringify(selectAsObj)
|
||||
);
|
||||
|
||||
// => true, true
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.compile(jsonpath: string)</b></summary>
|
||||
|
||||
```javascript
|
||||
let template = jsonpath.compile('$..friends[0]');
|
||||
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let selectAsString = template(JSON.stringify(jsonObj));
|
||||
let selectAsObj = template(jsonObj);
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret) == JSON.stringify(selectAsString),
|
||||
JSON.stringify(ret) == JSON.stringify(selectAsObj)
|
||||
);
|
||||
|
||||
// => true, true
|
||||
|
||||
let jsonObj2 = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "Millicent Norman"},
|
||||
{"name": "Vincent Cannon"}
|
||||
]
|
||||
},
|
||||
"friends": [ {"age": 30}, {"age": 40} ]
|
||||
};
|
||||
|
||||
let ret2 = [
|
||||
{"age": 30},
|
||||
{"name": "Millicent Norman"}
|
||||
];
|
||||
|
||||
let selectAsString2 = template(JSON.stringify(jsonObj2));
|
||||
let selectAsObj2 = template(jsonObj2);
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret2) == JSON.stringify(selectAsString2),
|
||||
JSON.stringify(ret2) == JSON.stringify(selectAsObj2)
|
||||
);
|
||||
|
||||
// => true, true
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.selector(json: string|object)</b></summary>
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret1 = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let ret2 = [
|
||||
{"name": "친구4"},
|
||||
{"name": "친구2", "age": 20}
|
||||
];
|
||||
|
||||
let selector = jsonpath.selector(jsonObj);
|
||||
// or as json string
|
||||
// let selector = jsonpath.selector(JSON.stringify(jsonObj));
|
||||
|
||||
let select1 = selector('$..friends[0]');
|
||||
let select2 = selector('$..friends[1]');
|
||||
|
||||
console.log(
|
||||
JSON.stringify(ret1) == JSON.stringify(select1),
|
||||
JSON.stringify(ret2) == JSON.stringify(select2)
|
||||
);
|
||||
|
||||
// => true, true
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.deleteValue(json: string|object, path: string)</b></summary>
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let _1 = jsonpath.deleteValue(jsonObj, '$..friends[0]');
|
||||
let result = jsonpath.deleteValue(_1, '$..friends[1]');
|
||||
|
||||
console.log(JSON.stringify(result) !== JSON.stringify({
|
||||
"school": { "friends": [null, null]},
|
||||
"friends": [null, null]
|
||||
}));
|
||||
|
||||
// => true
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary><b>Javascript - jsonpath.replaceWith(json: string|object, path: string, fun: function(json: object) => json: object</b></summary>
|
||||
|
||||
```javascript
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let result = jsonpath.replaceWith(jsonObj, '$..friends[0]', (v) => {
|
||||
v.age = v.age * 2;
|
||||
return v;
|
||||
});
|
||||
|
||||
console.log(JSON.stringify(result) === JSON.stringify({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 40},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 60},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
}));
|
||||
|
||||
// => true
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
[Javascript - Other Examples](https://github.com/freestrings/jsonpath/wiki/Javascript-examples)
|
@ -1,10 +0,0 @@
#!/bin/bash

if ! [ -x "$(command -v rustc)" ]; then
    echo "install rust"
    curl https://sh.rustup.rs -sSf > /tmp/rustup.sh
    sh /tmp/rustup.sh -y
    export PATH="$HOME/.cargo/bin:$PATH"
    source "$HOME/.cargo/env"
fi
check-node-version --node '<12.0' && neon build --release
@ -1,149 +0,0 @@
|
||||
const {
|
||||
CompileFn,
|
||||
SelectorFn,
|
||||
selectStr,
|
||||
deleteValue: _deleteValue,
|
||||
replaceWith: _replaceWith,
|
||||
Selector: _Selector,
|
||||
SelectorMut: _SelectorMut
|
||||
} = require('../native');
|
||||
|
||||
function compile(path) {
|
||||
let compile = new CompileFn(path);
|
||||
return (json) => {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
return JSON.parse(compile.template(json));
|
||||
};
|
||||
}
|
||||
|
||||
function selector(json) {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
let selector = new SelectorFn(json);
|
||||
return (path) => {
|
||||
return JSON.parse(selector.select(path));
|
||||
}
|
||||
}
|
||||
|
||||
function select(json, path) {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
return JSON.parse(selectStr(json, path));
|
||||
}
|
||||
|
||||
function deleteValue(json, path) {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
return JSON.parse(_deleteValue(json, path));
|
||||
}
|
||||
|
||||
function replaceWith(json, path, fun) {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
let result = _replaceWith(json, path, (v) => {
|
||||
let result = fun(JSON.parse(v));
|
||||
if(typeof result != 'string') {
|
||||
result = JSON.stringify(result)
|
||||
}
|
||||
return result;
|
||||
});
|
||||
if(typeof result == 'string') {
|
||||
result = JSON.parse(result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
class Selector {
|
||||
constructor() {
|
||||
this._selector = new _Selector();
|
||||
return this;
|
||||
}
|
||||
|
||||
path(path) {
|
||||
this._selector.path(path);
|
||||
return this;
|
||||
}
|
||||
|
||||
value(json) {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
this._selector.value(json);
|
||||
return this;
|
||||
}
|
||||
|
||||
select() {
|
||||
return JSON.parse(this._selector.select());
|
||||
}
|
||||
}
|
||||
|
||||
class SelectorMut {
|
||||
constructor() {
|
||||
return this;
|
||||
}
|
||||
|
||||
path(path) {
|
||||
this._path = path;
|
||||
return this;
|
||||
}
|
||||
|
||||
value(json) {
|
||||
if(typeof json != 'string') {
|
||||
json = JSON.stringify(json)
|
||||
}
|
||||
this._json = json;
|
||||
return this;
|
||||
}
|
||||
|
||||
deleteValue() {
|
||||
let selector = new _SelectorMut();
|
||||
if(!this._path) {
|
||||
selector.emptyPathError();
|
||||
return;
|
||||
}
|
||||
|
||||
if(!this._json) {
|
||||
selector.emptyValueError();
|
||||
return;
|
||||
}
|
||||
|
||||
this._json = deleteValue(this._json, this._path);
|
||||
return this;
|
||||
}
|
||||
|
||||
replaceWith(fun) {
|
||||
let selector = new _SelectorMut();
|
||||
if(!this._path) {
|
||||
selector.emptyPathError();
|
||||
return;
|
||||
}
|
||||
if(!this._json) {
|
||||
selector.emptyValueError();
|
||||
return;
|
||||
}
|
||||
this._json = replaceWith(this._json, this._path, fun);
|
||||
return this;
|
||||
}
|
||||
|
||||
take() {
|
||||
let json = this._json;
|
||||
delete this._json;
|
||||
return json;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
compile,
|
||||
selector,
|
||||
select,
|
||||
deleteValue,
|
||||
replaceWith,
|
||||
Selector,
|
||||
SelectorMut
|
||||
};
|
5 nodejs/native/.gitignore vendored
@ -1,5 +0,0 @@
.idea/*
.vscode
!.idea/runConfigurations/
/target/
Cargo.lock
@ -1,25 +0,0 @@
[package]
name = "jsonpath4nodejs"
version = "0.2.2"
authors = ["Changseok Han <freestrings@gmail.com>"]
description = "jsonpath_lib bindings for nodejs"
keywords = ["library", "jsonpath", "json", "nodejs"]
repository = "https://github.com/freestrings/jsonpath"
license = "MIT"

build = "build.rs"
exclude = ["artifacts.json", "index.node"]

[build-dependencies]
neon-build = "0.2.0"

[dependencies]
jsonpath_lib = "0.2.2"
#jsonpath_lib = { path = "../../" }
neon = "0.2.0"
neon-serde = "0.1.1"
serde_json = { version = "1.0", features = ["preserve_order"] }

[lib]
name = "jsonpath4nodejs"
crate-type = ["dylib"]
@ -1,7 +0,0 @@
extern crate neon_build;

fn main() {
    neon_build::setup(); // must be called in build.rs

    // add project-specific build logic here...
}
@ -1,284 +0,0 @@
|
||||
extern crate jsonpath_lib as jsonpath;
|
||||
#[macro_use]
|
||||
extern crate neon;
|
||||
extern crate neon_serde;
|
||||
extern crate serde_json;
|
||||
|
||||
use jsonpath::{JsonPathError, Node, Parser, Selector};
|
||||
use neon::prelude::*;
|
||||
use serde_json::Value;
|
||||
|
||||
///
|
||||
/// `neon_serde::from_value` has very poor performance.
|
||||
///
|
||||
fn select(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
let json_val = ctx.argument::<JsValue>(0)?;
|
||||
let json: Value = neon_serde::from_value(&mut ctx, json_val)?;
|
||||
let path = ctx.argument::<JsString>(1)?.value();
|
||||
|
||||
match jsonpath::select(&json, path.as_str()) {
|
||||
Ok(value) => Ok(neon_serde::to_value(&mut ctx, &value)?),
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
|
||||
fn select_str(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
let json_val = ctx.argument::<JsString>(0)?.value();
|
||||
let path = ctx.argument::<JsString>(1)?.value();
|
||||
match jsonpath::select_as_str(&json_val, path.as_str()) {
|
||||
Ok(value) => Ok(JsString::new(&mut ctx, &value).upcast()),
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
|
||||
fn delete(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
let json_val = ctx.argument::<JsString>(0)?.value();
|
||||
let json: Value = match serde_json::from_str(&json_val) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
};
|
||||
let path = ctx.argument::<JsString>(1)?.value();
|
||||
match jsonpath::delete(json, &path) {
|
||||
Ok(value) => Ok(JsString::new(
|
||||
&mut ctx,
|
||||
match serde_json::to_string(&value) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
},
|
||||
)
|
||||
.upcast()),
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
|
||||
fn replace_with(mut ctx: FunctionContext) -> JsResult<JsValue> {
|
||||
let json_val = ctx.argument::<JsString>(0)?.value();
|
||||
let json: Value = match serde_json::from_str(&json_val) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
};
|
||||
let path = ctx.argument::<JsString>(1)?.value();
|
||||
let fun = ctx.argument::<JsFunction>(2)?;
|
||||
match jsonpath::replace_with(json, &path, &mut |v| {
|
||||
let json_str = JsString::new(
|
||||
&mut ctx,
|
||||
match serde_json::to_string(v) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
},
|
||||
);
|
||||
|
||||
let null = ctx.null();
|
||||
let args = vec![ctx.string(json_str.value())];
|
||||
let result = match fun.call(&mut ctx, null, args) {
|
||||
Ok(result) => result,
|
||||
Err(e) => panic!("{:?}", e),
|
||||
};
|
||||
let json_str = match result.downcast::<JsString>() {
|
||||
Ok(v) => v.value(),
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
};
|
||||
match serde_json::from_str(&json_str) {
|
||||
Ok(v) => v,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
}
|
||||
}) {
|
||||
Ok(value) => Ok(JsString::new(
|
||||
&mut ctx,
|
||||
match serde_json::to_string(&value) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
},
|
||||
)
|
||||
.upcast()),
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SelectorCls {
|
||||
node: Option<Node>,
|
||||
value: Option<Value>,
|
||||
}
|
||||
|
||||
impl SelectorCls {
|
||||
fn path(&mut self, path: &str) {
|
||||
let node = match Parser::compile(path) {
|
||||
Ok(node) => node,
|
||||
Err(e) => panic!("{:?}", e),
|
||||
};
|
||||
|
||||
self.node = Some(node);
|
||||
}
|
||||
|
||||
fn value(&mut self, json_str: &str) {
|
||||
let value: Value = match serde_json::from_str(&json_str) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string())),
|
||||
};
|
||||
|
||||
self.value = Some(value);
|
||||
}
|
||||
|
||||
fn select(&self) -> String {
|
||||
let node = match &self.node {
|
||||
Some(node) => node,
|
||||
None => panic!("{:?}", JsonPathError::EmptyPath),
|
||||
};
|
||||
|
||||
let value = match &self.value {
|
||||
Some(value) => value,
|
||||
None => panic!("{:?}", JsonPathError::EmptyValue),
|
||||
};
|
||||
|
||||
let mut selector = Selector::new();
|
||||
selector.compiled_path(node);
|
||||
selector.value(&value);
|
||||
match selector.select_as_str() {
|
||||
Ok(ret) => ret,
|
||||
Err(e) => panic!("{:?}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SelectorMutCls {}
|
||||
|
||||
declare_types! {
|
||||
pub class JsCompileFn for SelectorCls {
|
||||
init(mut ctx) {
|
||||
let path = ctx.argument::<JsString>(0)?.value();
|
||||
let node = match Parser::compile(path.as_str()) {
|
||||
Ok(node) => node,
|
||||
Err(e) => panic!("{:?}", e)
|
||||
};
|
||||
|
||||
Ok(SelectorCls { node: Some(node), value: None })
|
||||
}
|
||||
|
||||
method template(mut ctx) {
|
||||
let mut this = ctx.this();
|
||||
|
||||
let json_str = ctx.argument::<JsString>(0)?.value();
|
||||
{
|
||||
let guard = ctx.lock();
|
||||
let mut this = this.borrow_mut(&guard);
|
||||
let value: Value = match serde_json::from_str(&json_str) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
};
|
||||
this.value = Some(value);
|
||||
};
|
||||
|
||||
let result_str = {
|
||||
let guard = ctx.lock();
|
||||
let this = this.borrow(&guard);
|
||||
this.select()
|
||||
};
|
||||
|
||||
Ok(JsString::new(&mut ctx, &result_str).upcast())
|
||||
}
|
||||
}
|
||||
|
||||
pub class JsSelectorFn for SelectorCls {
|
||||
init(mut ctx) {
|
||||
let json_str = ctx.argument::<JsString>(0)?.value();
|
||||
let value: Value = match serde_json::from_str(&json_str) {
|
||||
Ok(value) => value,
|
||||
Err(e) => panic!("{:?}", JsonPathError::Serde(e.to_string()))
|
||||
};
|
||||
|
||||
Ok(SelectorCls { node: None, value: Some(value) })
|
||||
}
|
||||
|
||||
method select(mut ctx) {
|
||||
let mut this = ctx.this();
|
||||
|
||||
let path = ctx.argument::<JsString>(0)?.value();
|
||||
{
|
||||
let guard = ctx.lock();
|
||||
let mut this = this.borrow_mut(&guard);
|
||||
this.path(&path);
|
||||
}
|
||||
|
||||
let result_str = {
|
||||
let guard = ctx.lock();
|
||||
let this = this.borrow(&guard);
|
||||
this.select()
|
||||
};
|
||||
|
||||
Ok(JsString::new(&mut ctx, &result_str).upcast())
|
||||
}
|
||||
}
|
||||
|
||||
pub class JsSelector for SelectorCls {
|
||||
init(mut _ctx) {
|
||||
Ok(SelectorCls { node: None, value: None })
|
||||
}
|
||||
|
||||
method path(mut ctx) {
|
||||
let mut this = ctx.this();
|
||||
|
||||
let path = ctx.argument::<JsString>(0)?.value();
|
||||
{
|
||||
let guard = ctx.lock();
|
||||
let mut this = this.borrow_mut(&guard);
|
||||
let _ = this.path(&path);
|
||||
}
|
||||
|
||||
Ok(JsUndefined::new().upcast())
|
||||
}
|
||||
|
||||
method value(mut ctx) {
|
||||
let mut this = ctx.this();
|
||||
|
||||
let json_str = ctx.argument::<JsString>(0)?.value();
|
||||
{
|
||||
let guard = ctx.lock();
|
||||
let mut this = this.borrow_mut(&guard);
|
||||
let _ = this.value(&json_str);
|
||||
}
|
||||
|
||||
Ok(JsUndefined::new().upcast())
|
||||
}
|
||||
|
||||
method select(mut ctx) {
|
||||
let this = ctx.this();
|
||||
|
||||
let result_str = {
|
||||
let guard = ctx.lock();
|
||||
let this = this.borrow(&guard);
|
||||
this.select()
|
||||
};
|
||||
|
||||
Ok(JsString::new(&mut ctx, &result_str).upcast())
|
||||
}
|
||||
}
|
||||
|
||||
pub class JsSelectorMut for SelectorMutCls {
|
||||
init(mut _ctx) {
|
||||
Ok(SelectorMutCls {})
|
||||
}
|
||||
|
||||
method emptyPathError(mut _ctx) {
|
||||
panic!("{:?}", JsonPathError::EmptyPath);
|
||||
}
|
||||
|
||||
method emptyValueError(mut _ctx) {
|
||||
panic!("{:?}", JsonPathError::EmptyValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
register_module!(mut m, {
|
||||
m.export_class::<JsCompileFn>("CompileFn")
|
||||
.expect("CompileFn class error");
|
||||
m.export_class::<JsSelectorFn>("SelectorFn")
|
||||
.expect("SelectorFn class error");
|
||||
m.export_class::<JsSelector>("Selector")
|
||||
.expect("Selector class error");
|
||||
m.export_class::<JsSelectorMut>("SelectorMut")
|
||||
.expect("SelectorMut class error");
|
||||
m.export_function("select", select)?;
|
||||
m.export_function("deleteValue", delete)?;
|
||||
m.export_function("replaceWith", replace_with)?;
|
||||
m.export_function("selectStr", select_str)?;
|
||||
Ok(())
|
||||
});
|
1484  nodejs/package-lock.json  generated
File diff suppressed because it is too large
@ -1,35 +0,0 @@
|
||||
{
|
||||
"name": "jsonpath-rs",
|
||||
"version": "0.2.2",
|
||||
"description": "It is JsonPath implementation. The core implementation is written in Rust",
|
||||
"author": "Changseok Han <freestrings@gmail.com>",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"jsonpath",
|
||||
"native-addon",
|
||||
"rust-binding",
|
||||
"rust",
|
||||
"json",
|
||||
"parsing"
|
||||
],
|
||||
"main": "lib/index.js",
|
||||
"dependencies": {
|
||||
"check-node-version": "*",
|
||||
"neon-cli": "^0.2.0"
|
||||
},
|
||||
"scripts": {
|
||||
"install": "./build.sh",
|
||||
"test": "mocha"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mocha": "^6.1.4"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/freestrings/jsonpath.git"
|
||||
},
|
||||
"engineStrict": true,
|
||||
"engines": {
|
||||
"node": ">=8.0 <12.0"
|
||||
}
|
||||
}
|
@ -1,896 +0,0 @@
|
||||
const jsonpath = require('../lib/index.js');
|
||||
|
||||
let jsonObj = {
|
||||
"store": {
|
||||
"book": [
|
||||
{
|
||||
"category": "reference",
|
||||
"author": "Nigel Rees",
|
||||
"title": "Sayings of the Century",
|
||||
"price": 8.95
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Evelyn Waugh",
|
||||
"title": "Sword of Honour",
|
||||
"price": 12.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "J. R. R. Tolkien",
|
||||
"title": "The Lord of the Rings",
|
||||
"isbn": "0-395-19395-8",
|
||||
"price": 22.99
|
||||
}
|
||||
],
|
||||
"bicycle": {
|
||||
"color": "red",
|
||||
"price": 19.95
|
||||
}
|
||||
},
|
||||
"expensive": 10
|
||||
};
|
||||
|
||||
let list = {
|
||||
'$.store.book[*].author': [
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
],
|
||||
|
||||
'$..author':[
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
],
|
||||
|
||||
'$.store.*': [
|
||||
[
|
||||
{
|
||||
"category": "reference",
|
||||
"author": "Nigel Rees",
|
||||
"title": "Sayings of the Century",
|
||||
"price": 8.95
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Evelyn Waugh",
|
||||
"title": "Sword of Honour",
|
||||
"price": 12.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "J. R. R. Tolkien",
|
||||
"title": "The Lord of the Rings",
|
||||
"isbn": "0-395-19395-8",
|
||||
"price": 22.99
|
||||
}
|
||||
],
|
||||
{
|
||||
"color": "red",
|
||||
"price": 19.95
|
||||
}
|
||||
],
|
||||
|
||||
'$.store..price':[
|
||||
8.95,
|
||||
12.99,
|
||||
8.99,
|
||||
22.99,
|
||||
19.95
|
||||
],
|
||||
|
||||
'$..book[2]': [
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
}
|
||||
],
|
||||
|
||||
'$..book[-2]': [
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
}
|
||||
],
|
||||
|
||||
'$..book[0,1]': [
|
||||
{
|
||||
"category": "reference",
|
||||
"author": "Nigel Rees",
|
||||
"title": "Sayings of the Century",
|
||||
"price": 8.95
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Evelyn Waugh",
|
||||
"title": "Sword of Honour",
|
||||
"price": 12.99
|
||||
}
|
||||
],
|
||||
|
||||
'$..book[:2]': [
|
||||
{
|
||||
"category": "reference",
|
||||
"author": "Nigel Rees",
|
||||
"title": "Sayings of the Century",
|
||||
"price": 8.95
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Evelyn Waugh",
|
||||
"title": "Sword of Honour",
|
||||
"price": 12.99
|
||||
}
|
||||
],
|
||||
|
||||
'$..book[1:2]': [
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Evelyn Waugh",
|
||||
"title": "Sword of Honour",
|
||||
"price": 12.99
|
||||
}
|
||||
],
|
||||
|
||||
'$..book[-2:]': [
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "J. R. R. Tolkien",
|
||||
"title": "The Lord of the Rings",
|
||||
"isbn": "0-395-19395-8",
|
||||
"price": 22.99
|
||||
}
|
||||
],
|
||||
|
||||
'$..book[2:]': [
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "J. R. R. Tolkien",
|
||||
"title": "The Lord of the Rings",
|
||||
"isbn": "0-395-19395-8",
|
||||
"price": 22.99
|
||||
}
|
||||
],
|
||||
|
||||
'$..book[?(@.isbn)]': [
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "J. R. R. Tolkien",
|
||||
"title": "The Lord of the Rings",
|
||||
"isbn": "0-395-19395-8",
|
||||
"price": 22.99
|
||||
}
|
||||
],
|
||||
|
||||
'$.store.book[?(@.price < 10)]': [
|
||||
{
|
||||
"category": "reference",
|
||||
"author": "Nigel Rees",
|
||||
"title": "Sayings of the Century",
|
||||
"price": 8.95
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
}
|
||||
],
|
||||
|
||||
'$..*': [
|
||||
{
|
||||
"book": [
|
||||
{
|
||||
"category": "reference",
|
||||
"author": "Nigel Rees",
|
||||
"title": "Sayings of the Century",
|
||||
"price": 8.95
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Evelyn Waugh",
|
||||
"title": "Sword of Honour",
|
||||
"price": 12.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "J. R. R. Tolkien",
|
||||
"title": "The Lord of the Rings",
|
||||
"isbn": "0-395-19395-8",
|
||||
"price": 22.99
|
||||
}
|
||||
],
|
||||
"bicycle": {
|
||||
"color": "red",
|
||||
"price": 19.95
|
||||
}
|
||||
},
|
||||
10,
|
||||
[
|
||||
{
|
||||
"category": "reference",
|
||||
"author": "Nigel Rees",
|
||||
"title": "Sayings of the Century",
|
||||
"price": 8.95
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Evelyn Waugh",
|
||||
"title": "Sword of Honour",
|
||||
"price": 12.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "J. R. R. Tolkien",
|
||||
"title": "The Lord of the Rings",
|
||||
"isbn": "0-395-19395-8",
|
||||
"price": 22.99
|
||||
}
|
||||
],
|
||||
{
|
||||
"color": "red",
|
||||
"price": 19.95
|
||||
},
|
||||
{
|
||||
"category": "reference",
|
||||
"author": "Nigel Rees",
|
||||
"title": "Sayings of the Century",
|
||||
"price": 8.95
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Evelyn Waugh",
|
||||
"title": "Sword of Honour",
|
||||
"price": 12.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "J. R. R. Tolkien",
|
||||
"title": "The Lord of the Rings",
|
||||
"isbn": "0-395-19395-8",
|
||||
"price": 22.99
|
||||
},
|
||||
"reference",
|
||||
"Nigel Rees",
|
||||
"Sayings of the Century",
|
||||
8.95,
|
||||
"fiction",
|
||||
"Evelyn Waugh",
|
||||
"Sword of Honour",
|
||||
12.99,
|
||||
"fiction",
|
||||
"Herman Melville",
|
||||
"Moby Dick",
|
||||
"0-553-21311-3",
|
||||
8.99,
|
||||
"fiction",
|
||||
"J. R. R. Tolkien",
|
||||
"The Lord of the Rings",
|
||||
"0-395-19395-8",
|
||||
22.99,
|
||||
"red",
|
||||
19.95
|
||||
],
|
||||
|
||||
'$..book[ ?( (@.price < 13 || $.store.bicycle.price < @.price) && @.price <=10 ) ]': [
|
||||
{
|
||||
"category": "reference",
|
||||
"author": "Nigel Rees",
|
||||
"title": "Sayings of the Century",
|
||||
"price": 8.95
|
||||
},
|
||||
{
|
||||
"category": "fiction",
|
||||
"author": "Herman Melville",
|
||||
"title": "Moby Dick",
|
||||
"isbn": "0-553-21311-3",
|
||||
"price": 8.99
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
describe('compile test', () => {
|
||||
it('basic', (done) => {
|
||||
let template = jsonpath.compile('$.a');
|
||||
let result = template({'a': 1});
|
||||
if (result[0] === 1) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('selector test', () => {
|
||||
it('basic', (done) => {
|
||||
let selector = jsonpath.selector({'a': 1});
|
||||
let result = selector('$.a');
|
||||
if (result[0] === 1) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('select test', () => {
|
||||
it('basic', (done) => {
|
||||
let result = jsonpath.select({'a': 1}, '$.a');
|
||||
if (result[0] === 1) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('filter test', () => {
|
||||
|
||||
function run(done, path, expected) {
|
||||
let result = jsonpath.select(jsonObj, path);
|
||||
if (JSON.stringify(result) === JSON.stringify(expected)) {
|
||||
done();
|
||||
}
|
||||
}
|
||||
|
||||
for( var i in list ) {
|
||||
it(i, (done) => {
|
||||
run (done, i, list[i]);
|
||||
})
|
||||
}
|
||||
|
||||
it('object equal', (done) => {
|
||||
let selector = new jsonpath.Selector();
|
||||
selector.path('$..[?(@.a == 1)]');
|
||||
selector.value({
|
||||
'a': 1,
|
||||
'b': {'a': 1},
|
||||
'c': {'a': 1},
|
||||
});
|
||||
let result = selector.select();
|
||||
if (JSON.stringify(result) === JSON.stringify([{'a': 1}, {'a': 1}])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('escaped single quote notation', (done) => {
|
||||
let result = jsonpath.select({"single'quote":"value"}, "$['single\\'quote']");
|
||||
if (JSON.stringify(result) === JSON.stringify(["value"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('escaped double quote notation', (done) => {
|
||||
let result = jsonpath.select({"single\"quote":"value"}, "$['single\"quote']");
|
||||
if (JSON.stringify(result) === JSON.stringify(["value"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[::]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "second", "third", "forth", "fifth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[::2]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[::2]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "third", "fifth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[1: :]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1: :]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["second", "third", "forth", "fifth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[1:2:]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1:2:]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["second"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[1::2]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[1::2]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["second", "forth"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[0:3:1]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:1]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "second", "third"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array range with step - $[0:3:2]', (done) => {
|
||||
let result = jsonpath.select(["first", "second", "third", "forth", "fifth"], "$[0:3:2]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "third"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('array keys', (done) => {
|
||||
let result = jsonpath.select({
|
||||
"key1": "value1",
|
||||
"key2": 2
|
||||
}, "$['key1', 'key2']");
|
||||
if (JSON.stringify(result) === JSON.stringify(["value1", 2])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('SelectorMut test', () => {
|
||||
it('delete', (done) => {
|
||||
let jsonObjNew = JSON.parse(JSON.stringify(jsonObj));
|
||||
let result = jsonpath.deleteValue(jsonObjNew, '$.store.book');
|
||||
if (JSON.stringify(result) === JSON.stringify({
|
||||
'store': {
|
||||
'book': null,
|
||||
'bicycle': {
|
||||
'color': 'red',
|
||||
'price': 19.95,
|
||||
},
|
||||
},
|
||||
'expensive': 10,
|
||||
})) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('replaceWith', (done) => {
|
||||
let jsonObjNew = JSON.parse(JSON.stringify(jsonObj));
|
||||
let result = jsonpath.replaceWith(jsonObjNew, '$.store.book', (v) => {
|
||||
let ret = v[0];
|
||||
ret.price = 9;
|
||||
return ret;
|
||||
});
|
||||
if (JSON.stringify(result) === JSON.stringify({
|
||||
'store': {
|
||||
'book': {
|
||||
'category': 'reference',
|
||||
'author': 'Nigel Rees',
|
||||
'title': 'Sayings of the Century',
|
||||
'price': 9,
|
||||
},
|
||||
'bicycle': {
|
||||
'color': 'red',
|
||||
'price': 19.95,
|
||||
},
|
||||
},
|
||||
'expensive': 10,
|
||||
})) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('SelectorMut delete', (done) => {
|
||||
let jsonObjNew = JSON.parse(JSON.stringify(jsonObj));
|
||||
let selector = new jsonpath.SelectorMut();
|
||||
selector.path('$.store.book').value(jsonObjNew).deleteValue();
|
||||
|
||||
let result = selector.take();
|
||||
if (JSON.stringify(result) === JSON.stringify({
|
||||
'store': {
|
||||
'book': null,
|
||||
'bicycle': {
|
||||
'color': 'red',
|
||||
'price': 19.95,
|
||||
},
|
||||
},
|
||||
'expensive': 10,
|
||||
})) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('SelectorMut replaceWith', (done) => {
|
||||
let jsonObjNew = JSON.parse(JSON.stringify(jsonObj));
|
||||
let selector = new jsonpath.SelectorMut();
|
||||
selector.path('$.store.book').value(jsonObjNew).replaceWith((v) => {
|
||||
let ret = v[0];
|
||||
ret.price = 9;
|
||||
return ret;
|
||||
});
|
||||
|
||||
let result = selector.take();
|
||||
if (JSON.stringify(result) === JSON.stringify({
|
||||
'store': {
|
||||
'book': {
|
||||
'category': 'reference',
|
||||
'author': 'Nigel Rees',
|
||||
'title': 'Sayings of the Century',
|
||||
'price': 9,
|
||||
},
|
||||
'bicycle': {
|
||||
'color': 'red',
|
||||
'price': 19.95,
|
||||
},
|
||||
},
|
||||
'expensive': 10,
|
||||
})) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Selector test', () => {
|
||||
it('select', (done) => {
|
||||
let selector = new jsonpath.Selector().value(jsonObj);
|
||||
for(var i in list) {
|
||||
if(JSON.stringify(list[i]) !== JSON.stringify(selector.path(i).select())) {
|
||||
throw `fail: ${i}`;
|
||||
}
|
||||
}
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
describe('README test', () => {
|
||||
it('jsonpath.Selector', (done) => {
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let selector = new jsonpath.Selector().value(jsonObj);
|
||||
|
||||
{
|
||||
let jsonObj = selector.path('$..[?(@.age >= 30)]').select();
|
||||
let resultObj = [{"name": "친구3", "age": 30}];
|
||||
if(JSON.stringify(jsonObj) !== JSON.stringify(resultObj)) {
|
||||
throw 'jsonpath.Selector: $..[?(@.age >= 30)]';
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
let jsonObj = selector.path('$..[?(@.age == 20)]').select();
|
||||
let resultObj = [{"name": "친구1", "age": 20}, {"name": "친구2", "age": 20}];
|
||||
if(JSON.stringify(jsonObj) !== JSON.stringify(resultObj)) {
|
||||
throw 'jsonpath.Selector: $..[?(@.age >= 20)]';
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
let jsonObj = selector.value({"friends": [ {"name": "친구5", "age": 20} ]}).select();
|
||||
let resultObj = [{"name": "친구5", "age": 20}];
|
||||
if(JSON.stringify(jsonObj) !== JSON.stringify(resultObj)) {
|
||||
throw 'jsonpath.Selector: change value';
|
||||
}
|
||||
}
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it('jsonpath.SelectorMut', (done) => {
|
||||
let jsonObj = {
|
||||
'school': {
|
||||
'friends': [
|
||||
{'name': '친구1', 'age': 20},
|
||||
{'name': '친구2', 'age': 20},
|
||||
],
|
||||
},
|
||||
'friends': [
|
||||
{'name': '친구3', 'age': 30},
|
||||
{'name': '친구4'},
|
||||
],
|
||||
};
|
||||
|
||||
let selector = new jsonpath.SelectorMut();
|
||||
selector.path('$..[?(@.age == 20)]');
|
||||
|
||||
{
|
||||
selector.value(jsonObj).deleteValue();
|
||||
|
||||
let resultObj = {
|
||||
'school': {'friends': [null, null]},
|
||||
'friends': [
|
||||
{'name': '친구3', 'age': 30},
|
||||
{'name': '친구4'},
|
||||
],
|
||||
};
|
||||
if (JSON.stringify(selector.take()) !== JSON.stringify(resultObj)) {
|
||||
throw 'jsonpath.SelectorMut.deleteValue';
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
selector.value(jsonObj).replaceWith((v) => {
|
||||
v.age = v.age * 2;
|
||||
return v;
|
||||
});
|
||||
|
||||
let resultObj = {
|
||||
'school': {
|
||||
'friends': [
|
||||
{'name': '친구1', 'age': 40},
|
||||
{'name': '친구2', 'age': 40},
|
||||
],
|
||||
},
|
||||
'friends': [
|
||||
{'name': '친구3', 'age': 30},
|
||||
{'name': '친구4'},
|
||||
],
|
||||
};
|
||||
if (JSON.stringify(selector.take()) !== JSON.stringify(resultObj)) {
|
||||
throw 'jsonpath.SelectorMut.replaceWith';
|
||||
}
|
||||
}
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it('jsonpath.select(json: string|object, jsonpath: string)', (done) => {
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
|
||||
let selectAsString = jsonpath.select(JSON.stringify(jsonObj), '$..friends[0]');
|
||||
let selectAsObj = jsonpath.select(jsonObj, '$..friends[0]');
|
||||
|
||||
if(
|
||||
JSON.stringify(ret) !== JSON.stringify(selectAsString) ||
|
||||
JSON.stringify(ret) !== JSON.stringify(selectAsObj)
|
||||
) {
|
||||
throw 'jsonpath.select(json: string|object, jsonpath: string)';
|
||||
}
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it('jsonpath.compile(jsonpath: string)', (done) => {
|
||||
let template = jsonpath.compile('$..friends[0]');
|
||||
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let selectAsString = template(JSON.stringify(jsonObj));
|
||||
let selectAsObj = template(jsonObj);
|
||||
|
||||
if(
|
||||
JSON.stringify(ret) !== JSON.stringify(selectAsString) ||
|
||||
JSON.stringify(ret) !== JSON.stringify(selectAsObj)
|
||||
) {
|
||||
throw 'jsonpath.compile(jsonpath: string) 1';
|
||||
}
|
||||
|
||||
let jsonObj2 = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "Millicent Norman"},
|
||||
{"name": "Vincent Cannon"}
|
||||
]
|
||||
},
|
||||
"friends": [ {"age": 30}, {"age": 40} ]
|
||||
};
|
||||
|
||||
let ret2 = [
|
||||
{"age": 30},
|
||||
{"name": "Millicent Norman"}
|
||||
];
|
||||
|
||||
let selectAsString2 = template(JSON.stringify(jsonObj2));
|
||||
let selectAsObj2 = template(jsonObj2);
|
||||
|
||||
if(
|
||||
JSON.stringify(ret2) !== JSON.stringify(selectAsString2) ||
|
||||
JSON.stringify(ret2) !== JSON.stringify(selectAsObj2)
|
||||
) {
|
||||
throw 'jsonpath.compile(jsonpath: string) 2';
|
||||
}
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it('jsonpath.selector(json: string|object)', (done) => {
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let ret1 = [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구1", "age": 20}
|
||||
];
|
||||
|
||||
let ret2 = [
|
||||
{"name": "친구4"},
|
||||
{"name": "친구2", "age": 20}
|
||||
];
|
||||
|
||||
let selector = jsonpath.selector(jsonObj);
|
||||
let select1 = selector('$..friends[0]');
|
||||
let select2 = selector('$..friends[1]');
|
||||
|
||||
if(
|
||||
JSON.stringify(ret1) !== JSON.stringify(select1) ||
|
||||
JSON.stringify(ret2) !== JSON.stringify(select2)
|
||||
) {
|
||||
throw 'jsonpath.selector(json: string|object)';
|
||||
}
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it('jsonpath.deleteValue(json: string|object, path: string)', (done) => {
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let _1 = jsonpath.deleteValue(jsonObj, '$..friends[0]');
|
||||
let result = jsonpath.deleteValue(_1, '$..friends[1]');
|
||||
|
||||
if(JSON.stringify(result) === JSON.stringify({
|
||||
"school": { "friends": [null, null]},
|
||||
"friends": [null, null]
|
||||
})) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('jsonpath.replaceWith(json: string|object, path: string, fun: function(json: object) => json: object)', (done) => {
|
||||
let jsonObj = {
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
};
|
||||
|
||||
let result = jsonpath.replaceWith(jsonObj, '$..friends[0]', (v) => {
|
||||
v.age = v.age * 2;
|
||||
return v;
|
||||
});
|
||||
|
||||
if(JSON.stringify(result) === JSON.stringify({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 40},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 60},
|
||||
{"name": "친구4"}
|
||||
]
|
||||
})) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('ISSUE test', () => {
|
||||
it('Results do not match other implementations #6', (done) => {
|
||||
let result = jsonpath.select(["first", "second"], "$[:]");
|
||||
if (JSON.stringify(result) === JSON.stringify(["first", "second"])) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
59  src/ffi/mod.rs  Normal file
@ -0,0 +1,59 @@
|
||||
use std::ffi::{CStr, CString};
|
||||
use std::os::raw::{c_char, c_void};
|
||||
|
||||
use {parser, select, select_as_str};
|
||||
|
||||
const INVALID_PATH: &str = "invalid path";
|
||||
const INVALID_JSON: &str = "invalid json";
|
||||
|
||||
fn to_str(v: *const c_char, err_msg: &str) -> &str {
|
||||
unsafe { CStr::from_ptr(v) }.to_str().expect(err_msg)
|
||||
}
|
||||
|
||||
fn to_char_ptr(v: &str) -> *const c_char {
|
||||
let s = CString::new(v).unwrap_or_else(|_| panic!("invalid string: {}", v));
|
||||
let ptr = s.as_ptr();
|
||||
std::mem::forget(s);
|
||||
ptr
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn ffi_select(json_str: *const c_char, path: *const c_char) -> *const c_char {
|
||||
let json_str = to_str(json_str, INVALID_JSON);
|
||||
let path = to_str(path, INVALID_PATH);
|
||||
match select_as_str(json_str, path) {
|
||||
Ok(v) => to_char_ptr(v.as_str()),
|
||||
Err(e) => {
|
||||
panic!("{:?}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
#[allow(clippy::forget_copy)]
|
||||
pub extern "C" fn ffi_path_compile(path: *const c_char) -> *mut c_void {
|
||||
let path = to_str(path, INVALID_PATH);
|
||||
let ref_node = Box::into_raw(Box::new(parser::Parser::compile(path).unwrap()));
|
||||
let ptr = ref_node as *mut c_void;
|
||||
std::mem::forget(ref_node);
|
||||
ptr
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn ffi_select_with_compiled_path(
|
||||
path_ptr: *mut c_void,
|
||||
json_ptr: *const c_char,
|
||||
) -> *const c_char {
|
||||
let node = unsafe { Box::from_raw(path_ptr as *mut parser::Node) };
|
||||
let json_str = to_str(json_ptr, INVALID_JSON);
|
||||
let json = serde_json::from_str(json_str)
|
||||
.unwrap_or_else(|_| panic!("invalid json string: {}", json_str));
|
||||
|
||||
let mut selector = select::Selector::default();
|
||||
let found = selector.compiled_path(&node).value(&json).select().unwrap();
|
||||
std::mem::forget(node);
|
||||
|
||||
let result = serde_json::to_string(&found)
|
||||
.unwrap_or_else(|_| panic!("json serialize error: {:?}", found));
|
||||
to_char_ptr(result.as_str())
|
||||
}
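A minimal calling sketch (an assumed test module, not part of this diff) to show the round trip through the C ABI above: the exported functions exchange C strings, and since `to_char_ptr` forgets its `CString`, the returned pointer is simply leaked here as well.

#[cfg(test)]
mod ffi_usage_sketch {
    // Hypothetical test illustrating the exported C ABI; `ffi_select` is the
    // function defined above, everything else comes from std.
    use std::ffi::{CStr, CString};
    use super::ffi_select;

    #[test]
    fn select_round_trip() {
        let json = CString::new(r#"{"a": 1}"#).unwrap();
        let path = CString::new("$.a").unwrap();
        // Returns a C string owned by the library side (never freed here).
        let ret = ffi_select(json.as_ptr(), path.as_ptr());
        let found = unsafe { CStr::from_ptr(ret) }.to_str().unwrap();
        assert_eq!(found, "[1]");
    }
}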
|
50  src/lib.rs
@ -125,7 +125,6 @@
|
||||
extern crate array_tool;
|
||||
extern crate core;
|
||||
extern crate env_logger;
|
||||
extern crate indexmap;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
extern crate serde;
|
||||
@ -133,10 +132,12 @@ extern crate serde_json;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
pub use parser::parser::{Node, Parser};
|
||||
pub use parser::Parser; // TODO private
|
||||
pub use select::JsonPathError;
|
||||
pub use select::{Selector, SelectorMut};
|
||||
|
||||
#[doc(hidden)]
|
||||
mod ffi;
|
||||
#[doc(hidden)]
|
||||
mod parser;
|
||||
#[doc(hidden)]
|
||||
@ -170,10 +171,10 @@ mod select;
|
||||
/// ]);
|
||||
/// ```
|
||||
pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPathError> {
|
||||
let node = Parser::compile(path);
|
||||
let node = parser::Parser::compile(path);
|
||||
move |json| match &node {
|
||||
Ok(node) => {
|
||||
let mut selector = Selector::new();
|
||||
let mut selector = Selector::default();
|
||||
selector.compiled_path(node).value(json).select()
|
||||
}
|
||||
Err(e) => Err(JsonPathError::Path(e.to_string())),
|
||||
@ -214,8 +215,9 @@ pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPath
|
||||
/// &json!({"name": "친구2", "age": 20})
|
||||
/// ]);
|
||||
/// ```
|
||||
pub fn selector<'a>(json: &'a Value) -> impl FnMut(&'a str) -> Result<Vec<&Value>, JsonPathError> {
|
||||
let mut selector = Selector::new();
|
||||
#[allow(clippy::needless_lifetimes)]
|
||||
pub fn selector<'a>(json: &'a Value) -> impl FnMut(&str) -> Result<Vec<&'a Value>, JsonPathError> {
|
||||
let mut selector = Selector::default();
|
||||
let _ = selector.value(json);
|
||||
move |path: &str| selector.str_path(path)?.reset_value().select()
|
||||
}
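A hedged caller-side sketch of the closure returned by `selector` (function and data names below are illustrative only): the JSON value is borrowed once and the same closure is reused with different paths.

fn selector_usage_sketch() {
    use serde_json::json;

    let json_obj = json!({"a": 1, "b": 2});
    // `selector` captures a borrow of `json_obj`; the closure is FnMut.
    let mut select = jsonpath_lib::selector(&json_obj);
    assert_eq!(select("$.a").unwrap(), vec![&json!(1)]);
    assert_eq!(select("$.b").unwrap(), vec![&json!(2)]);
}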
|
||||
@ -269,7 +271,7 @@ pub fn selector<'a>(json: &'a Value) -> impl FnMut(&'a str) -> Result<Vec<&Value
|
||||
pub fn selector_as<T: serde::de::DeserializeOwned>(
|
||||
json: &Value,
|
||||
) -> impl FnMut(&str) -> Result<Vec<T>, JsonPathError> + '_ {
|
||||
let mut selector = Selector::new();
|
||||
let mut selector = Selector::default();
|
||||
let _ = selector.value(json);
|
||||
move |path: &str| selector.str_path(path)?.reset_value().select_as()
|
||||
}
|
||||
@ -299,8 +301,8 @@ pub fn selector_as<T: serde::de::DeserializeOwned>(
|
||||
/// &json!({"name": "친구1", "age": 20})
|
||||
/// ]);
|
||||
/// ```
|
||||
pub fn select<'a>(json: &'a Value, path: &'a str) -> Result<Vec<&'a Value>, JsonPathError> {
|
||||
Selector::new().str_path(path)?.value(json).select()
|
||||
pub fn select<'a>(json: &'a Value, path: &str) -> Result<Vec<&'a Value>, JsonPathError> {
|
||||
Selector::default().str_path(path)?.value(json).select()
|
||||
}
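For contrast, a one-shot sketch of `select` (assumed caller-side code): it borrows the value and returns references to every match.

fn select_usage_sketch() {
    use serde_json::json;

    let json_obj = json!({"a": {"b": 1}});
    // `$..b` walks the whole tree and returns references into `json_obj`.
    let found = jsonpath_lib::select(&json_obj, "$..b").unwrap();
    assert_eq!(found, vec![&json!(1)]);
}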
|
||||
|
||||
/// It is the same as the `select` function, but it returns the result as a string.
|
||||
@ -328,7 +330,7 @@ pub fn select<'a>(json: &'a Value, path: &'a str) -> Result<Vec<&'a Value>, Json
|
||||
/// ```
|
||||
pub fn select_as_str(json_str: &str, path: &str) -> Result<String, JsonPathError> {
|
||||
let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
|
||||
let ret = Selector::new().str_path(path)?.value(&json).select()?;
|
||||
let ret = Selector::default().str_path(path)?.value(&json).select()?;
|
||||
serde_json::to_string(&ret).map_err(|e| JsonPathError::Serde(e.to_string()))
|
||||
}
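A string-in/string-out sketch of `select_as_str` (assumed caller code): the input is parsed, selected, and the matches are serialized back to a JSON array string.

fn select_as_str_usage_sketch() {
    // "$..b" matches the single number 1, serialized as the array "[1]".
    let found = jsonpath_lib::select_as_str(r#"{"a": {"b": 1}}"#, "$..b").unwrap();
    assert_eq!(found, "[1]");
}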
|
||||
|
||||
@ -375,7 +377,7 @@ pub fn select_as<T: serde::de::DeserializeOwned>(
|
||||
path: &str,
|
||||
) -> Result<Vec<T>, JsonPathError> {
|
||||
let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
|
||||
Selector::new().str_path(path)?.value(&json).select_as()
|
||||
Selector::default().str_path(path)?.value(&json).select_as()
|
||||
}
|
||||
|
||||
/// Delete (i.e. replace with null) the JSON property using the jsonpath.
|
||||
@ -411,14 +413,9 @@ pub fn select_as<T: serde::de::DeserializeOwned>(
|
||||
/// ]}));
|
||||
/// ```
|
||||
pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
|
||||
let mut selector = SelectorMut::new();
|
||||
let ret = selector
|
||||
.str_path(path)?
|
||||
.value(value)
|
||||
.delete()?
|
||||
.take()
|
||||
.unwrap_or(Value::Null);
|
||||
Ok(ret)
|
||||
let mut selector = SelectorMut::default();
|
||||
let value = selector.str_path(path)?.value(value).delete()?;
|
||||
Ok(value.take().unwrap_or(Value::Null))
|
||||
}
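A hedged sketch of the rewritten `delete` (caller-side, assumed data): matched nodes are replaced with `null`, mirroring the node.js `deleteValue` tests above.

fn delete_usage_sketch() {
    use serde_json::json;

    let ret = jsonpath_lib::delete(json!({"a": [1, 2]}), "$.a[0]").unwrap();
    // "Delete" means "replace with null", so the array keeps its length.
    assert_eq!(ret, json!({"a": [null, 2]}));
}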
|
||||
|
||||
/// Select JSON properties using a jsonpath, transform the result, and then replace it. Via a closure that implements `FnMut`, you can transform the selected results.
|
||||
@ -448,7 +445,7 @@ pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
|
||||
/// 0
|
||||
/// };
|
||||
///
|
||||
/// json!(age)
|
||||
/// Some(json!(age))
|
||||
/// }).unwrap();
|
||||
///
|
||||
/// assert_eq!(ret, json!({
|
||||
@ -465,14 +462,9 @@ pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
|
||||
/// ```
|
||||
pub fn replace_with<F>(value: Value, path: &str, fun: &mut F) -> Result<Value, JsonPathError>
|
||||
where
|
||||
F: FnMut(&Value) -> Value,
|
||||
F: FnMut(Value) -> Option<Value>,
|
||||
{
|
||||
let mut selector = SelectorMut::new();
|
||||
let ret = selector
|
||||
.str_path(path)?
|
||||
.value(value)
|
||||
.replace_with(fun)?
|
||||
.take()
|
||||
.unwrap_or(Value::Null);
|
||||
Ok(ret)
|
||||
let mut selector = SelectorMut::default();
|
||||
let value = selector.str_path(path)?.value(value).replace_with(fun)?;
|
||||
Ok(value.take().unwrap_or(Value::Null))
|
||||
}
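A sketch of the new closure contract `FnMut(Value) -> Option<Value>` (caller-side, assumed data): the closure now takes ownership of the matched value and returns `Some(replacement)`.

fn replace_with_usage_sketch() {
    use serde_json::{json, Value};

    let ret = jsonpath_lib::replace_with(json!({"a": 1}), "$.a", &mut |v: Value| {
        // Double the matched number; returning Some(..) installs the replacement.
        Some(json!(v.as_i64().unwrap_or(0) * 2))
    })
    .unwrap();
    assert_eq!(ret, json!({"a": 2}));
}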
|
||||
|
@ -1,10 +1,691 @@
|
||||
pub mod parser;
|
||||
mod path_reader;
|
||||
pub(crate) mod tokenizer;
|
||||
mod tokenizer;
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
use self::tokenizer::*;
|
||||
|
||||
const DUMMY: usize = 0;
|
||||
|
||||
type ParseResult<T> = Result<T, String>;
|
||||
|
||||
mod utils {
|
||||
use std::str::FromStr;
|
||||
|
||||
pub fn string_to_num<F, S: FromStr>(string: &str, msg_handler: F) -> Result<S, String>
|
||||
where
|
||||
F: Fn() -> String,
|
||||
{
|
||||
match string.parse() {
|
||||
Ok(n) => Ok(n),
|
||||
_ => Err(msg_handler()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum ParseToken {
|
||||
// '$'
|
||||
Absolute,
|
||||
// '@'
|
||||
Relative,
|
||||
// '.'
|
||||
In,
|
||||
// '..'
|
||||
Leaves,
|
||||
// '*'
|
||||
All,
|
||||
|
||||
Key(String),
|
||||
Keys(Vec<String>),
|
||||
// []
|
||||
Array,
|
||||
// meta token
|
||||
ArrayEof,
|
||||
// ?( filter )
|
||||
Filter(FilterToken),
|
||||
// 1 : 2
|
||||
Range(Option<isize>, Option<isize>, Option<usize>),
|
||||
// 1, 2, 3
|
||||
Union(Vec<isize>),
|
||||
|
||||
Number(f64),
|
||||
|
||||
Bool(bool),
|
||||
|
||||
Eof,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum FilterToken {
|
||||
Equal,
|
||||
NotEqual,
|
||||
Little,
|
||||
LittleOrEqual,
|
||||
Greater,
|
||||
GreaterOrEqual,
|
||||
And,
|
||||
Or,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Node {
|
||||
left: Option<Box<Node>>,
|
||||
right: Option<Box<Node>>,
|
||||
token: ParseToken,
|
||||
}
|
||||
|
||||
pub struct Parser;
|
||||
|
||||
impl Parser {
|
||||
pub fn compile(input: &str) -> ParseResult<Node> {
|
||||
let mut tokenizer = TokenReader::new(input);
|
||||
Ok(Self::json_path(&mut tokenizer)?)
|
||||
}
|
||||
|
||||
fn json_path(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#json_path");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Absolute(_)) => {
|
||||
let node = Self::node(ParseToken::Absolute);
|
||||
Self::paths(node, tokenizer)
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn paths(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#paths");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::paths_dot(prev, tokenizer)
|
||||
}
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
let node = Self::array(prev, tokenizer)?;
|
||||
Self::paths(node, tokenizer)
|
||||
}
|
||||
_ => Ok(prev),
|
||||
}
|
||||
}
|
||||
|
||||
fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#paths_dot");
|
||||
let node = Self::path(prev, tokenizer)?;
|
||||
Self::paths(node, tokenizer)
|
||||
}
|
||||
|
||||
fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => Self::path_leaves(prev, tokenizer),
|
||||
Ok(Token::Asterisk(_)) => Self::path_in_all(prev, tokenizer),
|
||||
Ok(Token::Key(_, _)) => Self::path_in_key(prev, tokenizer),
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::array(prev, tokenizer)
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn path_leaves(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_leaves");
|
||||
Self::eat_token(tokenizer);
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Asterisk(_)) => Self::path_leaves_all(prev, tokenizer),
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
let mut leaves_node = Self::node(ParseToken::Leaves);
|
||||
leaves_node.left = Some(Box::new(prev));
|
||||
Ok(Self::paths(leaves_node, tokenizer)?)
|
||||
}
|
||||
_ => Self::path_leaves_key(prev, tokenizer),
|
||||
}
|
||||
}
|
||||
|
||||
fn path_leaves_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_leaves_key");
|
||||
Ok(Node {
|
||||
token: ParseToken::Leaves,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::key(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
|
||||
fn path_leaves_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_leaves_all");
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Leaves,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
|
||||
fn path_in_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_in_all");
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::In,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
|
||||
fn path_in_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_in_key");
|
||||
Ok(Node {
|
||||
token: ParseToken::In,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::key(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
|
||||
fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#key");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(_, v)) => Ok(Self::node(ParseToken::Key(v))),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#boolean");
|
||||
|
||||
fn validation_bool_value(v: &str) -> bool {
|
||||
let b = v.as_bytes();
|
||||
!b.is_empty() && (b[0] == b't' || b[0] == b'T' || b[0] == b'f' || b[0] == b'F')
|
||||
}
|
||||
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(_, ref v)) if validation_bool_value(v) => {
|
||||
Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn array_keys(tokenizer: &mut TokenReader, first_key: String) -> ParseResult<Node> {
|
||||
let mut keys = vec![first_key];
|
||||
|
||||
while let Ok(Token::Comma(_)) = tokenizer.peek_token() {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
|
||||
keys.push(val);
|
||||
}
|
||||
_ => return Err(tokenizer.err_msg()),
|
||||
}
|
||||
|
||||
Self::eat_whitespace(tokenizer);
|
||||
}
|
||||
|
||||
Ok(Self::node(ParseToken::Keys(keys)))
|
||||
}
|
||||
|
||||
fn array_quote_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_quote_value");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
|
||||
if let Ok(Token::Comma(_)) = tokenizer.peek_token() {
|
||||
Self::array_keys(tokenizer, val)
|
||||
} else {
|
||||
Ok(Self::node(ParseToken::Key(val)))
|
||||
}
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn array_start(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_start");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Question(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::filter(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
Ok(Token::Asterisk(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
_ => Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::array_value(tokenizer)?)),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn array(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array");
|
||||
let ret = Self::array_start(prev, tokenizer)?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Self::close_token(ret, Token::CloseArray(DUMMY), tokenizer)
|
||||
}
|
||||
|
||||
fn array_value_key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_value_key");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Comma(_)) => Self::union(digit, tokenizer),
|
||||
Ok(Token::Split(_)) => Self::range_from(digit, tokenizer),
|
||||
_ => Ok(Self::node(ParseToken::Number(digit as f64))),
|
||||
}
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_value");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Key(_, _)) => Self::array_value_key(tokenizer),
|
||||
Ok(Token::Split(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::range_to(tokenizer)
|
||||
}
|
||||
Ok(Token::DoubleQuoted(_, _)) | Ok(Token::SingleQuoted(_, _)) => {
|
||||
Self::array_quote_value(tokenizer)
|
||||
}
|
||||
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
|
||||
_ => {
|
||||
Self::eat_token(tokenizer);
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn union(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#union");
|
||||
let mut values = vec![num];
|
||||
while match tokenizer.peek_token() {
|
||||
Ok(Token::Comma(_)) => true,
|
||||
_ => false,
|
||||
} {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
values.push(digit);
|
||||
}
|
||||
_ => {
|
||||
return Err(tokenizer.err_msg());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self::node(ParseToken::Union(values)))
|
||||
}
|
||||
|
||||
fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Split(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
}
|
||||
_ => {
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Key(_, _)) => {}
|
||||
_ => {
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, str_step)) => {
|
||||
match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
|
||||
Ok(step) => Ok(Some(step)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn range_from(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#range_from");
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Key(_, _)) => Self::range(from, tokenizer),
|
||||
Ok(Token::Split(_)) => match Self::range_value(tokenizer)? {
|
||||
Some(step) => Ok(Self::node(ParseToken::Range(Some(from), None, Some(step)))),
|
||||
_ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
|
||||
},
|
||||
_ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
|
||||
}
|
||||
}
|
||||
|
||||
fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#range_to");
|
||||
|
||||
if let Some(step) = Self::range_value(tokenizer)? {
|
||||
return Ok(Self::node(ParseToken::Range(None, None, Some(step))));
|
||||
}
|
||||
|
||||
if let Ok(Token::CloseArray(_)) = tokenizer.peek_token() {
|
||||
return Ok(Self::node(ParseToken::Range(None, None, None)));
|
||||
}
|
||||
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref to_str)) => {
|
||||
let to = utils::string_to_num(to_str, || tokenizer.err_msg_with_pos(pos))?;
|
||||
let step = Self::range_value(tokenizer)?;
|
||||
Ok(Self::node(ParseToken::Range(None, Some(to), step)))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn range(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#range");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref str_to)) => {
|
||||
let to = utils::string_to_num(str_to, || tokenizer.err_msg_with_pos(pos))?;
|
||||
let step = Self::range_value(tokenizer)?;
|
||||
Ok(Self::node(ParseToken::Range(Some(from), Some(to), step)))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn filter(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#filter");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::OpenParenthesis(_)) => {
|
||||
let ret = Self::exprs(tokenizer)?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn exprs(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
Self::eat_whitespace(tokenizer);
|
||||
debug!("#exprs");
|
||||
let node = match tokenizer.peek_token() {
|
||||
Ok(Token::OpenParenthesis(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
trace!("\t-exprs - open_parenthesis");
|
||||
let ret = Self::exprs(tokenizer)?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)?
|
||||
}
|
||||
_ => {
|
||||
trace!("\t-exprs - else");
|
||||
Self::expr(tokenizer)?
|
||||
}
|
||||
};
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Self::condition_expr(node, tokenizer)
|
||||
}
|
||||
|
||||
fn condition_expr(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#condition_expr");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::And(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Filter(FilterToken::And),
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::exprs(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
Ok(Token::Or(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Filter(FilterToken::Or),
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::exprs(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
_ => Ok(prev),
|
||||
}
|
||||
}
|
||||
|
||||
fn expr(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#expr");
|
||||
|
||||
let has_prop_candidate = match tokenizer.peek_token() {
|
||||
Ok(Token::At(_)) => true,
|
||||
_ => false,
|
||||
};
|
||||
|
||||
let node = Self::term(tokenizer)?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
if match tokenizer.peek_token() {
|
||||
Ok(Token::Equal(_))
|
||||
| Ok(Token::NotEqual(_))
|
||||
| Ok(Token::Little(_))
|
||||
| Ok(Token::LittleOrEqual(_))
|
||||
| Ok(Token::Greater(_))
|
||||
| Ok(Token::GreaterOrEqual(_)) => true,
|
||||
_ => false,
|
||||
} {
|
||||
Self::op(node, tokenizer)
|
||||
} else if has_prop_candidate {
|
||||
Ok(node)
|
||||
} else {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
|
||||
fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#term_num");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, val)) => match tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => Self::term_num_float(val.as_str(), tokenizer),
|
||||
_ => {
|
||||
let number = utils::string_to_num(&val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(Self::node(ParseToken::Number(number)))
|
||||
}
|
||||
},
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn term_num_float(num: &str, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#term_num_float");
|
||||
Self::eat_token(tokenizer);
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, frac)) => {
|
||||
let mut f = String::new();
|
||||
f.push_str(&num);
|
||||
f.push('.');
|
||||
f.push_str(frac.as_str());
|
||||
let number = utils::string_to_num(&f, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(Self::node(ParseToken::Number(number)))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#term");
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::At(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
let node = Self::node(ParseToken::Relative);
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Whitespace(_, _)) => {
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Ok(node)
|
||||
}
|
||||
_ => Self::paths(node, tokenizer),
|
||||
}
|
||||
}
|
||||
Ok(Token::Absolute(_)) => {
|
||||
Self::json_path(tokenizer)
|
||||
},
|
||||
Ok(Token::DoubleQuoted(_, _)) | Ok(Token::SingleQuoted(_, _)) => {
|
||||
Self::array_quote_value(tokenizer)
|
||||
},
|
||||
Ok(Token::Key(_, key)) => {
|
||||
match key.as_bytes()[0] {
|
||||
b'-' | b'0'..=b'9' => Self::term_num(tokenizer),
|
||||
_ => Self::boolean(tokenizer),
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#op");
|
||||
let token = match tokenizer.next_token() {
|
||||
Ok(Token::Equal(_)) => ParseToken::Filter(FilterToken::Equal),
|
||||
Ok(Token::NotEqual(_)) => ParseToken::Filter(FilterToken::NotEqual),
|
||||
Ok(Token::Little(_)) => ParseToken::Filter(FilterToken::Little),
|
||||
Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
|
||||
Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
|
||||
Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
|
||||
_ => {
|
||||
return Err(tokenizer.err_msg());
|
||||
}
|
||||
};
|
||||
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
Ok(Node {
|
||||
token,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::term(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
|
||||
fn eat_whitespace(tokenizer: &mut TokenReader) {
|
||||
while let Ok(Token::Whitespace(_, _)) = tokenizer.peek_token() {
|
||||
let _ = tokenizer.next_token();
|
||||
}
|
||||
}
|
||||
|
||||
fn eat_token(tokenizer: &mut TokenReader) {
|
||||
let _ = tokenizer.next_token();
|
||||
}
|
||||
|
||||
fn node(token: ParseToken) -> Node {
|
||||
Node {
|
||||
left: None,
|
||||
right: None,
|
||||
token,
|
||||
}
|
||||
}
|
||||
|
||||
fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#close_token");
|
||||
match tokenizer.next_token() {
|
||||
Ok(ref t) if t.is_match_token_type(token) => Ok(ret),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait NodeVisitor {
|
||||
fn visit(&mut self, node: &Node) {
|
||||
match &node.token {
|
||||
ParseToken::Absolute
|
||||
| ParseToken::Relative
|
||||
| ParseToken::All
|
||||
| ParseToken::Key(_)
|
||||
| ParseToken::Keys(_)
|
||||
| ParseToken::Range(_, _, _)
|
||||
| ParseToken::Union(_)
|
||||
| ParseToken::Number(_)
|
||||
| ParseToken::Bool(_) => {
|
||||
self.visit_token(&node.token);
|
||||
}
|
||||
ParseToken::In | ParseToken::Leaves => {
|
||||
if let Some(n) = &node.left {
|
||||
self.visit(&*n);
|
||||
}
|
||||
|
||||
self.visit_token(&node.token);
|
||||
|
||||
if let Some(n) = &node.right {
|
||||
self.visit(&*n);
|
||||
}
|
||||
}
|
||||
ParseToken::Array => {
|
||||
if let Some(n) = &node.left {
|
||||
self.visit(&*n);
|
||||
}
|
||||
|
||||
self.visit_token(&node.token);
|
||||
|
||||
if let Some(n) = &node.right {
|
||||
self.visit(&*n);
|
||||
}
|
||||
|
||||
self.visit_token(&ParseToken::ArrayEof);
|
||||
}
|
||||
ParseToken::Filter(FilterToken::And) | ParseToken::Filter(FilterToken::Or) => {
|
||||
if let Some(n) = &node.left {
|
||||
self.visit(&*n);
|
||||
}
|
||||
|
||||
if let Some(n) = &node.right {
|
||||
self.visit(&*n);
|
||||
}
|
||||
|
||||
self.visit_token(&node.token);
|
||||
}
|
||||
ParseToken::Filter(_) => {
|
||||
if let Some(n) = &node.left {
|
||||
self.visit(&*n);
|
||||
}
|
||||
|
||||
self.end_term();
|
||||
|
||||
if let Some(n) = &node.right {
|
||||
self.visit(&*n);
|
||||
}
|
||||
|
||||
self.end_term();
|
||||
|
||||
self.visit_token(&node.token);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_token(&mut self, token: &ParseToken);
|
||||
fn end_term(&mut self) {}
|
||||
}
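A minimal visitor sketch (assumed, modeled on the test visitor below): it implements `visit_token` to flatten a compiled path into its `ParseToken` sequence.

struct TokenCollector {
    tokens: Vec<ParseToken>,
}

impl NodeVisitor for TokenCollector {
    fn visit_token(&mut self, token: &ParseToken) {
        // Record every token in visit order; `visit` drives the traversal.
        self.tokens.push(token.clone());
    }
}

fn flatten_sketch(path: &str) -> Result<Vec<ParseToken>, String> {
    // e.g. "$.a" flattens to [Absolute, In, Key("a")].
    let node = Parser::compile(path)?;
    let mut collector = TokenCollector { tokens: Vec::new() };
    collector.visit(&node);
    Ok(collector.tokens)
}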
|
||||
|
||||
#[cfg(test)]
|
||||
mod parser_tests {
|
||||
use parser::parser::{FilterToken, NodeVisitor, ParseToken, Parser};
|
||||
use parser::{FilterToken, NodeVisitor, ParseToken, Parser};
|
||||
|
||||
struct NodeVisitorTestImpl<'a> {
|
||||
input: &'a str,
|
||||
@ -41,6 +722,34 @@ mod parser_tests {
|
||||
interpreter.start()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_error() {
|
||||
setup();
|
||||
|
||||
fn invalid(path: &str) {
|
||||
assert!(run(path).is_err());
|
||||
}
|
||||
|
||||
invalid("$[]");
|
||||
invalid("$[a]");
|
||||
invalid("$[?($.a)]");
|
||||
invalid("$[?(@.a > @.b]");
|
||||
invalid("$[?(@.a < @.b&&(@.c < @.d)]");
|
||||
invalid("@.");
|
||||
invalid("$..[?(a <= @.a)]"); // invalid term value
|
||||
invalid("$['a', b]");
|
||||
invalid("$[0, >=]");
|
||||
invalid("$[a:]");
|
||||
invalid("$[:a]");
|
||||
invalid("$[::a]");
|
||||
invalid("$[:>]");
|
||||
invalid("$[1:>]");
|
||||
invalid("$[1,,]");
|
||||
invalid("$[?]");
|
||||
invalid("$[?(1 = 1)]");
|
||||
invalid("$[?(1 = >)]");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_path() {
|
||||
setup();
|
||||
@ -103,24 +812,40 @@ mod parser_tests {
|
||||
])
|
||||
);
|
||||
|
||||
match run("$.") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
assert_eq!(
|
||||
run("$.$a"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::In,
|
||||
ParseToken::Key("$a".to_owned())
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run("$.['$a']"),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Key("$a".to_owned()),
|
||||
ParseToken::ArrayEof,
|
||||
])
|
||||
);
|
||||
|
||||
if run("$.").is_ok() {
|
||||
panic!();
|
||||
}
|
||||
|
||||
match run("$..") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
if run("$..").is_ok() {
|
||||
panic!();
|
||||
}
|
||||
|
||||
match run("$. a") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
if run("$. a").is_ok() {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_array_sytax() {
|
||||
fn parse_array_syntax() {
|
||||
setup();
|
||||
|
||||
assert_eq!(
|
||||
@ -434,6 +1159,18 @@ mod parser_tests {
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run(r#"$[?(@ > 1)]"#),
|
||||
Ok(vec![
|
||||
ParseToken::Absolute,
|
||||
ParseToken::Array,
|
||||
ParseToken::Relative,
|
||||
ParseToken::Number(1_f64),
|
||||
ParseToken::Filter(FilterToken::Greater),
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
run("$[:]"),
|
||||
Ok(vec![
|
||||
@ -463,36 +1200,6 @@ mod parser_tests {
|
||||
ParseToken::ArrayEof
|
||||
])
|
||||
);
|
||||
|
||||
match run("$[") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match run("$[]") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match run("$[a]") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match run("$[?($.a)]") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match run("$[?(@.a > @.b]") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match run("$[?(@.a < @.b&&(@.c < @.d)]") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -511,24 +1218,20 @@ mod parser_tests {
|
||||
])
|
||||
);
|
||||
|
||||
match run("$[1.1]") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
if run("$[1.1]").is_ok() {
|
||||
panic!();
|
||||
}
|
||||
|
||||
match run("$[?(1.1<.2)]") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
if run("$[?(1.1<.2)]").is_ok() {
|
||||
panic!();
|
||||
}
|
||||
|
||||
match run("$[?(1.1<2.)]") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
if run("$[?(1.1<2.)]").is_ok() {
|
||||
panic!();
|
||||
}
|
||||
|
||||
match run("$[?(1.1<2.a)]") {
|
||||
Ok(_) => panic!(),
|
||||
_ => {}
|
||||
if run("$[?(1.1<2.a)]").is_ok() {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -553,7 +1256,7 @@ mod tokenizer_tests {
|
||||
}
|
||||
|
||||
fn run(input: &str, expected: (Vec<Token>, Option<TokenError>)) {
|
||||
let (vec, err) = collect_token(input.clone());
|
||||
let (vec, err) = collect_token(input);
|
||||
assert_eq!((vec, err), expected, "\"{}\"", input);
|
||||
}
|
||||
|
||||
@ -776,6 +1479,21 @@ mod tokenizer_tests {
|
||||
),
|
||||
);
|
||||
|
||||
run(
|
||||
r#"$['single\'1','single\'2']"#,
|
||||
(
|
||||
vec![
|
||||
Token::Absolute(0),
|
||||
Token::OpenArray(1),
|
||||
Token::SingleQuoted(2, "single\'1".to_string()),
|
||||
Token::Comma(13),
|
||||
Token::SingleQuoted(14, "single\'2".to_string()),
|
||||
Token::CloseArray(25),
|
||||
],
|
||||
Some(TokenError::Eof),
|
||||
),
|
||||
);
|
||||
|
||||
run(
|
||||
r#"$["double\"quote"]"#,
|
||||
(
|
||||
|
@ -1,709 +0,0 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use super::tokenizer::*;
|
||||
|
||||
const DUMMY: usize = 0;
|
||||
|
||||
type ParseResult<T> = Result<T, String>;
|
||||
|
||||
mod utils {
|
||||
use std::str::FromStr;
|
||||
|
||||
pub fn string_to_num<F, S: FromStr>(string: &String, msg_handler: F) -> Result<S, String>
|
||||
where
|
||||
F: Fn() -> String,
|
||||
{
|
||||
match string.as_str().parse() {
|
||||
Ok(n) => Ok(n),
|
||||
_ => Err(msg_handler()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum ParseToken {
|
||||
// '$'
|
||||
Absolute,
|
||||
// '@'
|
||||
Relative,
|
||||
// '.'
|
||||
In,
|
||||
// '..'
|
||||
Leaves,
|
||||
// '*'
|
||||
All,
|
||||
|
||||
Key(String),
|
||||
Keys(Vec<String>),
|
||||
// []
|
||||
Array,
|
||||
// 메타토큰
|
||||
ArrayEof,
|
||||
// ?( filter )
|
||||
Filter(FilterToken),
|
||||
// 1 : 2
|
||||
Range(Option<isize>, Option<isize>, Option<usize>),
|
||||
// 1, 2, 3
|
||||
Union(Vec<isize>),
|
||||
|
||||
Number(f64),
|
||||
|
||||
Bool(bool),
|
||||
|
||||
Eof,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum FilterToken {
|
||||
Equal,
|
||||
NotEqual,
|
||||
Little,
|
||||
LittleOrEqual,
|
||||
Greater,
|
||||
GreaterOrEqual,
|
||||
And,
|
||||
Or,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Node {
|
||||
left: Option<Box<Node>>,
|
||||
right: Option<Box<Node>>,
|
||||
token: ParseToken,
|
||||
}
|
||||
|
||||
pub struct Parser;
|
||||
|
||||
impl Parser {
|
||||
pub fn compile(input: &str) -> ParseResult<Node> {
|
||||
let mut tokenizer = TokenReader::new(input);
|
||||
Ok(Self::json_path(&mut tokenizer)?)
|
||||
}
|
||||
|
||||
fn json_path(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#json_path");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Absolute(_)) => {
|
||||
let node = Self::node(ParseToken::Absolute);
|
||||
Self::paths(node, tokenizer)
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn paths(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#paths");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::paths_dot(prev, tokenizer)
|
||||
}
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
let node = Self::array(prev, tokenizer)?;
|
||||
Self::paths(node, tokenizer)
|
||||
}
|
||||
_ => Ok(prev),
|
||||
}
|
||||
}
|
||||
|
||||
fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#paths_dot");
|
||||
let node = Self::path(prev, tokenizer)?;
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Equal(_))
|
||||
| Ok(Token::NotEqual(_))
|
||||
| Ok(Token::Little(_))
|
||||
| Ok(Token::LittleOrEqual(_))
|
||||
| Ok(Token::Greater(_))
|
||||
| Ok(Token::GreaterOrEqual(_))
|
||||
| Ok(Token::And(_))
|
||||
| Ok(Token::Or(_)) => Ok(node),
|
||||
_ => Self::paths(node, tokenizer),
|
||||
}
|
||||
}
|
||||
|
||||
fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => Self::path_leaves(prev, tokenizer),
|
||||
Ok(Token::Asterisk(_)) => Self::path_in_all(prev, tokenizer),
|
||||
Ok(Token::Key(_, _)) => Self::path_in_key(prev, tokenizer),
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::array(prev, tokenizer)
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn path_leaves(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_leaves");
|
||||
Self::eat_token(tokenizer);
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Asterisk(_)) => Self::path_leaves_all(prev, tokenizer),
|
||||
Ok(Token::OpenArray(_)) => {
|
||||
let mut leaves_node = Self::node(ParseToken::Leaves);
|
||||
leaves_node.left = Some(Box::new(prev));
|
||||
Ok(Self::paths(leaves_node, tokenizer)?)
|
||||
}
|
||||
_ => Self::path_leaves_key(prev, tokenizer),
|
||||
}
|
||||
}
|
||||
|
||||
fn path_leaves_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_leaves_key");
|
||||
Ok(Node {
|
||||
token: ParseToken::Leaves,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::key(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
|
||||
fn path_leaves_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_leaves_all");
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Leaves,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
|
||||
fn path_in_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_in_all");
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::In,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
|
||||
fn path_in_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#path_in_key");
|
||||
Ok(Node {
|
||||
token: ParseToken::In,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::key(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
|
||||
fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#key");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(_, v)) => Ok(Self::node(ParseToken::Key(v))),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#boolean");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(_, v)) => {
|
||||
Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn array_keys(tokenizer: &mut TokenReader, first_key: String) -> ParseResult<Node> {
|
||||
let mut keys = vec![first_key];
|
||||
while tokenizer.peek_is(COMMA) {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
if !(tokenizer.peek_is(SINGLE_QUOTE) || tokenizer.peek_is(DOUBLE_QUOTE)) {
|
||||
return Err(tokenizer.err_msg());
|
||||
}
|
||||
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
|
||||
keys.push(val);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Self::eat_whitespace(tokenizer);
|
||||
}
|
||||
|
||||
Ok(Self::node(ParseToken::Keys(keys)))
|
||||
}
|
||||
|
||||
fn array_quote_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_quote_value");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
|
||||
if !tokenizer.peek_is(COMMA) {
|
||||
Ok(Self::node(ParseToken::Key(val)))
|
||||
} else {
|
||||
Self::array_keys(tokenizer, val)
|
||||
}
|
||||
}
|
||||
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn array_start(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_start");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Question(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::filter(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
Ok(Token::Asterisk(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::node(ParseToken::All))),
|
||||
})
|
||||
}
|
||||
_ => Ok(Node {
|
||||
token: ParseToken::Array,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::array_value(tokenizer)?)),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn array(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array");
|
||||
let ret = Self::array_start(prev, tokenizer)?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Self::close_token(ret, Token::CloseArray(DUMMY), tokenizer)
|
||||
}
|
||||
|
||||
fn array_value_key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_value_key");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Comma(_)) => Self::union(digit, tokenizer),
|
||||
Ok(Token::Split(_)) => Self::range_from(digit, tokenizer),
|
||||
_ => Ok(Self::node(ParseToken::Number(digit as f64))),
|
||||
}
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#array_value");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Key(_, _)) => Self::array_value_key(tokenizer),
|
||||
Ok(Token::Split(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::range_to(tokenizer)
|
||||
}
|
||||
Ok(Token::DoubleQuoted(_, _)) | Ok(Token::SingleQuoted(_, _)) => {
|
||||
Self::array_quote_value(tokenizer)
|
||||
}
|
||||
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
|
||||
_ => {
|
||||
Self::eat_token(tokenizer);
|
||||
Err(tokenizer.err_msg())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn union(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#union");
|
||||
let mut values = vec![num];
|
||||
while match tokenizer.peek_token() {
|
||||
Ok(Token::Comma(_)) => true,
|
||||
_ => false,
|
||||
} {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref val)) => {
|
||||
let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
values.push(digit);
|
||||
}
|
||||
_ => {
|
||||
return Err(tokenizer.err_msg());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self::node(ParseToken::Union(values)))
|
||||
}
|
||||
|
||||
fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
if tokenizer.peek_is(SPLIT) {
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
if tokenizer.peek_is(KEY) {
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, str_step)) => {
|
||||
match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
|
||||
Ok(step) => Ok(Some(step)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
_ => Ok(None),
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
fn range_from(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#range_from");
|
||||
Self::eat_token(tokenizer);
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::Key(_, _)) => Self::range(from, tokenizer),
|
||||
Ok(Token::Split(_)) => match Self::range_value(tokenizer)? {
|
||||
Some(step) => Ok(Self::node(ParseToken::Range(Some(from), None, Some(step)))),
|
||||
_ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
|
||||
},
|
||||
_ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
|
||||
}
|
||||
}
|
||||
|
||||
fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#range_to");
|
||||
|
||||
match Self::range_value(tokenizer)? {
|
||||
Some(step) => return Ok(Self::node(ParseToken::Range(None, None, Some(step)))),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::CloseArray(_)) => {
|
||||
return Ok(Self::node(ParseToken::Range(None, None, None)));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref to_str)) => {
|
||||
let to = utils::string_to_num(to_str, || tokenizer.err_msg_with_pos(pos))?;
|
||||
let step = Self::range_value(tokenizer)?;
|
||||
Ok(Self::node(ParseToken::Range(None, Some(to), step)))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn range(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#range");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, ref str_to)) => {
|
||||
let to = utils::string_to_num(str_to, || tokenizer.err_msg_with_pos(pos))?;
|
||||
let step = Self::range_value(tokenizer)?;
|
||||
Ok(Self::node(ParseToken::Range(Some(from), Some(to), step)))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn filter(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#filter");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::OpenParenthesis(_)) => {
|
||||
let ret = Self::exprs(tokenizer)?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
|
||||
}
|
||||
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn exprs(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
Self::eat_whitespace(tokenizer);
|
||||
debug!("#exprs");
|
||||
let node = match tokenizer.peek_token() {
|
||||
Ok(Token::OpenParenthesis(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
trace!("\t-exprs - open_parenthesis");
|
||||
let ret = Self::exprs(tokenizer)?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)?
|
||||
}
|
||||
_ => {
|
||||
trace!("\t-exprs - else");
|
||||
Self::expr(tokenizer)?
|
||||
}
|
||||
};
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Self::condition_expr(node, tokenizer)
|
||||
}
|
||||
|
||||
fn condition_expr(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#condition_expr");
|
||||
match tokenizer.peek_token() {
|
||||
Ok(Token::And(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Filter(FilterToken::And),
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::exprs(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
Ok(Token::Or(_)) => {
|
||||
Self::eat_token(tokenizer);
|
||||
Ok(Node {
|
||||
token: ParseToken::Filter(FilterToken::Or),
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::exprs(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
_ => Ok(prev),
|
||||
}
|
||||
}
|
||||
|
||||
fn expr(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#expr");
|
||||
|
||||
let has_prop_candidate = match tokenizer.peek_token() {
|
||||
Ok(Token::At(_)) => true,
|
||||
_ => false,
|
||||
};
|
||||
|
||||
let node = Self::term(tokenizer)?;
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
if match tokenizer.peek_token() {
|
||||
Ok(Token::Equal(_))
|
||||
| Ok(Token::NotEqual(_))
|
||||
| Ok(Token::Little(_))
|
||||
| Ok(Token::LittleOrEqual(_))
|
||||
| Ok(Token::Greater(_))
|
||||
| Ok(Token::GreaterOrEqual(_)) => true,
|
||||
_ => false,
|
||||
} {
|
||||
Self::op(node, tokenizer)
|
||||
} else if has_prop_candidate {
|
||||
Ok(node)
|
||||
} else {
|
||||
return Err(tokenizer.err_msg());
|
||||
}
|
||||
}
|
||||
|
||||
fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#term_num");
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, val)) => match tokenizer.peek_token() {
|
||||
Ok(Token::Dot(_)) => Self::term_num_float(val.as_str(), tokenizer),
|
||||
_ => {
|
||||
let number = utils::string_to_num(&val, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(Self::node(ParseToken::Number(number)))
|
||||
}
|
||||
},
|
||||
Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn term_num_float(mut num: &str, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#term_num_float");
|
||||
Self::eat_token(tokenizer);
|
||||
match tokenizer.next_token() {
|
||||
Ok(Token::Key(pos, frac)) => {
|
||||
let mut f = String::new();
|
||||
f.push_str(&mut num);
|
||||
f.push('.');
|
||||
f.push_str(frac.as_str());
|
||||
let number = utils::string_to_num(&f, || tokenizer.err_msg_with_pos(pos))?;
|
||||
Ok(Self::node(ParseToken::Number(number)))
|
||||
}
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
|
||||
fn peek_key(tokenizer: &mut TokenReader) -> Option<String> {
|
||||
if let Ok(Token::Key(_, k)) = tokenizer.peek_token() {
|
||||
Some(k.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#term");
|
||||
|
||||
if tokenizer.peek_is(AT) {
|
||||
Self::eat_token(tokenizer);
|
||||
let node = Self::node(ParseToken::Relative);
|
||||
|
||||
return match tokenizer.peek_token() {
|
||||
Ok(Token::Whitespace(_, _)) => {
|
||||
Self::eat_whitespace(tokenizer);
|
||||
Ok(node)
|
||||
}
|
||||
_ => Self::paths(node, tokenizer),
|
||||
};
|
||||
}
|
||||
|
||||
if tokenizer.peek_is(ABSOLUTE) {
|
||||
return Self::json_path(tokenizer);
|
||||
}
|
||||
|
||||
if tokenizer.peek_is(DOUBLE_QUOTE) || tokenizer.peek_is(SINGLE_QUOTE) {
|
||||
return Self::array_quote_value(tokenizer);
|
||||
}
|
||||
|
||||
if tokenizer.peek_is(KEY) {
|
||||
return match Self::peek_key(tokenizer) {
|
||||
Some(key) => match key.chars().next() {
|
||||
Some(ch) => match ch {
|
||||
'-' | '0'...'9' => Self::term_num(tokenizer),
|
||||
_ => Self::boolean(tokenizer),
|
||||
},
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
},
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
};
|
||||
}
|
||||
|
||||
return Err(tokenizer.err_msg());
|
||||
}
|
||||
|
||||
fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#op");
|
||||
let token = match tokenizer.next_token() {
|
||||
Ok(Token::Equal(_)) => ParseToken::Filter(FilterToken::Equal),
|
||||
Ok(Token::NotEqual(_)) => ParseToken::Filter(FilterToken::NotEqual),
|
||||
Ok(Token::Little(_)) => ParseToken::Filter(FilterToken::Little),
|
||||
Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
|
||||
Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
|
||||
Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
|
||||
Err(TokenError::Eof) => ParseToken::Eof,
|
||||
_ => {
|
||||
return Err(tokenizer.err_msg());
|
||||
}
|
||||
};
|
||||
|
||||
Self::eat_whitespace(tokenizer);
|
||||
|
||||
Ok(Node {
|
||||
token,
|
||||
left: Some(Box::new(prev)),
|
||||
right: Some(Box::new(Self::term(tokenizer)?)),
|
||||
})
|
||||
}
|
||||
|
||||
fn eat_whitespace(tokenizer: &mut TokenReader) {
|
||||
while let Ok(Token::Whitespace(_, _)) = tokenizer.peek_token() {
|
||||
let _ = tokenizer.next_token();
|
||||
}
|
||||
}
|
||||
|
||||
fn eat_token(tokenizer: &mut TokenReader) {
|
||||
let _ = tokenizer.next_token();
|
||||
}
|
||||
|
||||
fn node(token: ParseToken) -> Node {
|
||||
Node {
|
||||
left: None,
|
||||
right: None,
|
||||
token,
|
||||
}
|
||||
}
|
||||
|
||||
fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
|
||||
debug!("#close_token");
|
||||
match tokenizer.next_token() {
|
||||
Ok(ref t) if t.partial_eq(token) => Ok(ret),
|
||||
_ => Err(tokenizer.err_msg()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait NodeVisitor {
|
||||
fn visit(&mut self, node: &Node) {
|
||||
match &node.token {
|
||||
ParseToken::Absolute
|
||||
| ParseToken::Relative
|
||||
| ParseToken::All
|
||||
| ParseToken::Key(_)
|
||||
| ParseToken::Keys(_)
|
||||
| ParseToken::Range(_, _, _)
|
||||
| ParseToken::Union(_)
|
||||
| ParseToken::Number(_)
|
||||
| ParseToken::Bool(_) => {
|
||||
self.visit_token(&node.token);
|
||||
}
|
||||
ParseToken::In | ParseToken::Leaves => {
|
||||
match &node.left {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.visit_token(&node.token);
|
||||
|
||||
match &node.right {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
ParseToken::Array => {
|
||||
match &node.left {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.visit_token(&node.token);
|
||||
|
||||
match &node.right {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
}
|
||||
self.visit_token(&ParseToken::ArrayEof);
|
||||
}
|
||||
ParseToken::Filter(FilterToken::And) | ParseToken::Filter(FilterToken::Or) => {
|
||||
match &node.left {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match &node.right {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.visit_token(&node.token);
|
||||
}
|
||||
ParseToken::Filter(_) => {
|
||||
match &node.left {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.end_term();
|
||||
|
||||
match &node.right {
|
||||
Some(n) => self.visit(&*n),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.end_term();
|
||||
|
||||
self.visit_token(&node.token);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_token(&mut self, token: &ParseToken);
|
||||
fn end_term(&mut self) {}
|
||||
}
|
@ -2,30 +2,6 @@ use std::result::Result;

use super::path_reader::{PathReader, ReaderError};

pub const ABSOLUTE: &'static str = "$";
pub const DOT: &'static str = ".";
pub const AT: &'static str = "@";
pub const OPEN_ARRAY: &'static str = "[";
pub const CLOSE_ARRAY: &'static str = "]";
pub const ASTERISK: &'static str = "*";
pub const QUESTION: &'static str = "?";
pub const COMMA: &'static str = ",";
pub const SPLIT: &'static str = ":";
pub const OPEN_PARENTHESIS: &'static str = "(";
pub const CLOSE_PARENTHESIS: &'static str = ")";
pub const KEY: &'static str = "Key";
pub const DOUBLE_QUOTE: &'static str = "\"";
pub const SINGLE_QUOTE: &'static str = "'";
pub const EQUAL: &'static str = "==";
pub const GREATER_OR_EQUAL: &'static str = ">=";
pub const GREATER: &'static str = ">";
pub const LITTLE: &'static str = "<";
pub const LITTLE_OR_EQUAL: &'static str = "<=";
pub const NOT_EQUAL: &'static str = "!=";
pub const AND: &'static str = "&&";
pub const OR: &'static str = "||";
pub const WHITESPACE: &'static str = " ";

const CH_DOLLA: char = '$';
const CH_DOT: char = '.';
const CH_ASTERISK: char = '*';
@ -86,60 +62,104 @@ pub enum Token {
}

impl Token {
pub fn partial_eq(&self, other: Token) -> bool {
self.to_simple() == other.to_simple()
}

pub fn simple_eq(&self, str_token: &str) -> bool {
self.to_simple() == str_token
}

fn to_simple(&self) -> &'static str {
pub fn is_match_token_type(&self, other: Token) -> bool {
match self {
Token::Absolute(_) => ABSOLUTE,
Token::Dot(_) => DOT,
Token::At(_) => AT,
Token::OpenArray(_) => OPEN_ARRAY,
Token::CloseArray(_) => CLOSE_ARRAY,
Token::Asterisk(_) => ASTERISK,
Token::Question(_) => QUESTION,
Token::Comma(_) => COMMA,
Token::Split(_) => SPLIT,
Token::OpenParenthesis(_) => OPEN_PARENTHESIS,
Token::CloseParenthesis(_) => CLOSE_PARENTHESIS,
Token::Key(_, _) => KEY,
Token::DoubleQuoted(_, _) => DOUBLE_QUOTE,
Token::SingleQuoted(_, _) => SINGLE_QUOTE,
Token::Equal(_) => EQUAL,
Token::GreaterOrEqual(_) => GREATER_OR_EQUAL,
Token::Greater(_) => GREATER,
Token::Little(_) => LITTLE,
Token::LittleOrEqual(_) => LITTLE_OR_EQUAL,
Token::NotEqual(_) => NOT_EQUAL,
Token::And(_) => AND,
Token::Or(_) => OR,
Token::Whitespace(_, _) => WHITESPACE,
Token::Absolute(_) => match other {
Token::Absolute(_) => true,
_ => false
},
Token::Dot(_) => match other {
Token::Dot(_) => true,
_ => false
},
Token::At(_) => match other {
Token::At(_) => true,
_ => false
},
Token::OpenArray(_) => match other {
Token::OpenArray(_) => true,
_ => false
},
Token::CloseArray(_) => match other {
Token::CloseArray(_) => true,
_ => false
},
Token::Asterisk(_) => match other {
Token::Asterisk(_) => true,
_ => false
},
Token::Question(_) => match other {
Token::Question(_) => true,
_ => false
},
Token::Comma(_) => match other {
Token::Comma(_) => true,
_ => false
},
Token::Split(_) => match other {
Token::Split(_) => true,
_ => false
},
Token::OpenParenthesis(_) => match other {
Token::OpenParenthesis(_) => true,
_ => false
},
Token::CloseParenthesis(_) => match other {
Token::CloseParenthesis(_) => true,
_ => false
},
Token::Key(_, _) => match other {
Token::Key(_, _) => true,
_ => false
},
Token::DoubleQuoted(_, _) => match other {
Token::DoubleQuoted(_, _) => true,
_ => false
},
Token::SingleQuoted(_, _) => match other {
Token::SingleQuoted(_, _) => true,
_ => false
},
Token::Equal(_) => match other {
Token::Equal(_) => true,
_ => false
},
Token::GreaterOrEqual(_) => match other {
Token::GreaterOrEqual(_) => true,
_ => false
},
Token::Greater(_) => match other {
Token::Greater(_) => true,
_ => false
},
Token::Little(_) => match other {
Token::Little(_) => true,
_ => false
},
Token::LittleOrEqual(_) => match other {
Token::LittleOrEqual(_) => true,
_ => false
},
Token::NotEqual(_) => match other {
Token::NotEqual(_) => true,
_ => false
},
Token::And(_) => match other {
Token::And(_) => true,
_ => false
},
Token::Or(_) => match other {
Token::Or(_) => true,
_ => false
},
Token::Whitespace(_, _) => match other {
Token::Whitespace(_, _) => true,
_ => false
},
}
}
}

fn simple_matched_token(ch: char, pos: usize) -> Option<Token> {
match ch {
CH_DOLLA => Some(Token::Absolute(pos)),
CH_DOT => Some(Token::Dot(pos)),
CH_ASTERISK => Some(Token::Asterisk(pos)),
CH_LARRAY => Some(Token::OpenArray(pos)),
CH_RARRAY => Some(Token::CloseArray(pos)),
CH_LPAREN => Some(Token::OpenParenthesis(pos)),
CH_RPAREN => Some(Token::CloseParenthesis(pos)),
CH_AT => Some(Token::At(pos)),
CH_QUESTION => Some(Token::Question(pos)),
CH_COMMA => Some(Token::Comma(pos)),
CH_SEMICOLON => Some(Token::Split(pos)),
_ => None,
}
}

pub struct Tokenizer<'a> {
input: PathReader<'a>,
}
@ -152,6 +172,37 @@ impl<'a> Tokenizer<'a> {
}
}

fn dolla(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
let fun = |c: &char| match c {
&CH_DOT
| &CH_ASTERISK
| &CH_LARRAY
| &CH_RARRAY
| &CH_LPAREN
| &CH_RPAREN
| &CH_AT
| &CH_QUESTION
| &CH_COMMA
| &CH_SEMICOLON
| &CH_LITTLE
| &CH_GREATER
| &CH_EQUAL
| &CH_AMPERSAND
| &CH_PIPE
| &CH_EXCLAMATION
=> false,
_ => !c.is_whitespace(),
};
let (_, mut vec) = self.input.take_while(fun).map_err(to_token_error)?;
vec.insert(0, ch);

if vec.len() == 1 {
Ok(Token::Absolute(pos))
} else {
Ok(Token::Key(pos, vec))
}
}

fn quote(&mut self, ch: char) -> Result<String, TokenError> {
let (_, mut val) = self
.input
@ -161,7 +212,7 @@ impl<'a> Tokenizer<'a> {
if let Some('\\') = val.chars().last() {
self.input.next_char().map_err(to_token_error)?;
let _ = val.pop();
let (_, mut val_remain) = self
let (_, val_remain) = self
.input
.take_while(|c| *c != ch)
.map_err(to_token_error)?;
@ -260,17 +311,25 @@ impl<'a> Tokenizer<'a> {
}

fn other(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
let fun = |c: &char| match simple_matched_token(*c, pos) {
Some(_) => false,
_ if c == &CH_LITTLE
|| c == &CH_GREATER
|| c == &CH_EQUAL
|| c == &CH_AMPERSAND
|| c == &CH_PIPE
|| c == &CH_EXCLAMATION =>
{
false
}
let fun = |c: &char| match c {
&CH_DOLLA
| &CH_DOT
| &CH_ASTERISK
| &CH_LARRAY
| &CH_RARRAY
| &CH_LPAREN
| &CH_RPAREN
| &CH_AT
| &CH_QUESTION
| &CH_COMMA
| &CH_SEMICOLON
| &CH_LITTLE
| &CH_GREATER
| &CH_EQUAL
| &CH_AMPERSAND
| &CH_PIPE
| &CH_EXCLAMATION
=> false,
_ => !c.is_whitespace(),
};
let (_, mut vec) = self.input.take_while(fun).map_err(to_token_error)?;
@ -280,20 +339,28 @@ impl<'a> Tokenizer<'a> {

pub fn next_token(&mut self) -> Result<Token, TokenError> {
let (pos, ch) = self.input.next_char().map_err(to_token_error)?;
match simple_matched_token(ch, pos) {
Some(t) => Ok(t),
None => match ch {
CH_SINGLE_QUOTE => self.single_quote(pos, ch),
CH_DOUBLE_QUOTE => self.double_quote(pos, ch),
CH_EQUAL => self.equal(pos, ch),
CH_GREATER => self.greater(pos, ch),
CH_LITTLE => self.little(pos, ch),
CH_AMPERSAND => self.and(pos, ch),
CH_PIPE => self.or(pos, ch),
CH_EXCLAMATION => self.not_equal(pos, ch),
_ if ch.is_whitespace() => self.whitespace(pos, ch),
_ => self.other(pos, ch),
},
match ch {
CH_DOLLA => self.dolla(pos, ch),
CH_DOT => Ok(Token::Dot(pos)),
CH_ASTERISK => Ok(Token::Asterisk(pos)),
CH_LARRAY => Ok(Token::OpenArray(pos)),
CH_RARRAY => Ok(Token::CloseArray(pos)),
CH_LPAREN => Ok(Token::OpenParenthesis(pos)),
CH_RPAREN => Ok(Token::CloseParenthesis(pos)),
CH_AT => Ok(Token::At(pos)),
CH_QUESTION => Ok(Token::Question(pos)),
CH_COMMA => Ok(Token::Comma(pos)),
CH_SEMICOLON => Ok(Token::Split(pos)),
CH_SINGLE_QUOTE => self.single_quote(pos, ch),
CH_DOUBLE_QUOTE => self.double_quote(pos, ch),
CH_EQUAL => self.equal(pos, ch),
CH_GREATER => self.greater(pos, ch),
CH_LITTLE => self.little(pos, ch),
CH_AMPERSAND => self.and(pos, ch),
CH_PIPE => self.or(pos, ch),
CH_EXCLAMATION => self.not_equal(pos, ch),
_ if ch.is_whitespace() => self.whitespace(pos, ch),
_ => self.other(pos, ch),
}
}

@ -321,7 +388,7 @@ impl<'a> TokenReader<'a> {
}
Err(e) => {
return TokenReader {
origin_input: input.clone(),
origin_input: input,
err: e,
err_pos: tokenizer.current_pos(),
tokens,
@ -332,13 +399,6 @@ impl<'a> TokenReader<'a> {
}
}

pub fn peek_is(&self, simple_token: &str) -> bool {
match self.peek_token() {
Ok(t) => t.simple_eq(simple_token),
_ => false,
}
}

pub fn peek_token(&self) -> Result<&Token, TokenError> {
match self.tokens.last() {
Some((_, t)) => {
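Editor's note, not part of the diff above: a minimal sketch of how the new Token::is_match_token_type compares tokens by variant while ignoring their position payloads. It assumes it is placed as a test module beside the tokenizer's Token enum.

#[cfg(test)]
mod token_type_examples {
use super::*; // assumes this sits next to the tokenizer's Token enum

#[test]
fn variant_only_comparison() {
// Positions differ (3 vs 0) but the variants match, so this is true.
assert!(Token::CloseArray(3).is_match_token_type(Token::CloseArray(0)));
// Different variants never match, regardless of position.
assert!(!Token::CloseArray(3).is_match_token_type(Token::OpenArray(0)));
}
}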
335 src/select/cmp.rs (new file)
@ -0,0 +1,335 @@
use array_tool::vec::{Intersect, Union};
use serde_json::Value;

pub(super) trait Cmp {
fn cmp_bool(&self, v1: bool, v2: bool) -> bool;

fn cmp_f64(&self, v1: f64, v2: f64) -> bool;

fn cmp_string(&self, v1: &str, v2: &str) -> bool;

fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value>;

fn default(&self) -> bool {
false
}
}

pub(super) struct CmpEq;

impl Cmp for CmpEq {
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 == v2
}

fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
(v1 - v2).abs() == 0_f64
}

fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 == v2
}

fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
v1.to_vec().intersect(v2.to_vec())
}
}

pub(super) struct CmpNe;

impl Cmp for CmpNe {
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 != v2
}

fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
(v1 - v2).abs() != 0_f64
}

fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 != v2
}

fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
v1.to_vec().intersect_if(v2.to_vec(), |a, b| a != b)
}
}

pub(super) struct CmpGt;

impl Cmp for CmpGt {
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 & !v2
}

fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 > v2
}

fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 > v2
}

fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
Vec::new()
}
}

pub(super) struct CmpGe;

impl Cmp for CmpGe {
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 >= v2
}

fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 >= v2
}

fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 >= v2
}

fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
Vec::new()
}
}

pub(super) struct CmpLt;

impl Cmp for CmpLt {
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
!v1 & v2
}

fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 < v2
}

fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 < v2
}

fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
Vec::new()
}
}

pub(super) struct CmpLe;

impl Cmp for CmpLe {
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 <= v2
}

fn cmp_f64(&self, v1: f64, v2: f64) -> bool {
v1 <= v2
}

fn cmp_string(&self, v1: &str, v2: &str) -> bool {
v1 <= v2
}

fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
Vec::new()
}
}

pub(super) struct CmpAnd;

impl Cmp for CmpAnd {
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 && v2
}

fn cmp_f64(&self, _v1: f64, _v2: f64) -> bool {
true
}

fn cmp_string(&self, v1: &str, v2: &str) -> bool {
!v1.is_empty() && !v2.is_empty()
}

fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
v1.to_vec().intersect(v2.to_vec())
}
}

pub(super) struct CmpOr;

impl Cmp for CmpOr {
fn cmp_bool(&self, v1: bool, v2: bool) -> bool {
v1 || v2
}

fn cmp_f64(&self, _v1: f64, _v2: f64) -> bool {
true
}

fn cmp_string(&self, v1: &str, v2: &str) -> bool {
!v1.is_empty() || !v2.is_empty()
}

fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value> {
v1.to_vec().union(v2.to_vec())
}
}


#[cfg(test)]
mod cmp_inner_tests {
use serde_json::Value;

use select::cmp::*;

#[test]
fn cmp_eq() {
let cmp_fn = CmpEq;
assert_eq!(cmp_fn.default(), false);
assert_eq!(cmp_fn.cmp_bool(true, false), false);
assert_eq!(cmp_fn.cmp_bool(true, true), true);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), true);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), false);
assert_eq!(cmp_fn.cmp_string("1", "1"), true);
assert_eq!(cmp_fn.cmp_string("1", "2"), false);
}

#[test]
fn cmp_ne() {
let cmp_fn = CmpNe;
assert_eq!(cmp_fn.default(), false);
assert_eq!(cmp_fn.cmp_bool(true, false), true);
assert_eq!(cmp_fn.cmp_bool(true, true), false);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), false);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), true);
assert_eq!(cmp_fn.cmp_string("1", "1"), false);
assert_eq!(cmp_fn.cmp_string("1", "2"), true);
}

#[test]
fn cmp_gt() {
let cmp_fn = CmpGt;
assert_eq!(cmp_fn.default(), false);
assert_eq!(cmp_fn.cmp_bool(true, false), true);
assert_eq!(cmp_fn.cmp_bool(true, true), false);
assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), true);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), false);
assert_eq!(cmp_fn.cmp_string("a", "a"), false);
assert_eq!(cmp_fn.cmp_string("b", "a"), true);
assert_eq!(cmp_fn.cmp_string("1", "2"), false);
}

#[test]
fn cmp_ge() {
let cmp_fn = CmpGe;
assert_eq!(cmp_fn.default(), false);
assert_eq!(cmp_fn.cmp_bool(true, false), true);
assert_eq!(cmp_fn.cmp_bool(true, true), true);
assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), true);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), true);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), false);
assert_eq!(cmp_fn.cmp_string("1", "1"), true);
assert_eq!(cmp_fn.cmp_string("ab", "a"), true);
assert_eq!(cmp_fn.cmp_string("1", "2"), false);
}

#[test]
fn cmp_lt() {
let cmp_fn = CmpLt;
assert_eq!(cmp_fn.default(), false);
assert_eq!(cmp_fn.cmp_bool(true, false), false);
assert_eq!(cmp_fn.cmp_bool(false, true), true);
assert_eq!(cmp_fn.cmp_bool(true, true), false);
assert_eq!(cmp_fn.cmp_bool(false, false), false);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), true);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), false);
assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), false);
assert_eq!(cmp_fn.cmp_string("a", "a"), false);
assert_eq!(cmp_fn.cmp_string("ab", "b"), true);
assert_eq!(cmp_fn.cmp_string("1", "2"), true);
}

#[test]
fn cmp_le() {
let cmp_fn = CmpLe;
assert_eq!(cmp_fn.default(), false);
assert_eq!(cmp_fn.cmp_bool(true, false), false);
assert_eq!(cmp_fn.cmp_bool(false, true), true);
assert_eq!(cmp_fn.cmp_bool(true, true), true);
assert_eq!(cmp_fn.cmp_bool(false, false), true);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), true);
assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), true);
assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), false);
assert_eq!(cmp_fn.cmp_string("a", "a"), true);
assert_eq!(cmp_fn.cmp_string("ab", "b"), true);
assert_eq!(cmp_fn.cmp_string("abd", "abc"), false);
assert_eq!(cmp_fn.cmp_string("1", "2"), true);
}

#[test]
fn cmp_and() {
let cmp_fn = CmpAnd;
assert_eq!(cmp_fn.default(), false);
assert_eq!(cmp_fn.cmp_bool(true, false), false);
assert_eq!(cmp_fn.cmp_bool(false, true), false);
assert_eq!(cmp_fn.cmp_bool(true, true), true);
assert_eq!(cmp_fn.cmp_bool(false, false), false);
assert_eq!(cmp_fn.cmp_f64(0.0, 0.0), true);
assert_eq!(cmp_fn.cmp_string("a", "a"), true);
}

#[test]
fn cmp_or() {
let cmp_fn = CmpOr;
assert_eq!(cmp_fn.default(), false);
assert_eq!(cmp_fn.cmp_bool(true, false), true);
assert_eq!(cmp_fn.cmp_bool(false, true), true);
assert_eq!(cmp_fn.cmp_bool(true, true), true);
assert_eq!(cmp_fn.cmp_bool(false, false), false);
assert_eq!(cmp_fn.cmp_f64(0.0, 0.0), true);
assert_eq!(cmp_fn.cmp_string("a", "a"), true);
}

#[test]
fn cmp_json() {
let v1 = Value::Bool(true);
let v2 = Value::String("1".to_string());
let left = [&v1, &v2];
let right = [&v1, &v2];
let empty: Vec<&Value> = Vec::new();

assert_eq!(CmpEq.cmp_json(&left, &right), left.to_vec());
assert_eq!(CmpNe.cmp_json(&left, &right), left.to_vec());
assert_eq!(CmpGt.cmp_json(&left, &right), empty);
assert_eq!(CmpGe.cmp_json(&left, &right), empty);
assert_eq!(CmpLt.cmp_json(&left, &right), empty);
assert_eq!(CmpLe.cmp_json(&left, &right), empty);
assert_eq!(CmpAnd.cmp_json(&left, &right), left.to_vec());
assert_eq!(CmpOr.cmp_json(&left, &right), left.to_vec());

assert_eq!(
CmpEq.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(true)]),
vec![&Value::Bool(true)]
);
assert_eq!(
CmpEq.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(false)]),
empty
);
assert_eq!(
CmpNe.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(true)]),
empty
);
assert_eq!(
CmpNe.cmp_json(&[&Value::Bool(false)], &[&Value::Bool(true)]),
vec![&Value::Bool(false)]
);
assert_eq!(
CmpAnd.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(true)]),
vec![&Value::Bool(true)]
);
assert_eq!(
CmpOr.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(false)]),
vec![&Value::Bool(true), &Value::Bool(false)]
);
}
}
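Editor's note, not part of the diff above: a minimal sketch of how a Cmp strategy can be plugged into a generic comparison site, mirroring how the filter evaluator in expr_term.rs dispatches on a generic C: Cmp. It assumes it lives inside the select module (the types are pub(super)) and uses the same import style as cmp_inner_tests.

#[cfg(test)]
mod cmp_usage_sketch {
use select::cmp::*; // same crate-root import style as cmp_inner_tests

// A generic helper lets the same call site switch comparison semantics
// by passing a different strategy value.
fn compare_numbers<C: Cmp>(cmp: &C, a: f64, b: f64) -> bool {
cmp.cmp_f64(a, b)
}

#[test]
fn dispatch_by_strategy() {
assert!(compare_numbers(&CmpEq, 1.0, 1.0));
assert!(!compare_numbers(&CmpEq, 1.0, 2.0));
assert!(compare_numbers(&CmpGt, 2.0, 1.0));
}
}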
227 src/select/expr_term.rs (new file)
@ -0,0 +1,227 @@
|
||||
use serde_json::{Number, Value};
|
||||
use select::cmp::*;
|
||||
use select::{FilterKey, to_f64};
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub(super) enum ExprTerm<'a> {
|
||||
String(String),
|
||||
Number(Number),
|
||||
Bool(bool),
|
||||
Json(Option<Vec<&'a Value>>, Option<FilterKey>, Vec<&'a Value>),
|
||||
}
|
||||
|
||||
impl<'a> ExprTerm<'a> {
|
||||
fn cmp<C1: Cmp, C2: Cmp>(
|
||||
&self,
|
||||
other: &Self,
|
||||
cmp_fn: &C1,
|
||||
reverse_cmp_fn: &C2,
|
||||
) -> ExprTerm<'a> {
|
||||
match &self {
|
||||
ExprTerm::String(s1) => match &other {
|
||||
ExprTerm::String(s2) => ExprTerm::Bool(cmp_fn.cmp_string(s1, s2)),
|
||||
ExprTerm::Json(_, _, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
|
||||
_ => ExprTerm::Bool(cmp_fn.default()),
|
||||
},
|
||||
ExprTerm::Number(n1) => match &other {
|
||||
ExprTerm::Number(n2) => ExprTerm::Bool(cmp_fn.cmp_f64(to_f64(n1), to_f64(n2))),
|
||||
ExprTerm::Json(_, _, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
|
||||
_ => ExprTerm::Bool(cmp_fn.default()),
|
||||
},
|
||||
ExprTerm::Bool(b1) => match &other {
|
||||
ExprTerm::Bool(b2) => ExprTerm::Bool(cmp_fn.cmp_bool(*b1, *b2)),
|
||||
ExprTerm::Json(_, _, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
|
||||
_ => ExprTerm::Bool(cmp_fn.default()),
|
||||
},
|
||||
ExprTerm::Json(rel, fk1, vec1) => {
|
||||
let ret: Vec<&Value> = match &other {
|
||||
ExprTerm::String(s2) => vec1
|
||||
.iter()
|
||||
.filter(|v1| match v1 {
|
||||
Value::String(s1) => cmp_fn.cmp_string(s1, s2),
|
||||
Value::Object(map1) => {
|
||||
if let Some(FilterKey::String(k)) = fk1 {
|
||||
if let Some(Value::String(s1)) = map1.get(k) {
|
||||
return cmp_fn.cmp_string(s1, s2);
|
||||
}
|
||||
}
|
||||
cmp_fn.default()
|
||||
}
|
||||
_ => cmp_fn.default(),
|
||||
})
|
||||
.cloned()
|
||||
.collect(),
|
||||
ExprTerm::Number(n2) => vec1
|
||||
.iter()
|
||||
.filter(|v1| match v1 {
|
||||
Value::Number(n1) => cmp_fn.cmp_f64(to_f64(n1), to_f64(n2)),
|
||||
Value::Object(map1) => {
|
||||
if let Some(FilterKey::String(k)) = fk1 {
|
||||
if let Some(Value::Number(n1)) = map1.get(k) {
|
||||
return cmp_fn.cmp_f64(to_f64(n1), to_f64(n2));
|
||||
}
|
||||
}
|
||||
cmp_fn.default()
|
||||
}
|
||||
_ => cmp_fn.default(),
|
||||
})
|
||||
.cloned()
|
||||
.collect(),
|
||||
ExprTerm::Bool(b2) => vec1
|
||||
.iter()
|
||||
.filter(|v1| match v1 {
|
||||
Value::Bool(b1) => cmp_fn.cmp_bool(*b1, *b2),
|
||||
Value::Object(map1) => {
|
||||
if let Some(FilterKey::String(k)) = fk1 {
|
||||
if let Some(Value::Bool(b1)) = map1.get(k) {
|
||||
return cmp_fn.cmp_bool(*b1, *b2);
|
||||
}
|
||||
}
|
||||
cmp_fn.default()
|
||||
}
|
||||
_ => cmp_fn.default(),
|
||||
})
|
||||
.cloned()
|
||||
.collect(),
|
||||
ExprTerm::Json(parent, _, vec2) => {
|
||||
if let Some(vec1) = rel {
|
||||
cmp_fn.cmp_json(vec1, vec2)
|
||||
} else if let Some(vec2) = parent {
|
||||
cmp_fn.cmp_json(vec1, vec2)
|
||||
} else {
|
||||
cmp_fn.cmp_json(vec1, vec2)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if ret.is_empty() {
|
||||
ExprTerm::Bool(cmp_fn.default())
|
||||
} else if let Some(rel) = rel {
|
||||
if let ExprTerm::Json(_, _, _) = &other {
|
||||
ExprTerm::Json(Some(rel.to_vec()), None, ret)
|
||||
} else {
|
||||
let mut tmp = Vec::new();
|
||||
for rel_value in rel {
|
||||
if let Value::Object(map) = rel_value {
|
||||
for map_value in map.values() {
|
||||
for result_value in &ret {
|
||||
if map_value.eq(*result_value) {
|
||||
tmp.push(*rel_value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ExprTerm::Json(Some(tmp), None, ret)
|
||||
}
|
||||
} else {
|
||||
ExprTerm::Json(None, None, ret)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn eq(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("eq - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpEq, &CmpEq);
|
||||
debug!("eq = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn ne(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("ne - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpNe, &CmpNe);
|
||||
debug!("ne = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn gt(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("gt - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpGt, &CmpLt);
|
||||
debug!("gt = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn ge(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("ge - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpGe, &CmpLe);
|
||||
debug!("ge = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn lt(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("lt - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpLt, &CmpGt);
|
||||
debug!("lt = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn le(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("le - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpLe, &CmpGe);
|
||||
debug!("le = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn and(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("and - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpAnd, &CmpAnd);
|
||||
debug!("and = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
|
||||
pub fn or(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
|
||||
debug!("or - {:?} : {:?}", &self, &other);
|
||||
let _ = ret.take();
|
||||
let tmp = self.cmp(other, &CmpOr, &CmpOr);
|
||||
debug!("or = {:?}", tmp);
|
||||
*ret = Some(tmp);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Into<ExprTerm<'a>> for &Vec<&'a Value> {
|
||||
fn into(self) -> ExprTerm<'a> {
|
||||
if self.len() == 1 {
|
||||
match &self[0] {
|
||||
Value::Number(v) => return ExprTerm::Number(v.clone()),
|
||||
Value::String(v) => return ExprTerm::String(v.clone()),
|
||||
Value::Bool(v) => return ExprTerm::Bool(*v),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
ExprTerm::Json(None, None, self.to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod expr_term_inner_tests {
|
||||
use serde_json::{Number, Value};
|
||||
use select::expr_term::ExprTerm;
|
||||
|
||||
#[test]
|
||||
fn value_vec_into() {
|
||||
let v = Value::Bool(true);
|
||||
let vec = &vec![&v];
|
||||
let term: ExprTerm = vec.into();
|
||||
assert_eq!(term, ExprTerm::Bool(true));
|
||||
|
||||
let v = Value::String("a".to_string());
|
||||
let vec = &vec![&v];
|
||||
let term: ExprTerm = vec.into();
|
||||
assert_eq!(term, ExprTerm::String("a".to_string()));
|
||||
|
||||
let v = serde_json::from_str("1.0").unwrap();
|
||||
let vec = &vec![&v];
|
||||
let term: ExprTerm = vec.into();
|
||||
assert_eq!(term, ExprTerm::Number(Number::from_f64(1.0).unwrap()));
|
||||
}
|
||||
}
|
1623 src/select/mod.rs (file diff suppressed because it is too large)
99 src/select/value_walker.rs (new file)
@ -0,0 +1,99 @@
use serde_json::Value;
use std::collections::HashSet;

pub(super) struct ValueWalker;

impl<'a> ValueWalker {
pub fn all_with_num(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, index: f64) {
Self::walk(vec, tmp, &|v| if v.is_array() {
if let Some(item) = v.get(index as usize) {
Some(vec![item])
} else {
None
}
} else {
None
});
}

pub fn all_with_str(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, key: &str, is_filter: bool) {
if is_filter {
Self::walk(vec, tmp, &|v| match v {
Value::Object(map) if map.contains_key(key) => Some(vec![v]),
_ => None,
});
} else {
Self::walk(vec, tmp, &|v| match v {
Value::Object(map) => match map.get(key) {
Some(v) => Some(vec![v]),
_ => None,
},
_ => None,
});
}
}

pub fn all(vec: &[&'a Value], tmp: &mut Vec<&'a Value>) {
Self::walk(vec, tmp, &|v| match v {
Value::Array(vec) => Some(vec.iter().collect()),
Value::Object(map) => {
let mut tmp = Vec::new();
for (_, v) in map {
tmp.push(v);
}
Some(tmp)
}
_ => None,
});
}

fn walk<F>(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, fun: &F) where F: Fn(&Value) -> Option<Vec<&Value>> {
for v in vec {
Self::_walk(v, tmp, fun);
}
}

fn _walk<F>(v: &'a Value, tmp: &mut Vec<&'a Value>, fun: &F) where F: Fn(&Value) -> Option<Vec<&Value>> {
if let Some(mut ret) = fun(v) {
tmp.append(&mut ret);
}

match v {
Value::Array(vec) => {
for v in vec {
Self::_walk(v, tmp, fun);
}
}
Value::Object(map) => {
for (_, v) in map {
Self::_walk(&v, tmp, fun);
}
}
_ => {}
}
}

pub fn walk_dedup(v: &'a Value,
tmp: &mut Vec<&'a Value>,
key: &str,
visited: &mut HashSet<*const Value>, ) {
match v {
Value::Object(map) => {
if map.contains_key(key) {
let ptr = v as *const Value;
if !visited.contains(&ptr) {
visited.insert(ptr);
tmp.push(v)
}
}
}
Value::Array(vec) => {
for v in vec {
Self::walk_dedup(v, tmp, key, visited);
}
}
_ => {}
}
}
}
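Editor's note, not part of the diff above: a minimal sketch of ValueWalker::all_with_str collecting every value stored under a given key, based on the signatures shown in the new file. Since ValueWalker is pub(super), the sketch assumes it sits as a test module inside value_walker.rs (or elsewhere under the select module).

#[cfg(test)]
mod value_walker_sketch {
use serde_json::json;
use select::value_walker::ValueWalker; // visible only inside the select module

#[test]
fn collect_all_values_named_id() {
let data = json!({"friends": [{"id": 0}, {"id": 1}]});
let root = vec![&data];
let mut out = Vec::new();
// `false` asks for the value under the key rather than the enclosing object.
ValueWalker::all_with_str(&root, &mut out, "id", false);
assert_eq!(out, vec![&json!(0), &json!(1)]);
}
}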
253 tests/array_filter.rs (new file)
@ -0,0 +1,253 @@
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use common::{read_json, select_and_then_compare, setup};
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn array_range_default() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[1, 2]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_all() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[ : ]",
|
||||
json!(["first", "second"]),
|
||||
json!(["first", "second"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_all() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[::]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_only_step_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[::2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "third", "fifth"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_only_start_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[1::]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second", "third", "forth", "fifth"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_empty_step_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[1:2:]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_empty_end_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[1::2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second", "forth"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_by_1() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[0:3:1]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "second", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_step_by_2() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[0:3:2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_only_negative_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[-4:]",
|
||||
json!(["first", "second", "third"]),
|
||||
json!(["first", "second", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_only_end_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[:4]",
|
||||
json!(["first", "second", "third"]),
|
||||
json!(["first", "second", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_only_from_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[1: ]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_range_only_nagative_end_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[:-2]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
{"id": 0, "name": "Millicent Norman"}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..friends[2].name",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!(["Gray Berry", "Gray Berry"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_all_index() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..friends[*].name",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
"Vincent Cannon",
|
||||
"Gray Berry",
|
||||
"Millicent Norman",
|
||||
"Vincent Cannon",
|
||||
"Gray Berry"
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_all_and_then_key() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$['school']['friends'][*].['name']",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!(["Millicent Norman", "Vincent Cannon", "Gray Berry"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_index_and_then_key() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$['school']['friends'][0].['name']",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!(["Millicent Norman"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn array_multiple_key() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.["eyeColor", "name"]"#,
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!(["blue", "Leonor Herman"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bugs40_bracket_notation_after_recursive_descent() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[0]",
|
||||
json!([
|
||||
"first",
|
||||
{
|
||||
"key": [
|
||||
"first nested",
|
||||
{
|
||||
"more": [
|
||||
{"nested": ["deepest", "second"]},
|
||||
["more", "values"]
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]),
|
||||
json!([
|
||||
"first",
|
||||
"first nested",
|
||||
{
|
||||
"nested" : [
|
||||
"deepest",
|
||||
"second"
|
||||
]
|
||||
},
|
||||
"deepest",
|
||||
"more"
|
||||
]),
|
||||
);
|
||||
}
|
@@ -30,8 +30,8 @@ pub fn read_contents(path: &str) -> String {
}

#[allow(dead_code)]
pub fn select_and_then_compare<'a>(path: &str, json: Value, target: Value) {
let mut selector = Selector::new();
pub fn select_and_then_compare(path: &str, json: Value, target: Value) {
let mut selector = Selector::default();
let result = selector
.str_path(path)
.unwrap()
@@ -41,7 +41,7 @@ pub fn select_and_then_compare<'a>(path: &str, json: Value, target: Value) {
assert_eq!(
result,
match target {
Value::Array(vec) => vec.clone(),
Value::Array(vec) => vec,
_ => panic!("Give me the Array!"),
},
"{}",
@@ -50,7 +50,7 @@ pub fn select_and_then_compare<'a>(path: &str, json: Value, target: Value) {
}

#[allow(dead_code)]
pub fn compare_result<'a>(result: Vec<&Value>, target: Value) {
pub fn compare_result(result: Vec<&Value>, target: Value) {
let result = serde_json::to_value(result).unwrap();
assert_eq!(result, target);
}
841 tests/filter.rs
@@ -1,593 +1,10 @@
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
use common::{read_json, select_and_then_compare, setup};
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn array() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[1, 2]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[1: ]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[:-2]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{"id": 0, "name": "Millicent Norman"}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..friends[2].name",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!(["Gray Berry", "Gray Berry"]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..friends[*].name",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
"Vincent Cannon",
|
||||
"Gray Berry",
|
||||
"Millicent Norman",
|
||||
"Vincent Cannon",
|
||||
"Gray Berry"
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$['school']['friends'][*].['name']",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!(["Millicent Norman", "Vincent Cannon", "Gray Berry"]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$['school']['friends'][0].['name']",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!(["Millicent Norman"]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.["eyeColor", "name"]"#,
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!(["blue", "Leonor Herman"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(@.friends[0])]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(@.friends[10])]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(1==1)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[?(1==1)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([[
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_default() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(@.friends == @.friends)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?(@.name)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{ "id" : 1, "name" : "Vincent Cannon" },
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?(@.id >= 2)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?(@.id >= 2 || @.id == 1)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" },
|
||||
{ "id" : 1, "name" : "Vincent Cannon" }
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?( (@.id >= 2 || @.id == 1) && @.id == 0)]",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..friends[?(@.id == $.index)].id",
|
||||
read_json("./benches/data_obj.json"),
|
||||
json!([0, 0]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..book[?($.store.bicycle.price < @.price)].price",
|
||||
read_json("./benches/example.json"),
|
||||
json!([22.99]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..book[?( (@.price == 12.99 || @.category == 'reference') && @.price > 10)].price",
|
||||
read_json("./benches/example.json"),
|
||||
json!([12.99]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[?(@.age > 40)]",
|
||||
json!([
|
||||
{ "name": "이름1", "age": 40, "phone": "+33 12341234" },
|
||||
{ "name": "이름2", "age": 42, "phone": "++44 12341234" }
|
||||
]),
|
||||
json!([
|
||||
{ "name" : "이름2", "age" : 42, "phone" : "++44 12341234" }
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[?(@.age >= 30)]",
|
||||
json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}),
|
||||
json!([
|
||||
{ "name" : "친구3", "age" : 30 }
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_number() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.[?(@.a == 1)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
select_and_then_compare("$.[?(@.a != 2)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
select_and_then_compare("$.[?(@.a < 2)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
select_and_then_compare("$.[?(@.a <= 1)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
select_and_then_compare("$.[?(@.a > 0)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
select_and_then_compare("$.[?(@.a >= 0)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_string() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a == "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([{ "a": "b" }]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a != "c")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([{ "a": "b" }]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a < "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a <= "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([{ "a": "b" }]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a > "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a >= "b")]"#,
|
||||
json!({ "a": "b" }),
|
||||
json!([{ "a": "b" }]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_object() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a == @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([{"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a != @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a < @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a <= @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a > @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a >= @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_complex() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(1 == @.a)]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?("1" != @.a)]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a <= 1)]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a > "1")]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([Value::Null]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn example() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.store.book[*].author"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..author"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.store.*"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
[
|
||||
{"category" : "reference", "author" : "Nigel Rees","title" : "Sayings of the Century", "price" : 8.95},
|
||||
{"category" : "fiction", "author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99},
|
||||
{"category" : "fiction", "author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
|
||||
{"category" : "fiction", "author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
|
||||
],
|
||||
{"color" : "red","price" : 19.95},
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.store..price"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([8.95, 12.99, 8.99, 22.99, 19.95]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[2]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[-2]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[0, 1]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[:2]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[2:]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[?(@.isbn)]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.store.book[?(@.price < 10)]"#,
|
||||
read_json("./benches/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..*"#,
|
||||
read_json("./benches/example.json"),
|
||||
read_json("./benches/giveme_every_thing_result.json"),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filer_same_obj() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..[?(@.a == 1)]"#,
|
||||
json!({
|
||||
"a": 1,
|
||||
"b" : {"a": 1},
|
||||
"c" : {"a": 1}
|
||||
}),
|
||||
json!([
|
||||
{"a": 1},
|
||||
{"a": 1}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn range() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[ : ]",
|
||||
json!(["first", "second"]),
|
||||
json!(["first", "second"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[::]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[::2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "third", "fifth"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[1::]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second", "third", "forth", "fifth"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[1:2:]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[1::2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["second", "forth"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[0:3:1]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "second", "third"]),
|
||||
);
|
||||
select_and_then_compare(
|
||||
"$[0:3:2]",
|
||||
json!(["first", "second", "third", "forth", "fifth"]),
|
||||
json!(["first", "third"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn quote() {
|
||||
setup();
|
||||
@@ -603,3 +20,261 @@ fn quote() {
|
||||
json!(["value"]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filter_next_all() {
|
||||
setup();
|
||||
|
||||
for path in &[r#"$.*"#, r#"$[*]"#] {
|
||||
select_and_then_compare(
|
||||
path,
|
||||
json!(["string", 42, { "key": "value" }, [0, 1]]),
|
||||
json!(["string", 42, { "key": "value" }, [0, 1]]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filter_all() {
|
||||
setup();
|
||||
|
||||
for path in &[r#"$..*"#, r#"$..[*]"#] {
|
||||
select_and_then_compare(
|
||||
path,
|
||||
json!(["string", 42, { "key": "value" }, [0, 1]]),
|
||||
json!([ "string", 42, { "key" : "value" }, [ 0, 1 ], "value", 0, 1 ]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filter_array_next_all() {
|
||||
setup();
|
||||
|
||||
for path in &[r#"$.*.*"#, r#"$[*].*"#, r#"$.*[*]"#, r#"$[*][*]"#] {
|
||||
select_and_then_compare(
|
||||
path,
|
||||
json!(["string", 42, { "key": "value" }, [0, 1]]),
|
||||
json!(["value", 0, 1]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filter_all_complex() {
|
||||
setup();
|
||||
|
||||
for path in &[r#"$..friends.*"#, r#"$[*].friends.*"#] {
|
||||
select_and_then_compare(
|
||||
path,
|
||||
read_json("./benchmark/data_array.json"),
|
||||
json!([
|
||||
{ "id" : 0, "name" : "Millicent Norman" },
|
||||
{ "id" : 1, "name" : "Vincent Cannon" },
|
||||
{ "id" : 2, "name" : "Gray Berry" },
|
||||
{ "id" : 0, "name" : "Tillman Mckay" },
|
||||
{ "id" : 1, "name" : "Rivera Berg" },
|
||||
{ "id" : 2, "name" : "Rosetta Erickson" }
|
||||
]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filter_parent_with_matched_child() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.a[?(@.b.c == 1)]",
|
||||
json!({
|
||||
"a": {
|
||||
"b": {
|
||||
"c": 1
|
||||
}
|
||||
}
|
||||
}),
|
||||
json!([
|
||||
{
|
||||
"b" : {
|
||||
"c" : 1
|
||||
}
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filter_parent_exist_child() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.a[?(@.b.c)]",
|
||||
json!({
|
||||
"a": {
|
||||
"b": {
|
||||
"c": 1
|
||||
}
|
||||
}
|
||||
}),
|
||||
json!([
|
||||
{
|
||||
"b" : {
|
||||
"c" : 1
|
||||
}
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filter_parent_paths() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[?(@.key.subKey == 'subKey2')]",
|
||||
json!([
|
||||
{"key": {"seq": 1, "subKey": "subKey1"}},
|
||||
{"key": {"seq": 2, "subKey": "subKey2"}},
|
||||
{"key": 42},
|
||||
{"some": "value"}
|
||||
]),
|
||||
json!([{"key": {"seq": 2, "subKey": "subKey2"}}]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bugs33_exist_in_all() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[?(@.first.second)]",
|
||||
json!({
|
||||
"foo": {
|
||||
"first": { "second": "value" }
|
||||
},
|
||||
"foo2": {
|
||||
"first": {}
|
||||
},
|
||||
"foo3": {
|
||||
}
|
||||
}),
|
||||
json!([
|
||||
{
|
||||
"first": {
|
||||
"second": "value"
|
||||
}
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bugs33_exist_left_in_all_with_and_condition() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[?(@.first && @.first.second)]",
|
||||
json!({
|
||||
"foo": {
|
||||
"first": { "second": "value" }
|
||||
},
|
||||
"foo2": {
|
||||
"first": {}
|
||||
},
|
||||
"foo3": {
|
||||
}
|
||||
}),
|
||||
json!([
|
||||
{
|
||||
"first": {
|
||||
"second": "value"
|
||||
}
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bugs33_exist_right_in_all_with_and_condition() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[?(@.b.c.d && @.b)]",
|
||||
json!({
|
||||
"a": {
|
||||
"b": {
|
||||
"c": {
|
||||
"d" : {
|
||||
"e" : 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}),
|
||||
json!([
|
||||
{
|
||||
"b" : {
|
||||
"c" : {
|
||||
"d" : {
|
||||
"e" : 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bugs38_array_notation_in_filter() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[?(@['key']==42)]",
|
||||
json!([
|
||||
{"key": 0},
|
||||
{"key": 42},
|
||||
{"key": -1},
|
||||
{"key": 41},
|
||||
{"key": 43},
|
||||
{"key": 42.0001},
|
||||
{"key": 41.9999},
|
||||
{"key": 100},
|
||||
{"some": "value"}
|
||||
]),
|
||||
json!([{"key": 42}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$[?(@['key'].subKey == 'subKey2')]",
|
||||
json!([
|
||||
{"key": {"seq": 1, "subKey": "subKey1"}},
|
||||
{"key": {"seq": 2, "subKey": "subKey2"}},
|
||||
{"key": 42},
|
||||
{"some": "value"}
|
||||
]),
|
||||
json!([{"key": {"seq": 2, "subKey": "subKey2"}}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$[?(@['key']['subKey'] == 'subKey2')]",
|
||||
json!([
|
||||
{"key": {"seq": 1, "subKey": "subKey1"}},
|
||||
{"key": {"seq": 2, "subKey": "subKey2"}},
|
||||
{"key": 42},
|
||||
{"some": "value"}
|
||||
]),
|
||||
json!([{"key": {"seq": 2, "subKey": "subKey2"}}]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..key[?(@['subKey'] == 'subKey2')]",
|
||||
json!([
|
||||
{"key": {"seq": 1, "subKey": "subKey1"}},
|
||||
{"key": {"seq": 2, "subKey": "subKey2"}},
|
||||
{"key": 42},
|
||||
{"some": "value"}
|
||||
]),
|
||||
json!([{"seq": 2, "subKey": "subKey2"}]),
|
||||
);
|
||||
}
|
242 tests/jsonpath_examples.rs (new file)
@@ -0,0 +1,242 @@
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use common::{read_json, select_and_then_compare, setup};
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn example_authros_of_all_books() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.store.book[*].author"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_authors() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..author"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
"Nigel Rees",
|
||||
"Evelyn Waugh",
|
||||
"Herman Melville",
|
||||
"J. R. R. Tolkien"
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_things_both_books_and_bicycles() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.store.*"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
[
|
||||
{"category" : "reference", "author" : "Nigel Rees","title" : "Sayings of the Century", "price" : 8.95},
|
||||
{"category" : "fiction", "author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99},
|
||||
{"category" : "fiction", "author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
|
||||
{"category" : "fiction", "author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
|
||||
],
|
||||
{"color" : "red","price" : 19.95},
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn the_price_of_everything() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.store..price"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([8.95, 12.99, 8.99, 22.99, 19.95]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn the_third_book() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[2]"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn the_second_to_last_book() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[-2]"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn the_first_two_books() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[0, 1]"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_books_from_index_0_inclusive_until_index_2_exclusive() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[:2]"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_books_from_index_1_inclusive_until_index_2_exclusive() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[2:]"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_books_with_an_isbn_number() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..book[?(@.isbn)]"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_books_in_store_cheaper_than_10() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.store.book[?(@.price < 10)]"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn give_me_every_thing() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..*"#,
|
||||
read_json("./benchmark/example.json"),
|
||||
read_json("./benchmark/giveme_every_thing_result.json"),
|
||||
);
|
||||
}
|
147 tests/lib.rs
@@ -7,50 +7,77 @@ use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
|
||||
use common::{compare_result, read_contents, read_json, setup};
|
||||
use jsonpath::JsonPathError;
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn compile() {
|
||||
let compile_object = |path| {
|
||||
let mut template = jsonpath::compile(path);
|
||||
let json_obj = read_json("./benchmark/data_obj.json");
|
||||
let json = template(&json_obj).unwrap();
|
||||
let ret = json!([
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Gray Berry"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
};
|
||||
|
||||
let compile_array = |path| {
|
||||
let mut template = jsonpath::compile(path);
|
||||
let json_obj = read_json("./benchmark/data_array.json");
|
||||
let json = template(&json_obj).unwrap();
|
||||
let ret = json!([
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Rosetta Erickson"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
};
|
||||
|
||||
fn compile_error() {
|
||||
let mut template = jsonpath::compile("$[");
|
||||
assert!(template(&Value::Null).is_err());
|
||||
}
|
||||
|
||||
setup();
|
||||
|
||||
let mut template = jsonpath::compile("$..friends[2]");
|
||||
let json_obj = read_json("./benches/data_obj.json");
|
||||
let json = template(&json_obj).unwrap();
|
||||
let ret = json!([
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Gray Berry"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
|
||||
let json_obj = read_json("./benches/data_array.json");
|
||||
let json = template(&json_obj).unwrap();
|
||||
let ret = json!([
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Rosetta Erickson"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
compile_object("$..friends[2]");
|
||||
compile_array("$..friends[2]");
|
||||
compile_error();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn selector() {
|
||||
setup();
|
||||
|
||||
let json_obj = read_json("./benches/data_obj.json");
|
||||
let mut reader = jsonpath::selector(&json_obj);
|
||||
let json = reader("$..friends[2]").unwrap();
|
||||
let ret = json!([
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Gray Berry"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
fn select<'a, F>(selector: &mut F, path: &'a str, target: Value)
|
||||
where
|
||||
F: FnMut(&'a str) -> Result<Vec<&Value>, JsonPathError>,
|
||||
{
|
||||
let json = selector(path).unwrap();
|
||||
compare_result(json, target);
|
||||
};
|
||||
|
||||
let json = reader("$..friends[0]").unwrap();
|
||||
let ret = json!([
|
||||
{"id": 0},
|
||||
{"id": 0,"name": "Millicent Norman"}
|
||||
]);
|
||||
compare_result(json, ret);
|
||||
let json_obj = read_json("./benchmark/data_obj.json");
|
||||
let mut selector = jsonpath::selector(&json_obj);
|
||||
|
||||
select(
|
||||
&mut selector,
|
||||
"$..friends[2]",
|
||||
json!([
|
||||
{"id": 2,"name": "Gray Berry"},
|
||||
{"id": 2,"name": "Gray Berry"}
|
||||
]),
|
||||
);
|
||||
select(
|
||||
&mut selector,
|
||||
"$..friends[0]",
|
||||
json!([
|
||||
{"id": 0},
|
||||
{"id": 0,"name": "Millicent Norman"}
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -61,36 +88,48 @@ fn selector_as() {
|
||||
name: Option<String>,
|
||||
}
|
||||
|
||||
let json_obj = read_json("./benches/data_obj.json");
|
||||
fn select<'a, F>(selector: &mut F, path: &'a str, target: Vec<Friend>)
|
||||
where
|
||||
F: FnMut(&'a str) -> Result<Vec<Friend>, JsonPathError>,
|
||||
{
|
||||
let json = selector(path).unwrap();
|
||||
assert_eq!(json, target);
|
||||
};
|
||||
|
||||
let json_obj = read_json("./benchmark/data_obj.json");
|
||||
let mut selector = jsonpath::selector_as::<Friend>(&json_obj);
|
||||
let json = selector("$..friends[2]").unwrap();
|
||||
|
||||
let ret = vec![
|
||||
Friend {
|
||||
id: 2,
|
||||
name: Some("Gray Berry".to_string()),
|
||||
},
|
||||
Friend {
|
||||
id: 2,
|
||||
name: Some("Gray Berry".to_string()),
|
||||
},
|
||||
];
|
||||
assert_eq!(json, ret);
|
||||
select(
|
||||
&mut selector,
|
||||
"$..friends[2]",
|
||||
vec![
|
||||
Friend {
|
||||
id: 2,
|
||||
name: Some("Gray Berry".to_string()),
|
||||
},
|
||||
Friend {
|
||||
id: 2,
|
||||
name: Some("Gray Berry".to_string()),
|
||||
},
|
||||
],
|
||||
);
|
||||
|
||||
let json = selector("$..friends[0]").unwrap();
|
||||
let ret = vec![
|
||||
Friend { id: 0, name: None },
|
||||
Friend {
|
||||
id: 0,
|
||||
name: Some("Millicent Norman".to_string()),
|
||||
},
|
||||
];
|
||||
assert_eq!(json, ret);
|
||||
select(
|
||||
&mut selector,
|
||||
"$..friends[0]",
|
||||
vec![
|
||||
Friend { id: 0, name: None },
|
||||
Friend {
|
||||
id: 0,
|
||||
name: Some("Millicent Norman".to_string()),
|
||||
},
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn select() {
|
||||
let json_obj = read_json("./benches/example.json");
|
||||
let json_obj = read_json("./benchmark/example.json");
|
||||
let json = jsonpath::select(&json_obj, "$..book[2]").unwrap();
|
||||
let ret = json!([{
|
||||
"category" : "fiction",
|
||||
@@ -104,7 +143,7 @@ fn select() {
|
||||
|
||||
#[test]
|
||||
fn select_str() {
|
||||
let json_str = read_contents("./benches/example.json");
|
||||
let json_str = read_contents("./benchmark/example.json");
|
||||
let result_str = jsonpath::select_as_str(&json_str, "$..book[2]").unwrap();
|
||||
let ret = json!([{
|
||||
"category" : "fiction",
|
||||
|
@@ -1,58 +0,0 @@
|
||||
extern crate jsonpath_lib as jsonpath;
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use common::{read_json, setup};
|
||||
use jsonpath::{Selector, SelectorMut};
|
||||
use serde_json::Value;
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn selector_mut() {
|
||||
setup();
|
||||
|
||||
let mut selector_mut = SelectorMut::new();
|
||||
|
||||
let mut nums = Vec::new();
|
||||
let result = selector_mut
|
||||
.str_path(r#"$.store..price"#)
|
||||
.unwrap()
|
||||
.value(read_json("./benches/example.json"))
|
||||
.replace_with(&mut |v| {
|
||||
match v {
|
||||
Value::Number(n) => {
|
||||
nums.push(n.as_f64().unwrap());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
Value::String("a".to_string())
|
||||
})
|
||||
.unwrap()
|
||||
.take()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
nums,
|
||||
vec![8.95_f64, 12.99_f64, 8.99_f64, 22.99_f64, 19.95_f64]
|
||||
);
|
||||
|
||||
let mut selector = Selector::new();
|
||||
let result = selector
|
||||
.str_path(r#"$.store..price"#)
|
||||
.unwrap()
|
||||
.value(&result)
|
||||
.select()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
vec![
|
||||
&json!("a"),
|
||||
&json!("a"),
|
||||
&json!("a"),
|
||||
&json!("a"),
|
||||
&json!("a")
|
||||
],
|
||||
result
|
||||
);
|
||||
}
|
376 tests/op.rs (new file)
@@ -0,0 +1,376 @@
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use common::{read_json, select_and_then_compare, setup};
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn op_object_eq() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(@.friends == @.friends)]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_object_ge() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?(@.id >= 2)]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_object_or_default() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?(@.id >= 2 || @.id == 1)]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
{ "id" : 2, "name" : "Gray Berry" },
|
||||
{ "id" : 1, "name" : "Vincent Cannon" }
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_object_and_or() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?( (@.id >= 2 || @.id == 1) && @.id == 0)]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_result_type() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..friends[?(@.id == $.index)].id",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([0, 0]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_absolute_path_result_type() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..book[?($.store.bicycle.price < @.price)].price",
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([22.99]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_complicated() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..book[?( (@.price == 12.99 || @.category == 'reference') && @.price > 10)].price",
|
||||
read_json("./benchmark/example.json"),
|
||||
json!([12.99]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_gt() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[?(@.age > 40)]",
|
||||
json!([
|
||||
{ "name": "이름1", "age": 40, "phone": "+33 12341234" },
|
||||
{ "name": "이름2", "age": 42, "phone": "++44 12341234" }
|
||||
]),
|
||||
json!([
|
||||
{ "name" : "이름2", "age" : 42, "phone" : "++44 12341234" }
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_ge() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..[?(@.age >= 30)]",
|
||||
json!({
|
||||
"school": {
|
||||
"friends": [
|
||||
{"name": "친구1", "age": 20},
|
||||
{"name": "친구2", "age": 20}
|
||||
]
|
||||
},
|
||||
"friends": [
|
||||
{"name": "친구3", "age": 30},
|
||||
{"name": "친구4"}
|
||||
]}),
|
||||
json!([
|
||||
{ "name" : "친구3", "age" : 30 }
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_eq_for_number() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.[?(@.a == 1)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_ne_for_number() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.[?(@.a != 2)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_lt_for_number() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.[?(@.a < 2)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_le_for_number() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.[?(@.a <= 1)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_gt_for_number() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.[?(@.a > 0)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_ge_for_number() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare("$.[?(@.a >= 0)]", json!({ "a": 1 }), json!([{ "a": 1 }]));
|
||||
}
|
||||
|
||||
|
||||
|
||||
#[test]
|
||||
fn op_eq_for_string_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a == "b")]"#, json!({ "a": "b" }), json!([{ "a": "b" }]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_ne_for_string_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a != "c")]"#, json!({ "a": "b" }), json!([{ "a": "b" }]),
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_lt_for_string_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a < "b")]"#, json!({ "a": "b" }), json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_le_for_string_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a <= "b")]"#, json!({ "a": "b" }), json!([{ "a": "b" }]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_gt_for_string_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a > "b")]"#, json!({ "a": "b" }), json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_ge_for_string_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a >= "b")]"#, json!({ "a": "b" }), json!([{ "a": "b" }]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_eq_for_object_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a == @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([{"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_ne_for_object_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a != @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_lt_for_object_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a < @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_le_for_object_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a <= @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_gt_for_object_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a > @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_ge_for_object_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a >= @.c)]"#,
|
||||
json!({"a": { "1": 1 }, "b": { "2": 2 }, "c": { "1": 1 }}),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_eq_for_complex_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(1 == @.a)]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_ne_for_complex_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?("1" != @.a)]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_le_for_complex_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a <= 1)]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_gt_for_complex_value() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$.[?(@.a > "1")]"#,
|
||||
json!({ "a": { "b": 1 } }),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_compare_different_types() {
|
||||
setup();
|
||||
|
||||
for path in [
|
||||
r#"$[?("1" == 1)]"#,
|
||||
r#"$[?(1 == "1")]"#,
|
||||
r#"$[?(true == 1)]"#,
|
||||
r#"$[?(@ == 1)]"#,
|
||||
]
|
||||
.iter()
|
||||
{
|
||||
select_and_then_compare(path, json!({}), json!([]));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_for_same_type() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
r#"$..[?(@.a == 1)]"#,
|
||||
json!({
|
||||
"a": 1,
|
||||
"b" : {"a": 1},
|
||||
"c" : {"a": 1}
|
||||
}),
|
||||
json!([
|
||||
{"a": 1},
|
||||
{"a": 1}
|
||||
]),
|
||||
);
|
||||
}
|
115 tests/paths.rs (new file)
@@ -0,0 +1,115 @@
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use common::{select_and_then_compare, setup};
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn dolla_token_in_path() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$..$ref",
|
||||
json!({
|
||||
"Junk1": "This is a test to illustrate use of '$' in the attr for the expression $..['$ref'] ",
|
||||
"$ref": "Match Root",
|
||||
"Subset1":[
|
||||
{"Junk2": "Data...",
|
||||
"$ref": "Match Subset1"
|
||||
}
|
||||
],
|
||||
"hierachy1":{
|
||||
"hierachy2.1":{
|
||||
"hierachy2.1.1":{ "$ref":"Match 2.1.1"},
|
||||
"hierachy2.1.2":{ "ref":"Match 2.1.2"},
|
||||
"hierachy2.1.3":{ "ref":"No Match 2.1.3"},
|
||||
"hierachy2.1.4":{ "$ref":"Match 2.1.4"},
|
||||
"hierachy2.1.5":{ "ref":"No Match 2.1.5"}
|
||||
},
|
||||
"hierachy2.2":{
|
||||
"hierachy2.2.1":{ "ref":"No Match 2.2.1"},
|
||||
"hierachy2.2.2":{ "$ref":"Match 2.2.2"},
|
||||
"hierachy2.2.3":{ "ref":"No Match 2.2.3"},
|
||||
"hierachy2.2.4":{ "ref":"No Match 2.2.5"},
|
||||
"hierachy2.2.5":{ "$ref":"Match 2.2.5"}
|
||||
},
|
||||
"hierachy2.3":{
|
||||
"hierachy2.3.1":{ "ref":"No Match 2.3.1"},
|
||||
"hierachy2.3.2":{ "ref":"No Match 2.3.2"},
|
||||
"hierachy2.3.3":{ "ref":"No Match 2.3.3"},
|
||||
"hierachy2.3.4":{ "ref":"No Match 2.3.4"},
|
||||
"hierachy2.3.5":{ "ref":"No Match 2.3.5"},
|
||||
"hierachy2.3.6":{
|
||||
"hierachy2.3.6.1":{ "$ref":"Match 2.3.6.1"},
|
||||
"hierachy2.3.6.2":{ "ref":"No Match 2.3.6.2"},
|
||||
"hierachy2.3.6.3":{ "ref":"No Match 2.3.6.3"},
|
||||
"hierachy2.3.6.4":{ "ref":"No Match 2.3.6.4"},
|
||||
"hierachy2.3.6.5":{ "ref":"No Match 2.3.6.5"}
|
||||
}
|
||||
}
|
||||
}
|
||||
}),
|
||||
json!([
|
||||
"Match Root",
|
||||
"Match Subset1",
|
||||
"Match 2.1.1",
|
||||
"Match 2.1.4",
|
||||
"Match 2.2.2",
|
||||
"Match 2.2.5",
|
||||
"Match 2.3.6.1"
|
||||
]),
|
||||
);
|
||||
|
||||
select_and_then_compare(
|
||||
"$..['$ref']",
|
||||
json!({
|
||||
"Junk1": "This is a test to illustrate use of '$' in the attr for the expression $..['$ref'] ",
|
||||
"$ref": "Match Root",
|
||||
"Subset1":[
|
||||
{"Junk2": "Data...",
|
||||
"$ref": "Match Subset1"
|
||||
}
|
||||
],
|
||||
"hierachy1":{
|
||||
"hierachy2.1":{
|
||||
"hierachy2.1.1":{ "$ref":"Match 2.1.1"},
|
||||
"hierachy2.1.2":{ "ref":"Match 2.1.2"},
|
||||
"hierachy2.1.3":{ "ref":"No Match 2.1.3"},
|
||||
"hierachy2.1.4":{ "$ref":"Match 2.1.4"},
|
||||
"hierachy2.1.5":{ "ref":"No Match 2.1.5"}
|
||||
},
|
||||
"hierachy2.2":{
|
||||
"hierachy2.2.1":{ "ref":"No Match 2.2.1"},
|
||||
"hierachy2.2.2":{ "$ref":"Match 2.2.2"},
|
||||
"hierachy2.2.3":{ "ref":"No Match 2.2.3"},
|
||||
"hierachy2.2.4":{ "ref":"No Match 2.2.5"},
|
||||
"hierachy2.2.5":{ "$ref":"Match 2.2.5"}
|
||||
},
|
||||
"hierachy2.3":{
|
||||
"hierachy2.3.1":{ "ref":"No Match 2.3.1"},
|
||||
"hierachy2.3.2":{ "ref":"No Match 2.3.2"},
|
||||
"hierachy2.3.3":{ "ref":"No Match 2.3.3"},
|
||||
"hierachy2.3.4":{ "ref":"No Match 2.3.4"},
|
||||
"hierachy2.3.5":{ "ref":"No Match 2.3.5"},
|
||||
"hierachy2.3.6":{
|
||||
"hierachy2.3.6.1":{ "$ref":"Match 2.3.6.1"},
|
||||
"hierachy2.3.6.2":{ "ref":"No Match 2.3.6.2"},
|
||||
"hierachy2.3.6.3":{ "ref":"No Match 2.3.6.3"},
|
||||
"hierachy2.3.6.4":{ "ref":"No Match 2.3.6.4"},
|
||||
"hierachy2.3.6.5":{ "ref":"No Match 2.3.6.5"}
|
||||
}
|
||||
}
|
||||
}
|
||||
}),
|
||||
json!([
|
||||
"Match Root",
|
||||
"Match Subset1",
|
||||
"Match 2.1.1",
|
||||
"Match 2.1.4",
|
||||
"Match 2.2.2",
|
||||
"Match 2.2.5",
|
||||
"Match 2.3.6.1"
|
||||
]),
|
||||
);
|
||||
}
|
@@ -173,7 +173,7 @@ fn readme_selector() {
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let mut selector = Selector::new();
|
||||
let mut selector = Selector::default();
|
||||
|
||||
let result = selector
|
||||
.str_path("$..[?(@.age >= 30)]")
|
||||
@@ -211,7 +211,7 @@ fn readme_selector_mut() {
|
||||
{"name": "친구4"}
|
||||
]});
|
||||
|
||||
let mut selector_mut = SelectorMut::new();
|
||||
let mut selector_mut = SelectorMut::default();
|
||||
|
||||
let result = selector_mut
|
||||
.str_path("$..[?(@.age == 20)].age")
|
||||
@@ -224,7 +224,7 @@ fn readme_selector_mut() {
|
||||
0
|
||||
};
|
||||
|
||||
json!(age)
|
||||
Some(json!(age))
|
||||
})
|
||||
.unwrap()
|
||||
.take()
|
||||
@@ -482,12 +482,10 @@ fn readme_delete() {
|
||||
|
||||
#[test]
|
||||
fn readme_delete2() {
|
||||
let json_obj = common::read_json("./benches/example.json");
|
||||
let json_obj = common::read_json("./benchmark/example.json");
|
||||
|
||||
let ret = jsonpath::delete(json_obj, "$.store.book").unwrap();
|
||||
|
||||
println!("{:?}", ret);
|
||||
|
||||
assert_eq!(
|
||||
ret,
|
||||
json!({
|
||||
@@ -524,7 +522,7 @@ fn readme_replace_with() {
|
||||
0
|
||||
};
|
||||
|
||||
json!(age)
|
||||
Some(json!(age))
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
|
108 tests/return_type.rs (new file)
@@ -0,0 +1,108 @@
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use common::{read_json, select_and_then_compare, setup};
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn return_type_for_single_object() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type_for_single_object_key_matched() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.friends[?(@.name)]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([
|
||||
{ "id" : 1, "name" : "Vincent Cannon" },
|
||||
{ "id" : 2, "name" : "Gray Berry" }
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type_for_child_object_matched() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(@.friends[0])]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type_for_child_object_not_matched() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(@.friends[10])]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type_for_object_filter_true() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school[?(1==1)]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([{
|
||||
"friends": [
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]
|
||||
}]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type_for_array_filter_true() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$.school.friends[?(1==1)]",
|
||||
read_json("./benchmark/data_obj.json"),
|
||||
json!([[
|
||||
{"id": 0, "name": "Millicent Norman"},
|
||||
{"id": 1, "name": "Vincent Cannon" },
|
||||
{"id": 2, "name": "Gray Berry"}
|
||||
]]),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type_empty() {
|
||||
setup();
|
||||
|
||||
select_and_then_compare(
|
||||
"$[?(@.key==43)]",
|
||||
json!([{"key": 42}]),
|
||||
json!([]),
|
||||
);
|
||||
}
|
131 tests/selector.rs (new file)
@@ -0,0 +1,131 @@
|
||||
extern crate jsonpath_lib as jsonpath;
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
|
||||
use common::{read_json, setup};
|
||||
use jsonpath::{Parser, Selector, SelectorMut};
|
||||
use serde_json::Value;
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn selector_mut() {
|
||||
setup();
|
||||
|
||||
let mut selector_mut = SelectorMut::default();
|
||||
|
||||
let mut nums = Vec::new();
|
||||
let result = selector_mut
|
||||
.str_path(r#"$.store..price"#)
|
||||
.unwrap()
|
||||
.value(read_json("./benchmark/example.json"))
|
||||
.replace_with(&mut |v| {
|
||||
if let Value::Number(n) = v {
|
||||
nums.push(n.as_f64().unwrap());
|
||||
}
|
||||
Some(Value::String("a".to_string()))
|
||||
})
|
||||
.unwrap()
|
||||
.take()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
nums,
|
||||
vec![8.95_f64, 12.99_f64, 8.99_f64, 22.99_f64, 19.95_f64]
|
||||
);
|
||||
|
||||
let mut selector = Selector::default();
|
||||
let result = selector
|
||||
.str_path(r#"$.store..price"#)
|
||||
.unwrap()
|
||||
.value(&result)
|
||||
.select()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
vec![
|
||||
&json!("a"),
|
||||
&json!("a"),
|
||||
&json!("a"),
|
||||
&json!("a"),
|
||||
&json!("a")
|
||||
],
|
||||
result
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn selector_node_ref() {
|
||||
let node = Parser::compile("$.*").unwrap();
|
||||
let mut selector = Selector::default();
|
||||
selector.compiled_path(&node);
|
||||
assert!(std::ptr::eq(selector.node_ref().unwrap(), &node));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn selector_delete() {
|
||||
setup();
|
||||
|
||||
let mut selector_mut = SelectorMut::default();
|
||||
|
||||
let result = selector_mut
|
||||
.str_path(r#"$.store..price[?(@>13)]"#)
|
||||
.unwrap()
|
||||
.value(read_json("./benchmark/example.json"))
|
||||
.delete()
|
||||
.unwrap()
|
||||
.take()
|
||||
.unwrap();
|
||||
|
||||
let mut selector = Selector::default();
|
||||
let result = selector
|
||||
.str_path(r#"$.store..price"#)
|
||||
.unwrap()
|
||||
.value(&result)
|
||||
.select()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![
|
||||
&json!(8.95),
|
||||
&json!(12.99),
|
||||
&json!(8.99),
|
||||
&Value::Null,
|
||||
&Value::Null
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn selector_remove() {
|
||||
setup();
|
||||
|
||||
let mut selector_mut = SelectorMut::default();
|
||||
|
||||
let result = selector_mut
|
||||
.str_path(r#"$.store..price[?(@>13)]"#)
|
||||
.unwrap()
|
||||
.value(read_json("./benchmark/example.json"))
|
||||
.remove()
|
||||
.unwrap()
|
||||
.take()
|
||||
.unwrap();
|
||||
|
||||
let mut selector = Selector::default();
|
||||
let result = selector
|
||||
.str_path(r#"$.store..price"#)
|
||||
.unwrap()
|
||||
.value(&result)
|
||||
.select()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![
|
||||
&json!(8.95),
|
||||
&json!(12.99),
|
||||
&json!(8.99)
|
||||
]
|
||||
);
|
||||
}
|
@@ -1,6 +1,6 @@
[package]
name = "jsonpath-wasm"
version = "0.2.2"
version = "0.2.5"
authors = ["Changseok Han <freestrings@gmail.com>"]
description = "It is Webassembly version of jsonpath_lib that is JsonPath engine written in Rust - Demo: https://freestrings.github.io/jsonpath"
keywords = ["jsonpath", "json", "webassembly", "parsing", "rust"]
@@ -14,10 +14,10 @@ crate-type = ["cdylib", "rlib"]
default = ["console_error_panic_hook", "wee_alloc"]

[dependencies]
cfg-if = "0.1.2"
cfg-if = "0.1"
wasm-bindgen = { version = "0.2", features = ["serde-serialize"] }
console_error_panic_hook = { version = "0.1.1", optional = true }
wee_alloc = { version = "0.4.2", optional = true }
console_error_panic_hook = { version = "0.1", optional = true }
wee_alloc = { version = "0.4", optional = true }

jsonpath_lib = { path = "../" }
serde = "1.0"
@@ -1,6 +1,5 @@
import * as jsonpath from "jsonpath-wasm";

let jsonObj = {
"school": {
"friends": [
@@ -19,14 +18,21 @@ let ret = [
{"name": "친구1", "age": 20}
];

let selector = new jsonpath.Selector();
selector.path('$..friends[0]');
selector.value(jsonObj);
const path = '$..friends[0]';

let selectToObj = selector.selectTo();
let selectToString = selector.selectToStr();
let ret1 = jsonpath.select(jsonObj, path);
let ret2 = jsonpath.compile(path)(jsonObj);
let ret3 = jsonpath.selector(jsonObj)(path);

let selector = new jsonpath.Selector();
selector.path(path);
selector.value(jsonObj);
let ret4 = selector.select();

console.log(
JSON.stringify(ret) == JSON.stringify(selectToObj),
JSON.stringify(ret) == selectToString
JSON.stringify(ret) == JSON.stringify(ret1),
JSON.stringify(ret) == JSON.stringify(ret2),
JSON.stringify(ret) == JSON.stringify(ret3),
JSON.stringify(ret) == JSON.stringify(ret4)
);
@@ -7,9 +7,12 @@
"start": "webpack-dev-server"
},
"devDependencies": {
"copy-webpack-plugin": "^5.0.1",
"copy-webpack-plugin": "^5.1.1",
"webpack": "^4.29.6",
"webpack-cli": "^3.3.0",
"webpack-dev-server": "^3.2.1"
},
"dependencies": {
"jsonpath-wasm": "^0.2.4"
}
}
@@ -18,14 +18,20 @@ let ret = [
{"name": "친구1", "age": 20}
];

let selector = new jsonpath.Selector();
selector.path('$..friends[0]');
selector.value(jsonObj);
const path = '$..friends[0]';

let selectToObj = selector.selectTo();
let selectToString = selector.selectToStr();
let ret1 = jsonpath.select(jsonObj, path);
let ret2 = jsonpath.compile(path)(jsonObj);
let ret3 = jsonpath.selector(jsonObj)(path);

let selector = new jsonpath.Selector();
selector.path(path);
selector.value(jsonObj);
let ret4 = selector.select();

console.log(
JSON.stringify(ret) == JSON.stringify(selectToObj),
JSON.stringify(ret) == selectToString
);
JSON.stringify(ret) == JSON.stringify(ret1),
JSON.stringify(ret) == JSON.stringify(ret2),
JSON.stringify(ret) == JSON.stringify(ret3),
JSON.stringify(ret) == JSON.stringify(ret4)
);
@@ -4,6 +4,6 @@
"start": "node index.js"
},
"dependencies": {
"jsonpath-wasm": "0"
"jsonpath-wasm": "^0.2.4"
}
}
}
Some files were not shown because too many files have changed in this diff.