Mirror of https://github.com/fluencelabs/jsonpath (synced 2025-06-18 02:21:30 +00:00)

Commit: Add NodeJs native binding
14 .idea/runConfigurations/_trace__filter__array.xml (generated)

@@ -1,14 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="[trace] filter::array" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib filter::tests::array" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs>
      <env name="RUST_LOG" value="trace" />
    </envs>
    <method v="2" />
  </configuration>
</component>
14 .idea/runConfigurations/_trace__filter__example.xml (generated)

@@ -1,14 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="[trace] filter::example" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib filter::tests::example" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs>
      <env name="RUST_LOG" value="trace" />
    </envs>
    <method v="2" />
  </configuration>
</component>
14 .idea/runConfigurations/_trace__filter__op.xml (generated)

@@ -1,14 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="[trace] filter::op" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib filter::tests::op" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs>
      <env name="RUST_LOG" value="trace" />
    </envs>
    <method v="2" />
  </configuration>
</component>
@@ -1,14 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="[trace] filter::return_type" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib filter::tests::return_type" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs>
      <env name="RUST_LOG" value="trace" />
    </envs>
    <method v="2" />
  </configuration>
</component>
14 .idea/runConfigurations/_trace__filter__step_in.xml (generated)

@@ -1,14 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="[trace] filter::step_in" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib filter::tests::step_in" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs>
      <env name="RUST_LOG" value="trace" />
    </envs>
    <method v="2" />
  </configuration>
</component>
@@ -1,14 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="[trace] parser::parse_array_float" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib parser::parser::tests::parse_array_float" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs>
      <env name="RUST_LOG" value="trace" />
    </envs>
    <method v="2" />
  </configuration>
</component>
@@ -1,14 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="[trace] parser::parse_array_sytax" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib parser::parser::tests::parse_array_sytax" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs>
      <env name="RUST_LOG" value="trace" />
    </envs>
    <method v="2" />
  </configuration>
</component>
@@ -1,14 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="[trace] parser::parse_path" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib parser::parser::tests::parse_path" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs>
      <env name="RUST_LOG" value="trace" />
    </envs>
    <method v="2" />
  </configuration>
</component>
6 .idea/runConfigurations/all.xml (generated)

@@ -1,10 +1,10 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="all" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib --all" />
    <option name="command" value="test --package jsonpath_lib" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="nocapture" value="false" />
    <option name="backtrace" value="SHORT" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <method v="2" />
12 .idea/runConfigurations/filter.xml (generated, new file)

@@ -0,0 +1,12 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="filter" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --package jsonpath_lib --test filter """ />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="false" />
    <option name="backtrace" value="SHORT" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <method v="2" />
  </configuration>
</component>
12 .idea/runConfigurations/filter_all.xml (generated)

@@ -1,12 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="filter-all" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib filter::tests" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <method v="2" />
  </configuration>
</component>
12 .idea/runConfigurations/lib.xml (generated, new file)

@@ -0,0 +1,12 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="lib" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --package jsonpath_lib --test lib """ />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="false" />
    <option name="backtrace" value="SHORT" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <method v="2" />
  </configuration>
</component>
12 .idea/runConfigurations/parser.xml (generated, new file)

@@ -0,0 +1,12 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="parser" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --package jsonpath_lib --test parser """ />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="false" />
    <option name="backtrace" value="SHORT" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <method v="2" />
  </configuration>
</component>
12 .idea/runConfigurations/parser_all.xml (generated)

@@ -1,12 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="parser-all" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib parser::parser" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <method v="2" />
  </configuration>
</component>
12 .idea/runConfigurations/tokenize_all.xml (generated)

@@ -1,12 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="tokenize-all" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --lib parser::tokenizer" />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="true" />
    <option name="backtrace" value="NO" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <method v="2" />
  </configuration>
</component>
12 .idea/runConfigurations/tokenizer.xml (generated, new file)

@@ -0,0 +1,12 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="tokenizer" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="channel" value="DEFAULT" />
    <option name="command" value="test --package jsonpath_lib --test tokenizer """ />
    <option name="allFeatures" value="false" />
    <option name="nocapture" value="false" />
    <option name="backtrace" value="SHORT" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <method v="2" />
  </configuration>
</component>
80 .travis.yml

@@ -13,9 +13,6 @@ matrix:

    - rust: stable
      env: RUST_BACKTRACE=1
      # addons:
      #   firefox: latest
      #   chrome: stable
      before_script:
        - (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
        - (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
@@ -26,64 +23,21 @@ matrix:
        - cargo test --verbose --all
        - cd wasm
        - wasm-pack build
        # - wasm-pack test --chrome --firefox --headless

    # Builds with wasm-pack.
    - rust: beta
      env: RUST_BACKTRACE=1
      # addons:
      #   firefox: latest
      #   chrome: stable
      before_script:
        - (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
        - (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
        - cargo install-update -a
        - curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f
    - language: node_js
      node_js:
        - 'node'
        - '11'
        - '10'
        - '9'
        - '8'
      before_install:
        - curl https://sh.rustup.rs -sSf > /tmp/rustup.sh
        - sh /tmp/rustup.sh -y
        - export PATH="$HOME/.cargo/bin:$PATH"
        - source "$HOME/.cargo/env"
        - cd nodejs
        - node -v
        - npm -v
        - npm install
      script:
        - cargo build --verbose --all
        - cargo test --verbose --all
        - cd wasm
        - wasm-pack build
        # - wasm-pack test --chrome --firefox --headless

    # Builds on nightly.
    - rust: nightly
      env: RUST_BACKTRACE=1
      before_script:
        - (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
        - (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
        - cargo install-update -a
        - rustup target add wasm32-unknown-unknown
      script:
        - cargo build --verbose --all
        - cargo test --verbose --all
        - cd wasm
        - cargo check
        - cargo check --target wasm32-unknown-unknown
        - cargo check --no-default-features
        - cargo check --target wasm32-unknown-unknown --no-default-features
        - cargo check --no-default-features --features console_error_panic_hook
        - cargo check --target wasm32-unknown-unknown --no-default-features --features console_error_panic_hook
        - cargo check --no-default-features --features "console_error_panic_hook wee_alloc"
        - cargo check --target wasm32-unknown-unknown --no-default-features --features "console_error_panic_hook wee_alloc"

    # Builds on beta.
    - rust: beta
      env: RUST_BACKTRACE=1
      before_script:
        - (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update)
        - (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate)
        - cargo install-update -a
        - rustup target add wasm32-unknown-unknown
      script:
        - cargo build --verbose --all
        - cargo test --verbose --all
        - cd wasm
        - cargo check
        - cargo check --target wasm32-unknown-unknown
        - cargo check --no-default-features
        - cargo check --target wasm32-unknown-unknown --no-default-features
        - cargo check --no-default-features --features console_error_panic_hook
        - cargo check --target wasm32-unknown-unknown --no-default-features --features console_error_panic_hook
        # Note: no enabling the `wee_alloc` feature here because it requires
        # nightly for now.
        - npm test
@@ -30,5 +30,5 @@ name = "jsonpath_lib"
path = "src/lib.rs"

[profile.release]
debug = true
lto = false
#debug = true
#lto = false
@@ -15,13 +15,39 @@ fn read_json(path: &str) -> String {
}

#[bench]
fn bench_reader(b: &mut Bencher) {
fn bench_selector(b: &mut Bencher) {
    let string = read_json("./benches/example.json");
    let path = r#"$..book[?(@.price<30 && @.category=="fiction")]"#;
    let json: Value = serde_json::from_str(string.as_str()).unwrap();
    let mut reader = jsonpath::reader(json);
    let mut selector = jsonpath::selector(&json);
    b.iter(move || {
        for _ in 1..10000 {
            let _ = reader("$.store").unwrap();
        for _ in 1..1000 {
            let _ = selector(path).unwrap();
        }
    });
}

#[bench]
fn bench_select(b: &mut Bencher) {
    let string = read_json("./benches/example.json");
    let path = r#"$..book[?(@.price<30 && @.category=="fiction")]"#;
    let json: Value = serde_json::from_str(string.as_str()).unwrap();
    b.iter(move || {
        for _ in 1..1000 {
            let _ = jsonpath::select(&json, path).unwrap();
        }
    });
}

#[bench]
fn bench_compile(b: &mut Bencher) {
    let string = read_json("./benches/example.json");
    let path = r#"$..book[?(@.price<30 && @.category=="fiction")]"#;
    let json: Value = serde_json::from_str(string.as_str()).unwrap();
    let mut template = jsonpath::compile(path);
    b.iter(move || {
        for _ in 1..1000 {
            let _ = template(&json).unwrap();
        }
    });
}
1 benches/bench_bin/.gitignore (vendored)

@@ -2,3 +2,4 @@
**/*.rs.bk
Cargo.lock
bin/
.idea
15 benches/bench_bin/.idea/bench_bin.iml (generated, new file)

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
  <component name="NewModuleRootManager" inherit-compiler-output="true">
    <exclude-output />
    <content url="file://$MODULE_DIR$">
      <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
      <sourceFolder url="file://$MODULE_DIR$/examples" isTestSource="false" />
      <sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
      <sourceFolder url="file://$MODULE_DIR$/benches" isTestSource="true" />
      <excludeFolder url="file://$MODULE_DIR$/target" />
    </content>
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
4 benches/bench_bin/.idea/encodings.xml (generated, new file)

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="Encoding" addBOMForNewFiles="with NO BOM" />
</project>
9 benches/bench_bin/.idea/misc.xml (generated, new file)

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="CargoProjects">
    <cargoProject FILE="$PROJECT_DIR$/Cargo.toml" />
  </component>
  <component name="RustProjectSettings">
    <option name="toolchainHomeDirectory" value="$USER_HOME$/.cargo/bin" />
  </component>
</project>
8 benches/bench_bin/.idea/modules.xml (generated, new file)

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/bench_bin.iml" filepath="$PROJECT_DIR$/.idea/bench_bin.iml" />
    </modules>
  </component>
</project>
6 benches/bench_bin/.idea/vcs.xml (generated, new file)

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
  </component>
</project>
152 benches/bench_bin/.idea/workspace.xml (generated, new file)

@@ -0,0 +1,152 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ChangeListManager">
    <list default="true" id="3fa6f740-0ee1-4afb-b0ae-9239bf5ced3d" name="Default Changelist" comment="">
      <change beforePath="$PROJECT_DIR$/../bench.rs" beforeDir="false" afterPath="$PROJECT_DIR$/../bench.rs" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/src/main.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/main.rs" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/../bench_node_vs_rust.sh" beforeDir="false" afterPath="$PROJECT_DIR$/../bench_node_vs_rust.sh" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/../javascript/bench.js" beforeDir="false" afterPath="$PROJECT_DIR$/../javascript/bench.js" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/../../nodejs/lib/index.js" beforeDir="false" afterPath="$PROJECT_DIR$/../../nodejs/lib/index.js" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/../../nodejs/native/src/lib.rs" beforeDir="false" afterPath="$PROJECT_DIR$/../../nodejs/native/src/lib.rs" afterDir="false" />
    </list>
    <option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
    <option name="SHOW_DIALOG" value="false" />
    <option name="HIGHLIGHT_CONFLICTS" value="true" />
    <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
    <option name="LAST_RESOLUTION" value="IGNORE" />
  </component>
  <component name="FileEditorManager">
    <leaf SIDE_TABS_SIZE_LIMIT_KEY="300" />
  </component>
  <component name="Git.Settings">
    <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$/../.." />
  </component>
  <component name="IdeDocumentHistory">
    <option name="CHANGED_PATHS">
      <list>
        <option value="$PROJECT_DIR$/../../src/lib.rs" />
        <option value="$PROJECT_DIR$/src/main.rs" />
      </list>
    </option>
  </component>
  <component name="ProjectFrameBounds" extendedState="6">
    <option name="x" value="67" />
    <option name="y" value="27" />
    <option name="width" value="1533" />
    <option name="height" value="1053" />
  </component>
  <component name="ProjectLevelVcsManager" settingsEditedManually="true" />
  <component name="ProjectView">
    <navigator proportions="" version="1">
      <foldersAlwaysOnTop value="true" />
    </navigator>
    <panes>
      <pane id="Scope" />
      <pane id="PackagesPane" />
      <pane id="ProjectPane">
        <subPane>
          <expand>
            <path>
              <item name="bench_bin" type="b2602c69:ProjectViewProjectNode" />
              <item name="bench_bin" type="462c0819:PsiDirectoryNode" />
            </path>
            <path>
              <item name="bench_bin" type="b2602c69:ProjectViewProjectNode" />
              <item name="bench_bin" type="462c0819:PsiDirectoryNode" />
              <item name="src" type="462c0819:PsiDirectoryNode" />
            </path>
          </expand>
          <select />
        </subPane>
      </pane>
    </panes>
  </component>
  <component name="PropertiesComponent">
    <property name="last_opened_file_path" value="$PROJECT_DIR$" />
    <property name="org.rust.cargo.project.model.PROJECT_DISCOVERY" value="true" />
  </component>
  <component name="RunDashboard">
    <option name="ruleStates">
      <list>
        <RuleState>
          <option name="name" value="ConfigurationTypeDashboardGroupingRule" />
        </RuleState>
        <RuleState>
          <option name="name" value="StatusDashboardGroupingRule" />
        </RuleState>
      </list>
    </option>
  </component>
  <component name="SvnConfiguration">
    <configuration />
  </component>
  <component name="TaskManager">
    <task active="true" id="Default" summary="Default task">
      <changelist id="3fa6f740-0ee1-4afb-b0ae-9239bf5ced3d" name="Default Changelist" comment="" />
      <created>1552690262696</created>
      <option name="number" value="Default" />
      <option name="presentableId" value="Default" />
      <updated>1552690262696</updated>
    </task>
    <servers />
  </component>
  <component name="ToolWindowManager">
    <frame x="67" y="25" width="1853" height="1055" extended-state="6" />
    <layout>
      <window_info active="true" content_ui="combo" id="Project" order="0" visible="true" weight="0.23076923" />
      <window_info id="Structure" order="1" side_tool="true" weight="0.25" />
      <window_info id="Designer" order="2" />
      <window_info id="Favorites" order="3" side_tool="true" />
      <window_info anchor="bottom" id="Message" order="0" />
      <window_info anchor="bottom" id="Find" order="1" />
      <window_info anchor="bottom" id="Run" order="2" weight="0.32829374" />
      <window_info anchor="bottom" id="Debug" order="3" weight="0.4" />
      <window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
      <window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
      <window_info anchor="bottom" id="TODO" order="6" />
      <window_info anchor="bottom" id="Version Control" order="7" />
      <window_info anchor="bottom" id="Terminal" order="8" />
      <window_info anchor="bottom" id="Event Log" order="9" side_tool="true" />
      <window_info anchor="right" id="Commander" internal_type="SLIDING" order="0" type="SLIDING" weight="0.4" />
      <window_info anchor="right" id="Ant Build" order="1" weight="0.25" />
      <window_info anchor="right" content_ui="combo" id="Hierarchy" order="2" weight="0.25" />
      <window_info anchor="right" id="Maven" order="3" />
      <window_info anchor="right" id="Cargo" order="4" />
      <window_info anchor="right" id="Palette	" order="5" />
    </layout>
  </component>
  <component name="editorHistoryManager">
    <entry file="file://$PROJECT_DIR$/../../src/lib.rs">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="353">
          <caret line="297" column="1" lean-forward="true" selection-start-line="297" selection-start-column="1" selection-end-line="297" selection-end-column="1" />
        </state>
      </provider>
    </entry>
    <entry file="file://$USER_HOME$/.cargo/registry/src/github.com-1ecc6299db9ec823/serde_json-1.0.39/src/de.rs">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="279">
          <caret line="2311" column="47" selection-start-line="2311" selection-start-column="47" selection-end-line="2311" selection-end-column="47" />
        </state>
      </provider>
    </entry>
    <entry file="file://$USER_HOME$/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/src/libcore/str/mod.rs">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="197">
          <caret line="3895" column="11" selection-start-line="3895" selection-start-column="11" selection-end-line="3895" selection-end-column="11" />
          <folding>
            <element signature="e#126082#126083#0" expanded="true" />
            <element signature="e#126120#126121#0" expanded="true" />
          </folding>
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/src/main.rs">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="-680">
          <caret line="6" column="13" selection-start-line="6" selection-start-column="13" selection-end-line="6" selection-end-column="13" />
        </state>
      </provider>
    </entry>
  </component>
</project>
@@ -2,7 +2,7 @@
set -e

if [ -d "target/release" ]; then
    ./target/release/bench_bin
    ./target/release/bench_bin $1 $2
else
    echo "빌드먼저"
fi
@@ -4,6 +4,8 @@ extern crate serde_json;
use serde_json::Value;
use std::io::Read;

use std::env;

fn read_json(path: &str) -> String {
    let mut f = std::fs::File::open(path).unwrap();
    let mut contents = String::new();
@@ -14,8 +16,33 @@ fn read_json(path: &str) -> String {
fn main() {
    let string = read_json("../example.json");
    let json: Value = serde_json::from_str(string.as_str()).unwrap();
    let mut selector = jsonpath::selector(json);
    for _ in 1..100000 {
        let _ = selector(r#"$..book[?(@.price<30 && @.category=="fiction")]"#).unwrap();
    let path = r#"$..book[?(@.price<30 && @.category=="fiction")]"#;

    let args: Vec<String> = env::args().collect();
    let iter = match &args[2].as_str().parse::<usize>() {
        Ok(iter) => *iter,
        _ => 100000
    };

    match &args[1].as_str() {
        &"compile" => {
            let mut template = jsonpath::compile(path);
            for _ in 1..iter {
                let _ = template(&json).unwrap();
            }
        }
        &"selector" => {
            let mut selector = jsonpath::selector(&json);
            for _ in 1..iter {
                let _ = selector(path).unwrap();
            }
        }
        &"select" => {
            let json: Value = serde_json::from_str(string.as_str()).unwrap();
            for _ in 1..iter {
                let _ = jsonpath::select(&json, path).unwrap();
            }
        }
        _ => panic!("Invalid argument")
    }
}
@@ -5,19 +5,42 @@ DIR="$(pwd)"

cd "${DIR}"/bench_bin && cargo build --release

printf "\n\n$..book[?(@.price<30 && @.category=="fiction")] (loop 100,000)"
ITER=100000

printf "\n\n$..book[?(@.price<30 && @.category=="fiction")] (loop ${ITER})"
printf "\n\n"

echo "Rust: " && time ./bench.sh
#echo "Rust - compile: " && time ./bench.sh compile ${ITER}
#printf "\n"
#sleep 1
#echo "Rust - selector: " && time ./bench.sh selector ${ITER}
#printf "\n"
#sleep 1
echo "Rust - select: " && time ./bench.sh select ${ITER}
printf "\n"
cd "${DIR}"/javascript && echo "NodeJs - jsonpath module: " && time ./bench.sh jsonpath
sleep 1
cd "${DIR}"/javascript && echo "NodeJs - jsonpath: " && time ./bench.sh jsonpath ${ITER}
printf "\n"
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm module - selector: " && time ./bench.sh wasmSelector
sleep 1
#cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm - selector: " && time ./bench.sh wasmSelector ${ITER}
#printf "\n"
#sleep 1
#cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm - compile: " && time ./bench.sh wasmCompile ${ITER}
#printf "\n"
#sleep 1
#cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm - compile-alloc: " && time ./bench.sh wasmCompileAlloc ${ITER}
#printf "\n"
#sleep 1
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm - select:" && time ./bench.sh wasmSelect ${ITER}
printf "\n"
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm module - compile: " && time ./bench.sh wasmCompile
printf "\n"
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm module - compile-alloc: " && time ./bench.sh wasmCompileAlloc
printf "\n"
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm module - select:" && time ./bench.sh wasmSelect
printf "\n"
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm module - select-alloc:" && time ./bench.sh wasmSelectAlloc
sleep 1
#cd "${DIR}"/javascript && echo "NodeJs - jsonpath-wasm - select-alloc:" && time ./bench.sh wasmSelectAlloc ${ITER}
#printf "\n"
#sleep 1
#cd "${DIR}"/javascript && echo "NodeJs - jsonpath-rs - compile:" && time ./bench.sh nativeCompile ${ITER}
#printf "\n"
#sleep 1
#cd "${DIR}"/javascript && echo "NodeJs - jsonpath-rs - selector:" && time ./bench.sh nativeSelector ${ITER}
#printf "\n"
#sleep 1
cd "${DIR}"/javascript && echo "NodeJs - jsonpath-rs - select:" && time ./bench.sh nativeSelect ${ITER}
@@ -35,40 +35,65 @@ let json = {
    },
    'expensive': 10,
};
let jsonStr = JSON.stringify(json);

function getJson() {
    return JSON.parse(jsonStr);
}
const path = '$..book[?(@.price<30 && @.category=="fiction")]';
const jp = require('jsonpath');
const jpw = require('@nodejs/jsonpath-wasm');
const iter = 100000;
const jpwRs = require('jsonpath-rs');

function jsonpath() {
    for (var i = 0; i < iter; i++) {
        let _ = jp.query(json, '$..book[?(@.price<30 && @.category=="fiction")]');
        let _ = jp.query(getJson(), path);
    }
}

function nativeCompile() {
    let template = jpwRs.compile(path);
    for (var i = 0; i < iter; i++) {
        let _ = template(JSON.stringify(json));
    }
}

function nativeSelector() {
    let selector = jpwRs.selector(getJson());
    for (var i = 0; i < iter; i++) {
        let _ = selector(path);
    }
}

function nativeSelect() {
    for (var i = 0; i < iter; i++) {
        let _ = jpwRs.select(JSON.stringify(json), path);
    }
}

function wasmSelector() {
    let selector = jpw.selector(json);
    let selector = jpw.selector(getJson());
    for (var i = 0; i < iter; i++) {
        let _ = selector('$..book[?(@.price<30 && @.category=="fiction")]');
        let _ = selector(path);
    }
}

function wasmCompile() {
    let template = jpw.compile('$..book[?(@.price<30 && @.category=="fiction")]');
    let template = jpw.compile(path);
    for (var i = 0; i < iter; i++) {
        let _ = template(json);
        let _ = template(getJson());
    }
}

function wasmCompileAlloc() {
    let ptr = jpw.alloc_json(json);
    let ptr = jpw.alloc_json(getJson());
    if (ptr == 0) {
        console.error('Invalid pointer');
        return;
    }

    try {
        let template = jpw.compile('$..book[?(@.price<30 && @.category=="fiction")]');
        let template = jpw.compile(path);
        for (var i = 0; i < iter; i++) {
            let _ = template(ptr);
        }
@@ -79,12 +104,12 @@ function wasmCompileAlloc() {

function wasmSelect() {
    for (var i = 0; i < iter; i++) {
        let _ = jpw.select(json, '$..book[?(@.price<30 && @.category=="fiction")]');
        let _ = jpw.select(getJson(), path);
    }
}

function wasmSelectAlloc() {
    let ptr = jpw.alloc_json(json);
    let ptr = jpw.alloc_json(getJson());
    if (ptr == 0) {
        console.error('Invalid pointer');
        return;
@@ -92,33 +117,13 @@ function wasmSelectAlloc() {

    try {
        for (var i = 0; i < iter; i++) {
            let _ = jpw.select(ptr, '$..book[?(@.price<30 && @.category=="fiction")]');
            let _ = jpw.select(ptr, path);
        }
    } finally {
        jpw.dealloc_json(ptr);
    }
}

let functionName = process.argv[2];

switch (functionName) {
    case 'jsonpath':
        jsonpath();
        break;
    case 'wasmSelector':
        wasmSelector();
        break;
    case 'wasmCompile':
        wasmCompile();
        break;
    case 'wasmSelect':
        wasmSelect();
        break;
    case 'wasmCompileAlloc':
        wasmCompileAlloc();
        break;
    case 'wasmSelectAlloc':
        wasmSelectAlloc();
    default:
        console.error('Invalid function name');
}
const functionName = process.argv[2];
const iter = parseInt(process.argv[3], 10);
eval(functionName + "()");
@@ -1,2 +1,2 @@
#!/bin/bash
node bench.js $1
node bench.js $1 $2
88 build.sh (Executable file, new file)

@@ -0,0 +1,88 @@
#!/bin/bash

set -e

# project_root
DIR="$(pwd)"
WASM="${DIR}"/wasm
WASM_WWW="${WASM}"/www
WASM_WWW_BENCH="${WASM}"/www_bench
WASM_BROWSER_PKG="${WASM}"/browser_pkg
WASM_NODEJS_PKG="${WASM}"/nodejs_pkg
BENCHES="${DIR}"/benches
BENCHES_JS="${BENCHES}"/javascript
NODEJS="${DIR}"/nodejs
DOCS="${DIR}"/docs
DOCS_BENCH="${DOCS}"/bench

__msg () {
    echo ">>>>>>>>>>$1<<<<<<<<<<"
}

__cargo_clean () {
    cd "${BENCHES}"/bench_bin && cargo clean && \
    cd "${NODEJS}"/native && cargo clean && \
    cd "${WASM}" && cargo clean && \
    cd "${DIR}" && cargo clean
}

echo
__msg "clean"
rm -rf \
    "${WASM_NODEJS_PKG}" \
    "${WASM_BROWSER_PKG}" \
    "${BENCHES_JS}"/node_modules \
    "${NODEJS}"/node_modules \
    "${WASM_WWW}"/node_modules \
    "${WASM_WWW_BENCH}"/node_modules \
    "${WASM_WWW}"/dist \
    "${WASM_WWW_BENCH}"/dist

if [ "$1" = "all" ]; then
    __msg "clean targets"
    __cargo_clean
fi

__msg "npm install"
echo
cd "${WASM_WWW}" && npm install
cd "${WASM_WWW_BENCH}" && npm install
cd "${NODEJS}" && npm install
cd "${BENCHES_JS}" && npm install

echo
echo
__msg "wasm-pack"
cd "${WASM}" && \
    wasm-pack build --target=nodejs --scope nodejs --out-dir nodejs_pkg && \
    cd "${WASM_NODEJS_PKG}" && npm link

cd "${WASM}" && \
    wasm-pack build --target=browser --scope browser --out-dir browser_pkg && \
    cd "${WASM_BROWSER_PKG}" && npm link

echo
__msg "link"
cd "${WASM_WWW}" && \
    npm link @browser/jsonpath-wasm

cd "${WASM_WWW_BENCH}" && \
    npm link @browser/jsonpath-wasm

cd "${BENCHES_JS}" && \
    npm link @nodejs/jsonpath-wasm && \
    npm link jsonpath-rs

echo
__msg "docs"
cd "${WASM_WWW}" && \
    npm run build &&
    rm -f "${DOCS}"/*.js "${DOCS}"/*.wasm "${DOCS}"/*.html && \
    cp "${WASM_WWW}"/dist/*.* "${DOCS}"/

cd "${WASM_WWW_BENCH}" && \
    npm run build &&
    rm -f "${DOCS_BENCH}"/*.js "${DOCS_BENCH}"/*.wasm "${DOCS_BENCH}"/*.html && \
    cp "${WASM_WWW_BENCH}"/dist/*.* "${DOCS_BENCH}"/

__msg "done"
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
14 docs/bench/bootstrap.js (vendored)

@@ -61,9 +61,6 @@
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
/******/ },
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
/******/ },
/******/ "__wbindgen_cb_forget": function(p0i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
/******/ },
@@ -85,14 +82,17 @@
/******/ "__wbindgen_string_get": function(p0i32,p1i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
/******/ },
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
/******/ },
/******/ "__wbindgen_throw": function(p0i32,p1i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
/******/ },
/******/ "__wbindgen_closure_wrapper99": function(p0i32,p1i32,p2i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper99"](p0i32,p1i32,p2i32);
/******/ },
/******/ "__wbindgen_closure_wrapper101": function(p0i32,p1i32,p2i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper101"](p0i32,p1i32,p2i32);
/******/ },
/******/ "__wbindgen_closure_wrapper103": function(p0i32,p1i32,p2i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper103"](p0i32,p1i32,p2i32);
/******/ }
/******/ }
/******/ };
@@ -192,7 +192,7 @@
/******/ promises.push(installedWasmModuleData);
/******/ else {
/******/ var importObject = wasmImportObjects[wasmModuleId]();
/******/ var req = fetch(__webpack_require__.p + "" + {"../browser_pkg/jsonpath_wasm_bg.wasm":"9a826648f4cbc2bc8591"}[wasmModuleId] + ".module.wasm");
/******/ var req = fetch(__webpack_require__.p + "" + {"../browser_pkg/jsonpath_wasm_bg.wasm":"db8564aae9d99ec41b79"}[wasmModuleId] + ".module.wasm");
/******/ var promise;
/******/ if(importObject instanceof Promise && typeof WebAssembly.compileStreaming === 'function') {
/******/ promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {
BIN docs/bench/db8564aae9d99ec41b79.module.wasm (new file)
Binary file not shown.
14 docs/bootstrap.js (vendored)

@@ -61,9 +61,6 @@
/******/ "__wbindgen_object_drop_ref": function(p0i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_drop_ref"](p0i32);
/******/ },
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
/******/ },
/******/ "__wbindgen_cb_forget": function(p0i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_cb_forget"](p0i32);
/******/ },
@@ -85,14 +82,17 @@
/******/ "__wbindgen_string_get": function(p0i32,p1i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_string_get"](p0i32,p1i32);
/******/ },
/******/ "__wbindgen_object_clone_ref": function(p0i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_object_clone_ref"](p0i32);
/******/ },
/******/ "__wbindgen_throw": function(p0i32,p1i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_throw"](p0i32,p1i32);
/******/ },
/******/ "__wbindgen_closure_wrapper99": function(p0i32,p1i32,p2i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper99"](p0i32,p1i32,p2i32);
/******/ },
/******/ "__wbindgen_closure_wrapper101": function(p0i32,p1i32,p2i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper101"](p0i32,p1i32,p2i32);
/******/ },
/******/ "__wbindgen_closure_wrapper103": function(p0i32,p1i32,p2i32) {
/******/ return installedModules["../browser_pkg/jsonpath_wasm.js"].exports["__wbindgen_closure_wrapper103"](p0i32,p1i32,p2i32);
/******/ }
/******/ }
/******/ };
@@ -192,7 +192,7 @@
/******/ promises.push(installedWasmModuleData);
/******/ else {
/******/ var importObject = wasmImportObjects[wasmModuleId]();
/******/ var req = fetch(__webpack_require__.p + "" + {"../browser_pkg/jsonpath_wasm_bg.wasm":"9a826648f4cbc2bc8591"}[wasmModuleId] + ".module.wasm");
/******/ var req = fetch(__webpack_require__.p + "" + {"../browser_pkg/jsonpath_wasm_bg.wasm":"db8564aae9d99ec41b79"}[wasmModuleId] + ".module.wasm");
/******/ var promise;
/******/ if(importObject instanceof Promise && typeof WebAssembly.compileStreaming === 'function') {
/******/ promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {
BIN docs/db8564aae9d99ec41b79.module.wasm (new file)
Binary file not shown.
6 nodejs/.gitignore (vendored, new file)

@@ -0,0 +1,6 @@
native/target
native/index.node
native/artifacts.json
**/*~
**/node_modules
.idea
3 nodejs/README.md (new file)

@@ -0,0 +1,3 @@
# jsonpath-rs

JsonPath engine for NodeJs with Rust native implementation.
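
A minimal usage sketch of the API this README introduces, based on nodejs/lib/index.js and nodejs/package.json later in this diff; the require name "jsonpath-rs" matches the package name, but how the module is installed or linked in a given project is an assumption:

    // Sketch only: assumes the package is available as "jsonpath-rs".
    const jsonpath = require('jsonpath-rs');

    let json = { store: { book: [{ price: 10 }, { price: 40 }] } };

    // One-shot query over a document.
    let matches = jsonpath.select(json, '$..book[?(@.price<30)]');

    // Reuse a compiled path across many documents.
    let template = jsonpath.compile('$..book[?(@.price<30)]');
    let matchesAgain = template(json);

    // Reuse a parsed document across many paths.
    let selector = jsonpath.selector(json);
    let firstPrice = selector('$..book[0].price');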
34 nodejs/lib/index.js (new file)

@@ -0,0 +1,34 @@
const { Compile, Selector, selectStr } = require('../native');

function compile(path) {
    let compile = new Compile(path);
    return (json) => {
        if(typeof json != 'string') {
            json = JSON.stringify(json)
        }
        return compile.template(json);
    };
}

function selector(json) {
    if(typeof json != 'string') {
        json = JSON.stringify(json)
    }
    let selector = new Selector(json);
    return (path) => {
        return selector.selector(path);
    }
}

function select(json, path) {
    if(typeof json != 'string') {
        json = JSON.stringify(json)
    }
    return selectStr(json, path);
}

module.exports = {
    compile,
    selector,
    select
};
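
Each wrapper above stringifies non-string input before crossing into the native module, so a JSON string and the equivalent object should produce the same result. A quick sketch of that equivalence (hedged, not part of the commit):

    const jsonpath = require('../lib/index.js');

    const doc = { a: 1 };
    // Both calls end up in the native selectStr entry point; the object
    // form is JSON.stringify()ed by the wrapper first.
    const fromObject = jsonpath.select(doc, '$.a');
    const fromString = jsonpath.select(JSON.stringify(doc), '$.a');
    console.assert(JSON.stringify(fromObject) === JSON.stringify(fromString));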
5 nodejs/native/.gitignore (vendored, new file)

@@ -0,0 +1,5 @@
.idea/*
.vscode
!.idea/runConfigurations/
/target/
Cargo.lock
24 nodejs/native/Cargo.toml (new file)

@@ -0,0 +1,24 @@
[package]
name = "jsonpath-rs"
version = "0.1.0"
authors = ["Changseok Han <freestrings@gmail.com>"]
description = "JsonPath engine for NodeJs with Rust native implementation."
keywords = ["library", "jsonpath", "json"]
repository = "https://github.com/freestrings/jsonpath"
license = "MIT"

build = "build.rs"
exclude = ["artifacts.json", "index.node"]

[build-dependencies]
neon-build = "0.2.0"

[dependencies]
jsonpath_lib = { path = "../../" }
neon = "0.2.0"
neon-serde = "0.1.1"
serde_json = { version = "1.0", features = ["preserve_order"] }

[lib]
name = "jsonpath_rs"
crate-type = ["dylib"]
7 nodejs/native/build.rs (new file)

@@ -0,0 +1,7 @@
extern crate neon_build;

fn main() {
    neon_build::setup(); // must be called in build.rs

    // add project-specific build logic here...
}
126 nodejs/native/src/lib.rs (new file)

@@ -0,0 +1,126 @@
extern crate jsonpath_lib as jsonpath;
#[macro_use]
extern crate neon;
extern crate neon_serde;
extern crate serde_json;

use jsonpath::prelude::*;
use neon::prelude::*;
use serde_json::Value;

///
/// `neon_serde::from_value` has very poor performance.
///
fn select(mut ctx: FunctionContext) -> JsResult<JsValue> {
    let json_val = ctx.argument::<JsValue>(0)?;
    let json: Value = neon_serde::from_value(&mut ctx, json_val)?;
    let path = ctx.argument::<JsString>(1)?.value();

    match jsonpath::select(&json, path.as_str()) {
        Ok(value) => Ok(neon_serde::to_value(&mut ctx, &value)?),
        Err(e) => panic!("{:?}", e)
    }
}

fn select_str(mut ctx: FunctionContext) -> JsResult<JsValue> {
    let json_val = ctx.argument::<JsString>(0)?.value();
    let json: Value = match serde_json::from_str(json_val.as_str()) {
        Ok(json) => json,
        Err(e) => panic!("{:?}", e)
    };
    let path = ctx.argument::<JsString>(1)?.value();
    match jsonpath::select(&json, path.as_str()) {
        Ok(value) => Ok(neon_serde::to_value(&mut ctx, &value)?),
        Err(e) => panic!("{:?}", e)
    }
}

pub struct Compile {
    node: Node
}

pub struct Selector {
    json: RefValueWrapper
}

declare_types! {
    pub class JsCompile for Compile {
        init(mut ctx) {
            let path = ctx.argument::<JsString>(0)?.value();
            let mut parser = Parser::new(path.as_str());

            let node = match parser.compile() {
                Ok(node) => node,
                Err(e) => panic!("{:?}", e)
            };

            Ok(Compile { node })
        }

        method template(mut ctx) {
            let this = ctx.this();

            let node = {
                let guard = ctx.lock();
                let this = this.borrow(&guard);
                this.node.clone()
            };

            // let o = ctx.argument::<JsValue>(0)?;
            // let json: Value = neon_serde::from_value(&mut ctx, o)?;
            let json_str = ctx.argument::<JsString>(0)?.value();
            let json: Value = match serde_json::from_str(&json_str) {
                Ok(json) => json,
                Err(e) => panic!("{:?}", e)
            };
            let mut jf = JsonValueFilter::new_from_value((&json).into());
            jf.visit(node);
            let v = jf.take_value().into_value();
            Ok(neon_serde::to_value(&mut ctx, &v)?)
        }
    }

    pub class JsSelector for Selector {
        init(mut ctx) {
            // let o = ctx.argument::<JsValue>(0)?;
            // let json: Value = neon_serde::from_value(&mut ctx, o)?;
            let json_str = ctx.argument::<JsString>(0)?.value();
            let json: Value = match serde_json::from_str(&json_str) {
                Ok(json) => json,
                Err(e) => panic!("{:?}", e)
            };

            Ok(Selector { json: (&json).into() })
        }

        method selector(mut ctx) {
            let this = ctx.this();

            let json = {
                let guard = ctx.lock();
                let this = this.borrow(&guard);
                this.json.clone()
            };

            let path = ctx.argument::<JsString>(0)?.value();
            let mut parser = Parser::new(path.as_str());

            let node = match parser.compile() {
                Ok(node) => node,
                Err(e) => panic!("{:?}", e)
            };

            let mut jf = JsonValueFilter::new_from_value(json);
            jf.visit(node);
            let v = jf.take_value().into_value();
            Ok(neon_serde::to_value(&mut ctx, &v)?)
        }
    }
}
register_module!(mut m, {
    m.export_class::<JsCompile>("Compile").expect("Compile class error");
    m.export_class::<JsSelector>("Selector").expect("Selector class error");
    m.export_function("select", select)?;
    m.export_function("selectStr", select_str)?;
    Ok(())
});
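
The doc comment on `select` explains the design: deserializing a JS object in-process via `neon_serde::from_value` is slow, so the binding also exposes string-based entry points (`selectStr`, and string arguments to `Compile::template` and `Selector::init`) that parse with `serde_json` instead. A hypothetical micro-check of that trade-off from the JS side; the relative timings are an assumption, not a measured result:

    const { performance } = require('perf_hooks');
    // `select` takes a JS object, `selectStr` takes a JSON string
    // (names as exported by register_module! above).
    const native = require('../native');

    const doc = { a: Array.from({ length: 100000 }, (_, i) => ({ i })) };
    const docStr = JSON.stringify(doc);
    const path = '$.a[?(@.i < 10)]';

    let t = performance.now();
    native.select(doc, path);       // crosses the boundary via neon_serde::from_value
    console.log('select    :', performance.now() - t, 'ms');

    t = performance.now();
    native.selectStr(docStr, path); // crosses as one string, parsed by serde_json
    console.log('selectStr :', performance.now() - t, 'ms');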
3172 nodejs/package-lock.json (generated, new file)

File diff suppressed because it is too large
20 nodejs/package.json (new file)

@@ -0,0 +1,20 @@
{
  "name": "jsonpath-rs",
  "version": "0.1.0",
  "description": "JsonPath engine for NodeJs with Rust native implementation.",
  "author": "Changseok Han <freestrings@gmail.com>",
  "repository": "git+https://github.com/freestrings/jsonpath",
  "license": "MIT",
  "main": "lib/index.js",
  "dependencies": {
    "neon-cli": "^0.2.0",
    "node-pre-gyp": "0.6"
  },
  "scripts": {
    "install": "neon build --release",
    "test": "mocha"
  },
  "devDependencies": {
    "mocha": "^6.0.2"
  }
}
30 nodejs/test/index.spec.js (new file)

@@ -0,0 +1,30 @@
const jsonpath = require('../lib/index.js');

describe('compile test', () => {
    it('basic', (done) => {
        let template = jsonpath.compile('$.a');
        let result = template({'a': 1});
        if (result == 1) {
            done();
        }
    });
});

describe('selector test', () => {
    it('basic', (done) => {
        let selector = jsonpath.selector({'a': 1});
        let result = selector('$.a');
        if (result == 1) {
            done();
        }
    });
});

describe('select test', () => {
    it('basic', (done) => {
        let result = jsonpath.select({'a': 1}, '$.a');
        if (result == 1) {
            done();
        }
    });
});
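
These specs call done() only when the comparison holds, so a wrong result hangs until mocha's timeout instead of failing with a message. An assert-based variant (a sketch, assuming select returns the array of matched values, which is what would let the loose `result == 1` check above pass via `[1] == 1`):

    const assert = require('assert');
    const jsonpath = require('../lib/index.js');

    describe('select test (assert style)', () => {
        it('basic', () => {
            // Fails immediately with a diff instead of timing out.
            assert.deepStrictEqual(jsonpath.select({'a': 1}, '$.a'), [1]);
        });
    });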
@@ -1,353 +1,5 @@
use super::parser::*;

mod cmp;
mod term;
pub mod value_filter;
mod value_filter;
mod value_wrapper;

#[cfg(test)]
mod tests {
    extern crate env_logger;

    use std::io::Read;

    use serde_json::Value;

    use super::parser::Parser;
    use super::value_filter::*;

    fn setup() {
        let _ = env_logger::try_init();
    }

    fn new_value_filter(file: &str) -> ValueFilter {
        let string = read_json(file);
        let json: Value = serde_json::from_str(string.as_str()).unwrap();
        ValueFilter::new(json.into(), false, false)
    }

    fn do_filter(path: &str, file: &str) -> JsonValueFilter {
        let string = read_json(file);
        let mut jf = JsonValueFilter::new(string.as_str()).unwrap();
        let mut parser = Parser::new(path);
        parser.parse(&mut jf).unwrap();
        jf
    }

    fn read_json(path: &str) -> String {
        let mut f = std::fs::File::open(path).unwrap();
        let mut contents = String::new();
        f.read_to_string(&mut contents).unwrap();
        contents
    }

    #[test]
    fn step_in() {
        setup();

        let mut jf = new_value_filter("./benches/data_obj.json");
        {
            let current = jf.step_in_str("friends");
            assert_eq!(current.is_array(), true);
        }

        let mut jf = new_value_filter("./benches/data_array.json");
        {
            let current = jf.step_in_num(&1.0);
            assert_eq!(current.get_val().is_object(), true);
        }
        {
            let current = jf.step_in_str("friends");
            assert_eq!(current.is_array(), true);
        }
        let mut jf = new_value_filter("./benches/data_obj.json");
        {
            jf.step_in_str("school");
            jf.step_in_str("friends");
            jf.step_in_all();
            let current = jf.step_in_str("name");
            let friends = json!([
                "Millicent Norman",
                "Vincent Cannon",
                "Gray Berry"
            ]);
            assert_eq!(friends, current.get_val().into_value());
        }
        let mut jf = new_value_filter("./benches/data_obj.json");
        {
            let current = jf.step_leaves_str("name");
            let names = json!([
                "Leonor Herman",
                "Millicent Norman",
                "Vincent Cannon",
                "Gray Berry",
                "Vincent Cannon",
                "Gray Berry"
            ]);
            assert_eq!(names, current.get_val().into_value());
        }
    }

    #[test]
    fn array() {
        setup();

        let friends = json!([
            {"id": 1, "name": "Vincent Cannon" },
            {"id": 2, "name": "Gray Berry"}
        ]);

        let jf = do_filter("$.school.friends[1, 2]", "./benches/data_obj.json");
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$.school.friends[1:]", "./benches/data_obj.json");
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$.school.friends[:-2]", "./benches/data_obj.json");
        let friends = json!([
            {"id": 0, "name": "Millicent Norman"}
        ]);
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$..friends[2].name", "./benches/data_obj.json");
        let friends = json!(["Gray Berry", "Gray Berry"]);
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$..friends[*].name", "./benches/data_obj.json");
        let friends = json!(["Vincent Cannon","Gray Berry","Millicent Norman","Vincent Cannon","Gray Berry"]);
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$['school']['friends'][*].['name']", "./benches/data_obj.json");
        let friends = json!(["Millicent Norman","Vincent Cannon","Gray Berry"]);
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$['school']['friends'][0].['name']", "./benches/data_obj.json");
        let friends = json!("Millicent Norman");
        assert_eq!(friends, jf.current_value().into_value());
    }

    #[test]
    fn return_type() {
        setup();

        let friends = json!({
            "friends": [
                {"id": 0, "name": "Millicent Norman"},
                {"id": 1, "name": "Vincent Cannon" },
                {"id": 2, "name": "Gray Berry"}
            ]
        });

        let jf = do_filter("$.school", "./benches/data_obj.json");
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$.school[?(@.friends[0])]", "./benches/data_obj.json");
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$.school[?(@.friends[10])]", "./benches/data_obj.json");
        assert_eq!(Value::Null, jf.current_value().into_value());

        let jf = do_filter("$.school[?(1==1)]", "./benches/data_obj.json");
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$.school.friends[?(1==1)]", "./benches/data_obj.json");
        let friends = json!([
            {"id": 0, "name": "Millicent Norman"},
            {"id": 1, "name": "Vincent Cannon" },
            {"id": 2, "name": "Gray Berry"}
        ]);
        assert_eq!(friends, jf.current_value().into_value());
    }

    #[test]
    fn op() {
        setup();

        let jf = do_filter("$.school[?(@.friends == @.friends)]", "./benches/data_obj.json");
        let friends = json!({
            "friends": [
                {"id": 0, "name": "Millicent Norman"},
                {"id": 1, "name": "Vincent Cannon" },
                {"id": 2, "name": "Gray Berry"}
            ]
        });
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$.friends[?(@.name)]", "./benches/data_obj.json");
        let friends = json!([
            { "id" : 1, "name" : "Vincent Cannon" },
            { "id" : 2, "name" : "Gray Berry" }
        ]);
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$.friends[?(@.id >= 2)]", "./benches/data_obj.json");
        let friends = json!([
            { "id" : 2, "name" : "Gray Berry" }
        ]);
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$.friends[?(@.id >= 2 || @.id == 1)]", "./benches/data_obj.json");
        let friends = json!([
            { "id" : 2, "name" : "Gray Berry" },
            { "id" : 1, "name" : "Vincent Cannon" }
        ]);
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$.friends[?( (@.id >= 2 || @.id == 1) && @.id == 0)]", "./benches/data_obj.json");
        assert_eq!(Value::Null, jf.current_value().into_value());

        let jf = do_filter("$..friends[?(@.id == $.index)].id", "./benches/data_obj.json");
        let friends = json!([0, 0]);
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$..book[?($.store.bicycle.price < @.price)].price", "./benches/example.json");
        let friends = json!([22.99]);
        assert_eq!(friends, jf.current_value().into_value());

        let jf = do_filter("$..book[?( (@.price == 12.99 || @.category == 'reference') && @.price > 10)].price", "./benches/example.json");
        let friends = json!([12.99]);
        assert_eq!(friends, jf.current_value().into_value());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn example() {
|
||||
setup();
|
||||
|
||||
let jf = do_filter("$.store.book[*].author", "./benches/example.json");
|
||||
let ret = json!(["Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$..author", "./benches/example.json");
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$.store.*", "./benches/example.json");
|
||||
let ret = json!([
|
||||
[
|
||||
{"category" : "reference", "author" : "Nigel Rees","title" : "Sayings of the Century", "price" : 8.95},
|
||||
{"category" : "fiction", "author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99},
|
||||
{"category" : "fiction", "author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
|
||||
{"category" : "fiction", "author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
|
||||
],
|
||||
{"color" : "red","price" : 19.95},
|
||||
]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$.store..price", "./benches/example.json");
|
||||
let ret = json!([8.95, 12.99, 8.99, 22.99, 19.95]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$..book[2]", "./benches/example.json");
|
||||
let ret = json!([{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$..book[-2]", "./benches/example.json");
|
||||
let ret = json!([{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$..book[0,1]", "./benches/example.json");
|
||||
let ret = json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$..book[:2]", "./benches/example.json");
|
||||
let ret = json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Evelyn Waugh",
|
||||
"title" : "Sword of Honour",
|
||||
"price" : 12.99
|
||||
}
|
||||
]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$..book[2:]", "./benches/example.json");
|
||||
let ret = json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$..book[?(@.isbn)]", "./benches/example.json");
|
||||
let ret = json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$.store.book[?(@.price < 10)]", "./benches/example.json");
|
||||
let ret = json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$..*", "./benches/example.json");
|
||||
let json: Value = serde_json::from_str(read_json("./benches/giveme_every_thing_result.json").as_str()).unwrap();
|
||||
assert_eq!(json, jf.current_value().into_value());
|
||||
}
|
||||
}
|
||||
pub mod prelude;

1
src/filter/prelude.rs
Normal file
@ -0,0 +1 @@
pub use super::value_filter::*;
@ -1,13 +1,13 @@
use std::error::Error;
use std::result;

use ref_value::*;
use std::result::Result;

use serde_json::Value;

use super::parser::*;
use super::term::*;
use super::value_wrapper::*;
use ref_value::*;

use parser::prelude::*;
use filter::term::*;
use filter::value_wrapper::*;

trait ArrayIndex {
    fn index(&self, v: &RefValueWrapper) -> usize;
@ -277,10 +277,10 @@ pub struct JsonValueFilter {
}

impl JsonValueFilter {
    pub fn new(json: &str) -> result::Result<Self, String> {
    pub fn new(json: &str) -> Result<Self, String> {
        let json: Value = serde_json::from_str(json)
            .map_err(|e| e.description().to_string())?;
        Ok(JsonValueFilter::new_from_value(json.into()))
        Ok(JsonValueFilter::new_from_value((&json).into()))
    }

    pub fn new_from_value(json: RefValueWrapper) -> Self {
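
The switch from `json.into()` to `(&json).into()` in `JsonValueFilter::new` is what lets the filter build its internal `RefValueWrapper` tree without consuming the caller's `serde_json::Value`; the matching `impl Into<RefValueWrapper> for &Value` appears in the `ref_value` diff further down. A minimal sketch of the same borrowing-conversion pattern — the `Node` type and its variants here are illustrative stand-ins, not this crate's API:

```rust
use std::sync::Arc;

// A reduced stand-in for a RefValue-style tree: owned and shareable.
#[derive(Debug)]
enum Node {
    Null,
    Bool(bool),
    Str(String),
    List(Vec<Arc<Node>>),
}

// Implementing the conversion for a borrowed Value means the caller
// keeps ownership of the input and can convert it as often as needed.
impl From<&serde_json::Value> for Node {
    fn from(v: &serde_json::Value) -> Node {
        match v {
            serde_json::Value::Null => Node::Null,
            serde_json::Value::Bool(b) => Node::Bool(*b),
            serde_json::Value::String(s) => Node::Str(s.clone()),
            serde_json::Value::Array(a) => {
                Node::List(a.iter().map(|e| Arc::new(Node::from(e))).collect())
            }
            _ => Node::Null, // numbers/objects elided in this sketch
        }
    }
}

fn main() {
    let json = serde_json::json!(["a", true, null]);
    let first: Node = (&json).into();  // borrow once...
    let second: Node = (&json).into(); // ...and borrow again
    println!("{:?} {:?} {:?}", first, second, json); // json is still usable
}
```
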
@ -1,4 +1,5 @@
use indexmap::map::IndexMap;

use ref_value::*;

use super::cmp::*;
104
src/lib.rs
@ -43,7 +43,7 @@
//! "expensive": 10
//! });
//!
//! let mut selector = jsonpath::selector(json_obj);
//! let mut selector = jsonpath::selector(&json_obj);
//!
//! //
//! // $.store.book[*].author
@ -158,28 +158,22 @@
//! assert_eq!(ret, json);
//! ```

#[macro_use]
extern crate log;
extern crate env_logger;

#[cfg(test)]
#[macro_use] extern crate serde_json;
#[cfg(not(test))]
extern crate serde_json;

extern crate core;
extern crate indexmap;

#[doc(hidden)]
pub mod parser;
mod parser;
#[doc(hidden)]
pub mod filter;
mod filter;
#[doc(hidden)]
pub mod ref_value;
mod ref_value;
pub mod prelude;

use parser::parser::*;
use filter::value_filter::*;
use parser::prelude::*;
use filter::prelude::*;

use std::result;
use serde_json::Value;
@ -204,7 +198,7 @@ type Result = result::Result<Value, String>;
/// "friends": [ {"id": 0}, {"id": 1} ]
/// });
///
/// let json = template(json_obj).unwrap();
/// let json = template(&json_obj).unwrap();
/// let ret = json!([ {"id": 0}, {"id": 0} ]);
/// assert_eq!(json, ret);
///
@ -216,11 +210,11 @@ type Result = result::Result<Value, String>;
/// "friends": [ {"id": 0}, {"id": 1} ]
/// });
///
/// let json = template(json_obj).unwrap();
/// let json = template(&json_obj).unwrap();
/// let ret = json!([ {"id": 0}, {"name": "Millicent Norman"} ]);
/// assert_eq!(json, ret);
/// ```
pub fn compile<'a>(path: &'a str) -> impl FnMut(Value) -> Result + 'a {
pub fn compile<'a>(path: &'a str) -> impl FnMut(&Value) -> Result + 'a {
    let mut parser = Parser::new(path);
    let node = parser.compile();
    move |json| {
@ -249,7 +243,7 @@ pub fn compile<'a>(path: &'a str) -> impl FnMut(Value) -> Result + 'a {
/// "friends": [{"id": 0},{"id": 1}]
/// });
///
/// let mut selector = jsonpath::selector(json_obj);
/// let mut selector = jsonpath::selector(&json_obj);
///
/// let json = selector("$..friends[0]").unwrap();
/// let ret = json!([ {"id": 0}, {"id": 0} ]);
@ -259,7 +253,7 @@ pub fn compile<'a>(path: &'a str) -> impl FnMut(Value) -> Result + 'a {
/// let ret = json!([ {"id": 1}, {"id": 1} ]);
/// assert_eq!(json, ret);
/// ```
pub fn selector(json: Value) -> impl FnMut(&str) -> Result {
pub fn selector(json: &Value) -> impl FnMut(&str) -> Result {
    let wrapper: RefValueWrapper = json.into();
    move |path: &str| {
        let mut jf = JsonValueFilter::new_from_value(wrapper.clone());
@ -270,7 +264,7 @@ pub fn selector(json: Value) -> impl FnMut(&str) -> Result {
}

/// # Read the same Json multiple times using different JsonPath - Deprecated. use selector
pub fn reader(json: Value) -> impl FnMut(&str) -> Result {
pub fn reader(json: &Value) -> impl FnMut(&str) -> Result {
    selector(json)
}

@ -286,11 +280,11 @@ pub fn reader(json: Value) -> impl FnMut(&str) -> Result {
/// },
/// "friends": [{"id": 0}, {"id": 1}]
/// });
/// let json = jsonpath::select(json_obj, "$..friends[0]").unwrap();
/// let json = jsonpath::select(&json_obj, "$..friends[0]").unwrap();
/// let ret = json!([ {"id": 0}, {"id": 0} ]);
/// assert_eq!(json, ret);
/// ```
pub fn select(json: Value, path: &str) -> Result {
pub fn select(json: &Value, path: &str) -> Result {
    let mut jf = JsonValueFilter::new_from_value(json.into());
    let mut parser = Parser::new(path);
    parser.parse(&mut jf)?;
@ -298,73 +292,7 @@ pub fn select(json: Value, path: &str) -> Result {
}

/// # Read Json using JsonPath - Deprecated. use select
pub fn read(json: Value, path: &str) -> Result {
pub fn read(json: &Value, path: &str) -> Result {
    select(json, path)
}

#[cfg(test)]
mod tests {
    use super::*;

    use std::io::Read;

    fn read_json(path: &str) -> Value {
        let mut f = std::fs::File::open(path).unwrap();
        let mut contents = String::new();
        f.read_to_string(&mut contents).unwrap();
        serde_json::from_str(contents.as_str()).unwrap()
    }

    #[test]
    fn compile() {
        let mut template = super::compile("$..friends[2]");
        let json_obj = read_json("./benches/data_obj.json");
        let json = template(json_obj).unwrap();
        let ret = json!([
            {"id": 2,"name": "Gray Berry"},
            {"id": 2,"name": "Gray Berry"}
        ]);
        assert_eq!(json, ret);

        let json_obj = read_json("./benches/data_array.json");
        let json = template(json_obj).unwrap();
        let ret = json!([
            {"id": 2,"name": "Gray Berry"},
            {"id": 2,"name": "Rosetta Erickson"}
        ]);
        assert_eq!(json, ret);
    }

    #[test]
    fn selector() {
        let json_obj = read_json("./benches/data_obj.json");
        let mut reader = super::selector(json_obj);
        let json = reader("$..friends[2]").unwrap();
        let ret = json!([
            {"id": 2,"name": "Gray Berry"},
            {"id": 2,"name": "Gray Berry"}
        ]);
        assert_eq!(json, ret);

        let json = reader("$..friends[0]").unwrap();
        let ret = json!([
            {"id": 0},
            {"id": 0,"name": "Millicent Norman"}
        ]);
        assert_eq!(json, ret);
    }

    #[test]
    fn select() {
        let json_obj = read_json("./benches/example.json");
        let json = super::select(json_obj, "$..book[2]").unwrap();
        let ret = json!([{
            "category" : "fiction",
            "author" : "Herman Melville",
            "title" : "Moby Dick",
            "isbn" : "0-553-21311-3",
            "price" : 8.99
        }]);
        assert_eq!(json, ret);
    }
}
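
Taken together, the `lib.rs` hunks above move the entire public surface (`compile`, `selector`, `reader`, `select`, `read`) from consuming a `serde_json::Value` to borrowing one, so a single parsed document can be queried repeatedly. A small usage sketch against that borrowed API, mirroring the doc-comments in the hunks (the document contents are taken from those examples):

```rust
extern crate jsonpath_lib as jsonpath;
#[macro_use]
extern crate serde_json;

fn main() {
    let json_obj = json!({
        "school": { "friends": [{"id": 0}, {"id": 1}] },
        "friends": [{"id": 0}, {"id": 1}]
    });

    // `selector` borrows the document, so `json_obj` stays usable afterwards.
    let mut selector = jsonpath::selector(&json_obj);
    let first = selector("$..friends[0]").unwrap();
    assert_eq!(first, json!([{"id": 0}, {"id": 0}]));

    // One-shot reads borrow too.
    let second = jsonpath::select(&json_obj, "$..friends[1]").unwrap();
    assert_eq!(second, json!([{"id": 1}, {"id": 1}]));
}
```
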
@ -1,3 +1,4 @@
mod path_reader;
mod tokenizer;
pub mod parser;
mod parser;
pub mod prelude;
@ -1,19 +1,14 @@
use std::result;
use std::result::Result;

use super::tokenizer::{
    PreloadedTokenizer,
    Token,
    TokenError,
};
use super::tokenizer::*;

const DUMMY: usize = 0;

type Result<T> = result::Result<T, String>;
type ParseResult<T> = Result<T, String>;
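
Renaming the parser's local alias from `Result<T>` to `ParseResult<T>` avoids shadowing `std::result::Result`, which the same file now imports directly; every `-> Result<Node>` in the hunks below becomes `-> ParseResult<Node>` accordingly. The pattern, reduced to a self-contained sketch:

```rust
use std::result::Result;

// A crate-local alias that fixes the error type but does not shadow
// the standard `Result`, so both can coexist in one module.
type ParseResult<T> = Result<T, String>;

fn parse_digit(input: &str) -> ParseResult<u32> {
    input
        .trim()
        .parse::<u32>()
        .map_err(|e| format!("not a digit: {}", e))
}

fn main() {
    assert_eq!(parse_digit(" 7 "), Ok(7));
    assert!(parse_digit("x").is_err());
}
```
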
mod utils {
    use std::result;

    pub fn string_to_isize<F>(string: &String, msg_handler: F) -> result::Result<isize, String>
    pub fn string_to_isize<F>(string: &String, msg_handler: F) -> Result<isize, String>
        where F: Fn() -> String {
        match string.as_str().parse::<isize>() {
            Ok(n) => Ok(n),
@ -21,7 +16,7 @@ mod utils {
        }
    }

    pub fn string_to_f64<F>(string: &String, msg_handler: F) -> result::Result<f64, String>
    pub fn string_to_f64<F>(string: &String, msg_handler: F) -> Result<f64, String>
        where F: Fn() -> String {
        match string.as_str().parse::<f64>() {
            Ok(n) => Ok(n),
@ -88,17 +83,17 @@ impl<'a> Parser<'a> {
        Parser { tokenizer: PreloadedTokenizer::new(input) }
    }

    pub fn compile(&mut self) -> Result<Node> {
    pub fn compile(&mut self) -> ParseResult<Node> {
        Ok(self.json_path()?)
    }

    pub fn parse<V: NodeVisitor>(&mut self, visitor: &mut V) -> Result<()> {
    pub fn parse<V: NodeVisitor>(&mut self, visitor: &mut V) -> ParseResult<()> {
        let node = self.json_path()?;
        visitor.visit(node);
        Ok(())
    }

    fn json_path(&mut self) -> Result<Node> {
    fn json_path(&mut self) -> ParseResult<Node> {
        debug!("#json_path");
        match self.tokenizer.next_token() {
            Ok(Token::Absolute(_)) => {
@ -111,7 +106,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn paths(&mut self, prev: Node) -> Result<Node> {
    fn paths(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#paths");
        match self.tokenizer.peek_token() {
            Ok(Token::Dot(_)) => {
@ -130,7 +125,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn paths_dot(&mut self, prev: Node) -> Result<Node> {
    fn paths_dot(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#paths_dot");
        let node = self.path(prev)?;
        match self.tokenizer.peek_token() {
@ -150,7 +145,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn path(&mut self, prev: Node) -> Result<Node> {
    fn path(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#path");
        match self.tokenizer.peek_token() {
            Ok(Token::Dot(_)) => {
@ -172,7 +167,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn path_leaves(&mut self, prev: Node) -> Result<Node> {
    fn path_leaves(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#path_leaves");
        self.eat_token();
        match self.tokenizer.peek_token() {
@ -190,7 +185,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn path_leaves_key(&mut self, prev: Node) -> Result<Node> {
    fn path_leaves_key(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#path_leaves_key");
        Ok(Node {
            token: ParseToken::Leaves,
@ -199,7 +194,7 @@ impl<'a> Parser<'a> {
        })
    }

    fn path_leaves_all(&mut self, prev: Node) -> Result<Node> {
    fn path_leaves_all(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#path_leaves_all");
        self.eat_token();
        Ok(Node {
@ -209,7 +204,7 @@ impl<'a> Parser<'a> {
        })
    }

    fn path_in_all(&mut self, prev: Node) -> Result<Node> {
    fn path_in_all(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#path_in_all");
        self.eat_token();
        Ok(Node {
@ -219,7 +214,7 @@ impl<'a> Parser<'a> {
        })
    }

    fn path_in_key(&mut self, prev: Node) -> Result<Node> {
    fn path_in_key(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#path_in_key");
        Ok(Node {
            token: ParseToken::In,
@ -228,7 +223,7 @@ impl<'a> Parser<'a> {
        })
    }

    fn key(&mut self) -> Result<Node> {
    fn key(&mut self) -> ParseResult<Node> {
        debug!("#key");
        match self.tokenizer.next_token() {
            Ok(Token::Key(_, v)) => {
@ -240,7 +235,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn array_quota_value(&mut self) -> Result<Node> {
    fn array_quota_value(&mut self) -> ParseResult<Node> {
        debug!("#array_quota_value");
        match self.tokenizer.next_token() {
            Ok(Token::SingleQuoted(_, val))
@ -256,7 +251,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn array_start(&mut self, prev: Node) -> Result<Node> {
    fn array_start(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#array_start");
        match self.tokenizer.peek_token() {
            Ok(Token::Question(_)) => {
@ -285,14 +280,14 @@ impl<'a> Parser<'a> {
        }
    }

    fn array(&mut self, prev: Node) -> Result<Node> {
    fn array(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#array");
        let ret = self.array_start(prev)?;
        self.eat_whitespace();
        self.close_token(ret, Token::CloseArray(DUMMY))
    }

    fn array_value_key(&mut self) -> Result<Node> {
    fn array_value_key(&mut self) -> ParseResult<Node> {
        debug!("#array_value_key");
        match self.tokenizer.next_token() {
            Ok(Token::Key(pos, ref val)) => {
@ -318,7 +313,7 @@ impl<'a> Parser<'a> {
        }

    fn array_value(&mut self) -> Result<Node> {
    fn array_value(&mut self) -> ParseResult<Node> {
        debug!("#array_value");
        match self.tokenizer.peek_token() {
            Ok(Token::Key(_, _)) => {
@ -342,7 +337,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn union(&mut self, num: isize) -> Result<Node> {
    fn union(&mut self, num: isize) -> ParseResult<Node> {
        debug!("#union");
        let mut values = vec![num];
        while match self.tokenizer.peek_token() {
@ -364,7 +359,7 @@ impl<'a> Parser<'a> {
        Ok(self.node(ParseToken::Union(values)))
    }

    fn range_from(&mut self, num: isize) -> Result<Node> {
    fn range_from(&mut self, num: isize) -> ParseResult<Node> {
        debug!("#range_from");
        self.eat_token();
        self.eat_whitespace();
@ -378,7 +373,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn range_to(&mut self) -> Result<Node> {
    fn range_to(&mut self) -> ParseResult<Node> {
        debug!("#range_to");
        match self.tokenizer.next_token() {
            Ok(Token::Key(pos, ref val)) => {
@ -391,7 +386,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn range(&mut self, num: isize) -> Result<Node> {
    fn range(&mut self, num: isize) -> ParseResult<Node> {
        debug!("#range");
        match self.tokenizer.next_token() {
            Ok(Token::Key(pos, ref val)) => {
@ -404,7 +399,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn filter(&mut self) -> Result<Node> {
    fn filter(&mut self) -> ParseResult<Node> {
        debug!("#filter");
        match self.tokenizer.next_token() {
            Ok(Token::OpenParenthesis(_)) => {
@ -421,7 +416,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn exprs(&mut self) -> Result<Node> {
    fn exprs(&mut self) -> ParseResult<Node> {
        self.eat_whitespace();
        debug!("#exprs");
        let node = match self.tokenizer.peek_token() {
@ -441,7 +436,7 @@ impl<'a> Parser<'a> {
        self.condition_expr(node)
    }

    fn condition_expr(&mut self, prev: Node) -> Result<Node> {
    fn condition_expr(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#condition_expr");
        match self.tokenizer.peek_token() {
            Ok(Token::And(_)) => {
@ -466,7 +461,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn expr(&mut self) -> Result<Node> {
    fn expr(&mut self) -> ParseResult<Node> {
        debug!("#expr");

        let has_prop_candidate = match self.tokenizer.peek_token() {
@ -494,7 +489,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn term_num(&mut self) -> Result<Node> {
    fn term_num(&mut self) -> ParseResult<Node> {
        debug!("#term_num");
        match self.tokenizer.next_token() {
            Ok(Token::Key(pos, val)) => {
@ -517,7 +512,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn term_num_float(&mut self, mut num: &str) -> Result<Node> {
    fn term_num_float(&mut self, mut num: &str) -> ParseResult<Node> {
        debug!("#term_num_float");
        self.eat_token();
        match self.tokenizer.next_token() {
@ -535,7 +530,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn term(&mut self) -> Result<Node> {
    fn term(&mut self) -> ParseResult<Node> {
        debug!("#term");
        match self.tokenizer.peek_token() {
            Ok(Token::At(_)) => {
@ -568,7 +563,7 @@ impl<'a> Parser<'a> {
        }
    }

    fn op(&mut self, prev: Node) -> Result<Node> {
    fn op(&mut self, prev: Node) -> ParseResult<Node> {
        debug!("#op");
        let token = match self.tokenizer.next_token() {
            Ok(Token::Equal(_)) => {
@ -620,7 +615,7 @@ impl<'a> Parser<'a> {
        Node { left: None, right: None, token: token }
    }

    fn close_token(&mut self, ret: Node, token: Token) -> Result<Node> {
    fn close_token(&mut self, ret: Node, token: Token) -> ParseResult<Node> {
        debug!("#close_token");
        match self.tokenizer.next_token() {
            Ok(ref t) if t.partial_eq(token) => {
@ -680,318 +675,3 @@ pub trait NodeVisitor {
    fn visit_token(&mut self, token: ParseToken);
    fn end_term(&mut self) {}
}
#[cfg(test)]
mod tests {
    extern crate env_logger;

    use super::*;

    struct NodeVisitorTestImpl<'a> {
        input: &'a str,
        stack: Vec<ParseToken>,
    }

    impl<'a> NodeVisitorTestImpl<'a> {
        fn new(input: &'a str) -> Self {
            NodeVisitorTestImpl { input, stack: Vec::new() }
        }

        fn visit(&mut self) -> result::Result<Vec<ParseToken>, String> {
            let tokenizer = PreloadedTokenizer::new(self.input);
            let mut parser = Parser { tokenizer };
            parser.parse(self)?;
            Ok(self.stack.split_off(0))
        }
    }

    impl<'a> NodeVisitor for NodeVisitorTestImpl<'a> {
        fn visit_token(&mut self, token: ParseToken) {
            self.stack.push(token);
        }
    }

    fn setup() {
        let _ = env_logger::try_init();
    }

    fn run(input: &str) -> result::Result<Vec<ParseToken>, String> {
        let mut interpreter = NodeVisitorTestImpl::new(input);
        interpreter.visit()
    }

    #[test]
    fn parse_path() {
        setup();

        assert_eq!(run("$.aa"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::In,
            ParseToken::Key("aa".to_owned())
        ]));

        assert_eq!(run("$.00.a"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::In,
            ParseToken::Key("00".to_owned()),
            ParseToken::In,
            ParseToken::Key("a".to_owned())
        ]));

        assert_eq!(run("$.00.韓창.seok"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::In,
            ParseToken::Key("00".to_owned()),
            ParseToken::In,
            ParseToken::Key("韓창".to_owned()),
            ParseToken::In,
            ParseToken::Key("seok".to_owned())
        ]));

        assert_eq!(run("$.*"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::In,
            ParseToken::All
        ]));

        assert_eq!(run("$..*"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::Leaves,
            ParseToken::All
        ]));

        assert_eq!(run("$..[0]"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::Leaves,
            ParseToken::Array,
            ParseToken::Number(0.0),
            ParseToken::ArrayEof
        ]));

        match run("$.") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$..") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$. a") {
            Ok(_) => panic!(),
            _ => {}
        }
    }

    #[test]
    fn parse_array_sytax() {
        setup();

        assert_eq!(run("$.book[?(@.isbn)]"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::In,
            ParseToken::Key("book".to_string()),
            ParseToken::Array,
            ParseToken::Relative,
            ParseToken::In,
            ParseToken::Key("isbn".to_string()),
            ParseToken::ArrayEof
        ]));

        //
        // An array is also treated as an `In` context, so when arrays nest, only one is emitted
        //
        assert_eq!(run("$.[*]"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::Array,
            ParseToken::All,
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$.a[*]"), Ok(vec![
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Array,
            ParseToken::All,
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$.a[*].가"), Ok(vec![
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Array,
            ParseToken::All,
            ParseToken::ArrayEof,
            ParseToken::In, ParseToken::Key("가".to_owned())
        ]));

        assert_eq!(run("$.a[0][1]"), Ok(vec![
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Array,
            ParseToken::Number(0_f64),
            ParseToken::ArrayEof,
            ParseToken::Array,
            ParseToken::Number(1_f64),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$.a[1,2]"), Ok(vec![
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Array,
            ParseToken::Union(vec![1, 2]),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$.a[10:]"), Ok(vec![
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Array,
            ParseToken::Range(Some(10), None),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$.a[:11]"), Ok(vec![
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Array,
            ParseToken::Range(None, Some(11)),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$.a[-12:13]"), Ok(vec![
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Array,
            ParseToken::Range(Some(-12), Some(13)),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$.a[?(1>2)]"), Ok(vec![
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Array,
            ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Greater),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$.a[?($.b>3)]"), Ok(vec![
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Array,
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("b".to_owned()), ParseToken::Number(3_f64), ParseToken::Filter(FilterToken::Greater),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$[?($.c>@.d && 1==2)]"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::Array,
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
            ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
            ParseToken::Filter(FilterToken::Greater),
            ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
            ParseToken::Filter(FilterToken::And),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$[?($.c>@.d&&(1==2||3>=4))]"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::Array,
            ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
            ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
            ParseToken::Filter(FilterToken::Greater),
            ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
            ParseToken::Number(3_f64), ParseToken::Number(4_f64), ParseToken::Filter(FilterToken::GreaterOrEqual),
            ParseToken::Filter(FilterToken::Or),
            ParseToken::Filter(FilterToken::And),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$[?(@.a<@.b)]"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::Array,
            ParseToken::Relative, ParseToken::In, ParseToken::Key("a".to_owned()),
            ParseToken::Relative, ParseToken::In, ParseToken::Key("b".to_owned()),
            ParseToken::Filter(FilterToken::Little),
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$[*][*][*]"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::Array,
            ParseToken::All,
            ParseToken::ArrayEof,
            ParseToken::Array,
            ParseToken::All,
            ParseToken::ArrayEof,
            ParseToken::Array,
            ParseToken::All,
            ParseToken::ArrayEof
        ]));

        assert_eq!(run("$['a']['bb']"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::Array,
            ParseToken::Key("a".to_string()),
            ParseToken::ArrayEof,
            ParseToken::Array,
            ParseToken::Key("bb".to_string()),
            ParseToken::ArrayEof
        ]));

        match run("$[") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$[]") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$[a]") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$[?($.a)]") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$[?(@.a > @.b]") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$[?(@.a < @.b&&(@.c < @.d)]") {
            Ok(_) => panic!(),
            _ => {}
        }
    }

    #[test]
    fn parse_array_float() {
        setup();

        assert_eq!(run("$[?(1.1<2.1)]"), Ok(vec![
            ParseToken::Absolute,
            ParseToken::Array,
            ParseToken::Number(1.1), ParseToken::Number(2.1), ParseToken::Filter(FilterToken::Little),
            ParseToken::ArrayEof
        ]));

        match run("$[1.1]") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$[?(1.1<.2)]") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$[?(1.1<2.)]") {
            Ok(_) => panic!(),
            _ => {}
        }

        match run("$[?(1.1<2.a)]") {
            Ok(_) => panic!(),
            _ => {}
        }
    }
}
@ -1,4 +1,4 @@
use std::result;
use std::result::Result;

#[derive(Debug, PartialEq)]
pub enum ReaderError {
@ -18,12 +18,12 @@ impl<'a> PathReader<'a> {
    }
}

    pub fn peek_char(&self) -> result::Result<(usize, char), ReaderError> {
    pub fn peek_char(&self) -> Result<(usize, char), ReaderError> {
        let ch = self.input.chars().next().ok_or(ReaderError::Eof)?;
        Ok((self.pos + ch.len_utf8(), ch))
    }

    pub fn take_while<F>(&mut self, fun: F) -> result::Result<(usize, String), ReaderError>
    pub fn take_while<F>(&mut self, fun: F) -> Result<(usize, String), ReaderError>
        where
            F: Fn(&char) -> bool
    {
@ -42,7 +42,7 @@ impl<'a> PathReader<'a> {
        Ok((self.pos, ret))
    }

    pub fn next_char(&mut self) -> result::Result<(usize, char), ReaderError> {
    pub fn next_char(&mut self) -> Result<(usize, char), ReaderError> {
        let (_, ch) = self.peek_char()?;
        self.input = &self.input[ch.len_utf8()..];
        let ret = Ok((self.pos, ch));
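
Note that `PathReader` advances through its input by `ch.len_utf8()` bytes rather than by one, which is what keeps the reported byte positions valid for multi-byte keys like the Korean identifiers exercised in the tokenizer tests below. A reduced sketch of that cursor pattern — the `Cursor` struct here is illustrative, not the crate's type:

```rust
// A minimal byte-position cursor over &str that stays on char boundaries.
struct Cursor<'a> {
    input: &'a str,
    pos: usize, // byte offset from the start of the original input
}

impl<'a> Cursor<'a> {
    fn next_char(&mut self) -> Option<(usize, char)> {
        let ch = self.input.chars().next()?;
        let at = self.pos;
        // Advance by the char's encoded width, not by 1 byte, so
        // slicing never lands inside a multi-byte UTF-8 sequence.
        self.input = &self.input[ch.len_utf8()..];
        self.pos += ch.len_utf8();
        Some((at, ch))
    }
}

fn main() {
    let mut c = Cursor { input: "a가b", pos: 0 };
    assert_eq!(c.next_char(), Some((0, 'a')));
    assert_eq!(c.next_char(), Some((1, '가'))); // '가' occupies bytes 1..4
    assert_eq!(c.next_char(), Some((4, 'b')));
}
```
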
2
src/parser/prelude.rs
Normal file
@ -0,0 +1,2 @@
pub use super::parser::*;
pub use super::tokenizer::*;
@ -1,10 +1,7 @@
use std::result;
use std::io::Write;
use std::result::Result;

use super::path_reader::{
    ReaderError,
    PathReader,
};
use super::path_reader::{PathReader, ReaderError};

const ABSOLUTE: &'static str = "$";
const DOT: &'static str = ".";
@ -90,7 +87,6 @@ pub enum Token {
}

impl Token {

    pub fn partial_eq(&self, other: Token) -> bool {
        self.to_simple() == other.to_simple()
    }
@ -152,19 +148,19 @@ impl<'a> Tokenizer<'a> {
    }
}

    fn single_quota(&mut self, pos: usize, ch: char) -> result::Result<Token, TokenError> {
    fn single_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
        let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
        self.input.next_char().map_err(to_token_error)?;
        Ok(Token::SingleQuoted(pos, val))
    }

    fn double_quota(&mut self, pos: usize, ch: char) -> result::Result<Token, TokenError> {
    fn double_quota(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
        let (_, val) = self.input.take_while(|c| *c != ch).map_err(to_token_error)?;
        self.input.next_char().map_err(to_token_error)?;
        Ok(Token::DoubleQuoted(pos, val))
    }

    fn equal(&mut self, pos: usize, _: char) -> result::Result<Token, TokenError> {
    fn equal(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
        let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
        match ch {
            CH_EQUAL => {
@ -175,7 +171,7 @@ impl<'a> Tokenizer<'a> {
        }
    }

    fn not_equal(&mut self, pos: usize, _: char) -> result::Result<Token, TokenError> {
    fn not_equal(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
        let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
        match ch {
            CH_EQUAL => {
@ -186,7 +182,7 @@ impl<'a> Tokenizer<'a> {
        }
    }

    fn little(&mut self, pos: usize, _: char) -> result::Result<Token, TokenError> {
    fn little(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
        let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
        match ch {
            CH_EQUAL => {
@ -197,7 +193,7 @@ impl<'a> Tokenizer<'a> {
        }
    }

    fn greater(&mut self, pos: usize, _: char) -> result::Result<Token, TokenError> {
    fn greater(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
        let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
        match ch {
            CH_EQUAL => {
@ -208,7 +204,7 @@ impl<'a> Tokenizer<'a> {
        }
    }

    fn and(&mut self, pos: usize, _: char) -> result::Result<Token, TokenError> {
    fn and(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
        let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
        match ch {
            CH_AMPERSAND => {
@ -219,7 +215,7 @@ impl<'a> Tokenizer<'a> {
        }
    }

    fn or(&mut self, pos: usize, _: char) -> result::Result<Token, TokenError> {
    fn or(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
        let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
        match ch {
            CH_PIPE => {
@ -230,12 +226,12 @@ impl<'a> Tokenizer<'a> {
        }
    }

    fn whitespace(&mut self, pos: usize, _: char) -> result::Result<Token, TokenError> {
    fn whitespace(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
        let (_, vec) = self.input.take_while(|c| c.is_whitespace()).map_err(to_token_error)?;
        Ok(Token::Whitespace(pos, vec.len()))
    }

    fn other(&mut self, pos: usize, ch: char) -> result::Result<Token, TokenError> {
    fn other(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
        let fun = |c: &char| {
            match simple_matched_token(*c, pos) {
                Some(_) => false,
@ -253,7 +249,7 @@ impl<'a> Tokenizer<'a> {
        Ok(Token::Key(pos, vec))
    }

    pub fn next_token(&mut self) -> result::Result<Token, TokenError> {
    pub fn next_token(&mut self) -> Result<Token, TokenError> {
        let (pos, ch) = self.input.next_char().map_err(to_token_error)?;
        match simple_matched_token(ch, pos) {
            Some(t) => Ok(t),
@ -309,7 +305,7 @@ impl<'a> PreloadedTokenizer<'a> {
    }
    }

    pub fn peek_token(&self) -> result::Result<&Token, TokenError> {
    pub fn peek_token(&self) -> Result<&Token, TokenError> {
        match self.tokens.last() {
            Some((_, t)) => {
                trace!("%{:?}", t);
@ -322,7 +318,7 @@ impl<'a> PreloadedTokenizer<'a> {
        }
    }

    pub fn next_token(&mut self) -> result::Result<Token, TokenError> {
    pub fn next_token(&mut self) -> Result<Token, TokenError> {
        match self.tokens.pop() {
            Some((pos, t)) => {
                self.curr_pos = Some(pos);
@ -357,204 +353,3 @@ impl<'a> PreloadedTokenizer<'a> {
    }
    }
}

#[cfg(test)]
mod tests {
    use super::TokenError;
    use super::{
        Token,
        Tokenizer,
        PreloadedTokenizer,
    };

    fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
        let mut tokenizer = Tokenizer::new(input);
        let mut vec = vec![];
        loop {
            match tokenizer.next_token() {
                Ok(t) => vec.push(t),
                Err(e) => return (vec, Some(e)),
            }
        }
    }

    fn run(input: &str, expected: (Vec<Token>, Option<TokenError>)) {
        let (vec, err) = collect_token(input.clone());
        assert_eq!((vec, err), expected, "\"{}\"", input);
    }

    #[test]
    fn peek() {
        let mut tokenizer = PreloadedTokenizer::new("$.a");
        match tokenizer.next_token() {
            Ok(t) => assert_eq!(Token::Absolute(0), t),
            _ => panic!()
        }

        match tokenizer.peek_token() {
            Ok(t) => assert_eq!(&Token::Dot(1), t),
            _ => panic!()
        }

        match tokenizer.peek_token() {
            Ok(t) => assert_eq!(&Token::Dot(1), t),
            _ => panic!()
        }

        match tokenizer.next_token() {
            Ok(t) => assert_eq!(Token::Dot(1), t),
            _ => panic!()
        }
    }

    #[test]
    fn token() {
        run("$.01.a",
            (
                vec![
                    Token::Absolute(0),
                    Token::Dot(1),
                    Token::Key(2, "01".to_string()),
                    Token::Dot(4),
                    Token::Key(5, "a".to_string())
                ]
                , Some(TokenError::Eof)
            ));

        run("$. []",
            (
                vec![
                    Token::Absolute(0),
                    Token::Dot(1),
                    Token::Whitespace(2, 2),
                    Token::OpenArray(5),
                    Token::CloseArray(6)
                ]
                , Some(TokenError::Eof)
            ));

        run("$..",
            (
                vec![
                    Token::Absolute(0),
                    Token::Dot(1),
                    Token::Dot(2),
                ]
                , Some(TokenError::Eof)
            ));

        run("$..ab",
            (
                vec![
                    Token::Absolute(0),
                    Token::Dot(1),
                    Token::Dot(2),
                    Token::Key(3, "ab".to_string())
                ]
                , Some(TokenError::Eof)
            ));

        run("$..가 [",
            (
                vec![
                    Token::Absolute(0),
                    Token::Dot(1),
                    Token::Dot(2),
                    Token::Key(3, "가".to_string()),
                    Token::Whitespace(6, 0),
                    Token::OpenArray(7),
                ]
                , Some(TokenError::Eof)
            ));

        run("[-1, 2 ]",
            (
                vec![
                    Token::OpenArray(0),
                    Token::Key(1, "-1".to_string()),
                    Token::Comma(3),
                    Token::Whitespace(4, 0),
                    Token::Key(5, "2".to_string()),
                    Token::Whitespace(6, 0),
                    Token::CloseArray(7),
                ]
                , Some(TokenError::Eof)
            ));

        run("[ 1 2 , 3 \"abc\" : -10 ]",
            (
                vec![
                    Token::OpenArray(0),
                    Token::Whitespace(1, 0),
                    Token::Key(2, "1".to_string()),
                    Token::Whitespace(3, 0),
                    Token::Key(4, "2".to_string()),
                    Token::Whitespace(5, 0),
                    Token::Comma(6),
                    Token::Whitespace(7, 0),
                    Token::Key(8, "3".to_string()),
                    Token::Whitespace(9, 0),
                    Token::DoubleQuoted(10, "abc".to_string()),
                    Token::Whitespace(15, 0),
                    Token::Split(16),
                    Token::Whitespace(17, 0),
                    Token::Key(18, "-10".to_string()),
                    Token::Whitespace(21, 0),
                    Token::CloseArray(22),
                ]
                , Some(TokenError::Eof)
            ));

        run("?(@.a가 <41.01)",
            (
                vec![
                    Token::Question(0),
                    Token::OpenParenthesis(1),
                    Token::At(2),
                    Token::Dot(3),
                    Token::Key(4, "a가".to_string()),
                    Token::Whitespace(8, 0),
                    Token::Little(9),
                    Token::Key(10, "41".to_string()),
                    Token::Dot(12),
                    Token::Key(13, "01".to_string()),
                    Token::CloseParenthesis(15),
                ]
                , Some(TokenError::Eof)
            ));

        run("?(@.a <4a.01)",
            (
                vec![
                    Token::Question(0),
                    Token::OpenParenthesis(1),
                    Token::At(2),
                    Token::Dot(3),
                    Token::Key(4, "a".to_string()),
                    Token::Whitespace(5, 0),
                    Token::Little(6),
                    Token::Key(7, "4a".to_string()),
                    Token::Dot(9),
                    Token::Key(10, "01".to_string()),
                    Token::CloseParenthesis(12),
                ]
                , Some(TokenError::Eof)
            ));

        run("?($.c>@.d)", (
            vec![
                Token::Question(0),
                Token::OpenParenthesis(1),
                Token::Absolute(2),
                Token::Dot(3),
                Token::Key(4, "c".to_string()),
                Token::Greater(5),
                Token::At(6),
                Token::Dot(7),
                Token::Key(8, "d".to_string()),
                Token::CloseParenthesis(9)
            ]
            , Some(TokenError::Eof)
        ));
    }
}
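
`Token::partial_eq` above compares `self.to_simple() == other.to_simple()`, i.e. it matches on the token's kind while ignoring the position it carries; that is what lets the parser's `close_token` test against `Token::CloseArray(DUMMY)` with a dummy position. The same effect can be had with `std::mem::discriminant` — a sketch with an illustrative enum, not the crate's `Token`:

```rust
use std::mem::discriminant;

// Tokens carry their byte position, but matching usually only cares
// about the variant, not where it was seen.
#[derive(Debug)]
enum Tok {
    Dot(usize),
    OpenArray(usize),
    CloseArray(usize),
}

fn same_kind(a: &Tok, b: &Tok) -> bool {
    // `discriminant` compares variants and ignores the payloads.
    discriminant(a) == discriminant(b)
}

fn main() {
    const DUMMY: usize = 0;
    assert!(same_kind(&Tok::CloseArray(17), &Tok::CloseArray(DUMMY)));
    assert!(!same_kind(&Tok::Dot(1), &Tok::OpenArray(1)));
}
```
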
3
src/prelude.rs
Normal file
@ -0,0 +1,3 @@
pub use parser::prelude::*;
pub use filter::prelude::*;
pub use ref_value::*;
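
With `parser`, `filter`, and `ref_value` now private modules, this crate-level prelude becomes the one public door to their internals; the new `tests/filter.rs` below drives the whole pipeline through `use jsonpath::prelude::*;`. The pattern in miniature, with illustrative module and type names:

```rust
// Library side: keep the module private, re-export the useful
// surface from a single `prelude` module.
mod engine {
    pub struct Engine;
    impl Engine {
        pub fn run(&self) -> &'static str { "ok" }
    }
}

pub mod prelude {
    pub use crate::engine::Engine;
}

// A caller (another crate, or an integration test) would write
// `use the_crate::prelude::*;` and get `Engine` without ever
// naming the private module.
fn main() {
    use crate::prelude::*;
    assert_eq!(Engine.run(), "ok");
}
```
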
@ -5,8 +5,7 @@ use std::sync::Arc;
use std::convert::Into;

use indexmap::map::IndexMap;
use serde_json::Number;
use serde_json::Value;
use serde_json::{Number, Value};

pub type TypeRefValue = Arc<Box<RefValue>>;

@ -81,7 +80,7 @@ impl RefIndex for str {
        match *v {
            RefValue::Object(ref mut map) => {
                map.entry(self.to_owned()).or_insert(RefValueWrapper::wrap(RefValue::Null))
            },
            }
            _ => panic!("cannot access key {:?} in JSON {:?}", self, v),
        }
    }
@ -182,7 +181,7 @@ impl RefValueWrapper {
    }
}

impl Into<RefValueWrapper> for Value {
impl Into<RefValueWrapper> for &Value {
    fn into(self) -> RefValueWrapper {
        let ref_val = RefValueConverter::new(self);
        RefValueWrapper::new(ref_val)
@ -275,7 +274,6 @@ impl RefValue {
}

impl Into<RefValueWrapper> for RefValue {

    fn into(self) -> RefValueWrapper {
        let wrap = RefValueWrapper::wrap(self);
        RefValueWrapper::new(wrap)
@ -285,11 +283,11 @@ impl Into<RefValueWrapper> for RefValue {
struct RefValueConverter;

impl RefValueConverter {
    fn new(value: Value) -> TypeRefValue {
    fn new(value: &Value) -> TypeRefValue {
        RefValueConverter {}.visit_value(value)
    }

    fn visit_value(&self, value: Value) -> TypeRefValue {
    fn visit_value(&self, value: &Value) -> TypeRefValue {
        match value {
            Value::Null => self.visit_null(),
            Value::Bool(v) => self.visit_bool(v),
@ -302,29 +300,29 @@ impl RefValueConverter {
    fn visit_null(&self) -> TypeRefValue {
        RefValueWrapper::wrap(RefValue::Null)
    }
    fn visit_bool(&self, value: bool) -> TypeRefValue {
        RefValueWrapper::wrap(RefValue::Bool(value))
    fn visit_bool(&self, value: &bool) -> TypeRefValue {
        RefValueWrapper::wrap(RefValue::Bool(*value))
    }
    fn visit_number(&self, value: serde_json::Number) -> TypeRefValue {
        RefValueWrapper::wrap(RefValue::Number(value))
    fn visit_number(&self, value: &serde_json::Number) -> TypeRefValue {
        RefValueWrapper::wrap(RefValue::Number(value.clone()))
    }
    fn visit_string(&self, value: String) -> TypeRefValue {
    fn visit_string(&self, value: &String) -> TypeRefValue {
        RefValueWrapper::wrap(RefValue::String(value.to_string()))
    }
    fn visit_array(&self, value: Vec<Value>) -> TypeRefValue {
    fn visit_array(&self, value: &Vec<Value>) -> TypeRefValue {
        let mut values = Vec::new();
        for v in value {
            values.push(self.visit_value(v));
        }
        RefValueWrapper::wrap(RefValue::Array(values))
    }
    fn visit_object(&self, mut value: serde_json::Map<String, Value>) -> TypeRefValue {
    fn visit_object(&self, value: &serde_json::Map<String, Value>) -> TypeRefValue {
        let mut map = IndexMap::new();
        let keys: Vec<String> = value.keys().into_iter().map(|k| k.to_string()).collect();
        for k in keys {
            let value = self.visit_value(match value.get_mut(&k) {
                Some(v) => v.take(),
                _ => Value::Null
            let value = self.visit_value(match value.get(&k) {
                Some(v) => v,
                _ => &Value::Null
            });
            map.insert(k, value);
        }
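
`RefValueConverter` now walks the tree entirely by reference: booleans are copied, numbers and strings cloned, and object children visited via `value.get(&k)` instead of the old `get_mut`/`take`, so the source `Value` is left intact after conversion. A self-contained sketch of the same borrowing fold, counting nodes instead of building `RefValue`:

```rust
use serde_json::Value;

// Fold over a borrowed Value; nothing is moved out of the tree.
fn count_nodes(value: &Value) -> usize {
    match value {
        Value::Array(items) => 1 + items.iter().map(count_nodes).sum::<usize>(),
        Value::Object(map) => 1 + map.values().map(count_nodes).sum::<usize>(),
        _ => 1, // null / bool / number / string are leaves
    }
}

fn main() {
    let doc = serde_json::json!({"a": [1, 2, {"b": true}]});
    assert_eq!(count_nodes(&doc), 6); // root + array + 1 + 2 + inner obj + true
    // `doc` is still fully usable here, because it was only borrowed.
    assert_eq!(doc["a"][2]["b"], Value::Bool(true));
}
```
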
345
tests/filter.rs
Normal file
@ -0,0 +1,345 @@
extern crate env_logger;
extern crate jsonpath_lib as jsonpath;
#[macro_use]
extern crate serde_json;

use std::io::Read;

use serde_json::Value;

use jsonpath::prelude::*;

fn setup() {
    let _ = env_logger::try_init();
}

fn new_value_filter(file: &str) -> ValueFilter {
    let string = read_json(file);
    let json: Value = serde_json::from_str(string.as_str()).unwrap();
    ValueFilter::new((&json).into(), false, false)
}

fn do_filter(path: &str, file: &str) -> JsonValueFilter {
    let string = read_json(file);
    let mut jf = JsonValueFilter::new(string.as_str()).unwrap();
    let mut parser = Parser::new(path);
    parser.parse(&mut jf).unwrap();
    jf
}

fn read_json(path: &str) -> String {
    let mut f = std::fs::File::open(path).unwrap();
    let mut contents = String::new();
    f.read_to_string(&mut contents).unwrap();
    contents
}

#[test]
fn step_in() {
    setup();

    let mut jf = new_value_filter("./benches/data_obj.json");
    {
        let current = jf.step_in_str("friends");
        assert_eq!(current.is_array(), true);
    }

    let mut jf = new_value_filter("./benches/data_array.json");
    {
        let current = jf.step_in_num(&1.0);
        assert_eq!(current.get_val().is_object(), true);
    }
    {
        let current = jf.step_in_str("friends");
        assert_eq!(current.is_array(), true);
    }
    let mut jf = new_value_filter("./benches/data_obj.json");
    {
        jf.step_in_str("school");
        jf.step_in_str("friends");
        jf.step_in_all();
        let current = jf.step_in_str("name");
        let friends = json!([
            "Millicent Norman",
            "Vincent Cannon",
            "Gray Berry"
        ]);
        assert_eq!(friends, current.get_val().into_value());
    }
    let mut jf = new_value_filter("./benches/data_obj.json");
    {
        let current = jf.step_leaves_str("name");
        let names = json!([
            "Leonor Herman",
            "Millicent Norman",
            "Vincent Cannon",
            "Gray Berry",
            "Vincent Cannon",
            "Gray Berry"
        ]);
        assert_eq!(names, current.get_val().into_value());
    }
}

#[test]
fn array() {
    setup();

    let friends = json!([
        {"id": 1, "name": "Vincent Cannon" },
        {"id": 2, "name": "Gray Berry"}
    ]);

    let jf = do_filter("$.school.friends[1, 2]", "./benches/data_obj.json");
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$.school.friends[1:]", "./benches/data_obj.json");
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$.school.friends[:-2]", "./benches/data_obj.json");
    let friends = json!([
        {"id": 0, "name": "Millicent Norman"}
    ]);
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$..friends[2].name", "./benches/data_obj.json");
    let friends = json!(["Gray Berry", "Gray Berry"]);
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$..friends[*].name", "./benches/data_obj.json");
    let friends = json!(["Vincent Cannon","Gray Berry","Millicent Norman","Vincent Cannon","Gray Berry"]);
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$['school']['friends'][*].['name']", "./benches/data_obj.json");
    let friends = json!(["Millicent Norman","Vincent Cannon","Gray Berry"]);
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$['school']['friends'][0].['name']", "./benches/data_obj.json");
    let friends = json!("Millicent Norman");
    assert_eq!(friends, jf.current_value().into_value());
}

#[test]
fn return_type() {
    setup();

    let friends = json!({
        "friends": [
            {"id": 0, "name": "Millicent Norman"},
            {"id": 1, "name": "Vincent Cannon" },
            {"id": 2, "name": "Gray Berry"}
        ]
    });

    let jf = do_filter("$.school", "./benches/data_obj.json");
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$.school[?(@.friends[0])]", "./benches/data_obj.json");
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$.school[?(@.friends[10])]", "./benches/data_obj.json");
    assert_eq!(Value::Null, jf.current_value().into_value());

    let jf = do_filter("$.school[?(1==1)]", "./benches/data_obj.json");
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$.school.friends[?(1==1)]", "./benches/data_obj.json");
    let friends = json!([
        {"id": 0, "name": "Millicent Norman"},
        {"id": 1, "name": "Vincent Cannon" },
        {"id": 2, "name": "Gray Berry"}
    ]);
    assert_eq!(friends, jf.current_value().into_value());
}

#[test]
fn op() {
    setup();

    let jf = do_filter("$.school[?(@.friends == @.friends)]", "./benches/data_obj.json");
    let friends = json!({
        "friends": [
            {"id": 0, "name": "Millicent Norman"},
            {"id": 1, "name": "Vincent Cannon" },
            {"id": 2, "name": "Gray Berry"}
        ]
    });
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$.friends[?(@.name)]", "./benches/data_obj.json");
    let friends = json!([
        { "id" : 1, "name" : "Vincent Cannon" },
        { "id" : 2, "name" : "Gray Berry" }
    ]);
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$.friends[?(@.id >= 2)]", "./benches/data_obj.json");
    let friends = json!([
        { "id" : 2, "name" : "Gray Berry" }
    ]);
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$.friends[?(@.id >= 2 || @.id == 1)]", "./benches/data_obj.json");
    let friends = json!([
        { "id" : 2, "name" : "Gray Berry" },
        { "id" : 1, "name" : "Vincent Cannon" }
    ]);
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$.friends[?( (@.id >= 2 || @.id == 1) && @.id == 0)]", "./benches/data_obj.json");
    assert_eq!(Value::Null, jf.current_value().into_value());

    let jf = do_filter("$..friends[?(@.id == $.index)].id", "./benches/data_obj.json");
    let friends = json!([0, 0]);
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$..book[?($.store.bicycle.price < @.price)].price", "./benches/example.json");
    let friends = json!([22.99]);
    assert_eq!(friends, jf.current_value().into_value());

    let jf = do_filter("$..book[?( (@.price == 12.99 || @.category == 'reference') && @.price > 10)].price", "./benches/example.json");
    let friends = json!([12.99]);
    assert_eq!(friends, jf.current_value().into_value());
}

#[test]
fn example() {
    setup();

    let jf = do_filter("$.store.book[*].author", "./benches/example.json");
    let ret = json!(["Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"]);
    assert_eq!(ret, jf.current_value().into_value());

    let jf = do_filter("$..author", "./benches/example.json");
    assert_eq!(ret, jf.current_value().into_value());

    let jf = do_filter("$.store.*", "./benches/example.json");
    let ret = json!([
        [
            {"category" : "reference", "author" : "Nigel Rees","title" : "Sayings of the Century", "price" : 8.95},
            {"category" : "fiction", "author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99},
            {"category" : "fiction", "author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99},
            {"category" : "fiction", "author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99}
        ],
        {"color" : "red","price" : 19.95},
    ]);
    assert_eq!(ret, jf.current_value().into_value());

    let jf = do_filter("$.store..price", "./benches/example.json");
    let ret = json!([8.95, 12.99, 8.99, 22.99, 19.95]);
    assert_eq!(ret, jf.current_value().into_value());

    let jf = do_filter("$..book[2]", "./benches/example.json");
    let ret = json!([{
        "category" : "fiction",
        "author" : "Herman Melville",
        "title" : "Moby Dick",
        "isbn" : "0-553-21311-3",
        "price" : 8.99
    }]);
    assert_eq!(ret, jf.current_value().into_value());

    let jf = do_filter("$..book[-2]", "./benches/example.json");
    let ret = json!([{
        "category" : "fiction",
        "author" : "Herman Melville",
        "title" : "Moby Dick",
        "isbn" : "0-553-21311-3",
        "price" : 8.99
    }]);
    assert_eq!(ret, jf.current_value().into_value());

    let jf = do_filter("$..book[0,1]", "./benches/example.json");
    let ret = json!([
        {
            "category" : "reference",
            "author" : "Nigel Rees",
            "title" : "Sayings of the Century",
            "price" : 8.95
        },
        {
            "category" : "fiction",
            "author" : "Evelyn Waugh",
            "title" : "Sword of Honour",
            "price" : 12.99
        }
    ]);
    assert_eq!(ret, jf.current_value().into_value());

    let jf = do_filter("$..book[:2]", "./benches/example.json");
    let ret = json!([
        {
            "category" : "reference",
            "author" : "Nigel Rees",
            "title" : "Sayings of the Century",
            "price" : 8.95
        },
        {
            "category" : "fiction",
            "author" : "Evelyn Waugh",
            "title" : "Sword of Honour",
            "price" : 12.99
        }
    ]);
    assert_eq!(ret, jf.current_value().into_value());

    let jf = do_filter("$..book[2:]", "./benches/example.json");
    let ret = json!([
        {
            "category" : "fiction",
            "author" : "Herman Melville",
            "title" : "Moby Dick",
            "isbn" : "0-553-21311-3",
            "price" : 8.99
        },
        {
            "category" : "fiction",
            "author" : "J. R. R. Tolkien",
            "title" : "The Lord of the Rings",
            "isbn" : "0-395-19395-8",
            "price" : 22.99
        }
    ]);
    assert_eq!(ret, jf.current_value().into_value());

    let jf = do_filter("$..book[?(@.isbn)]", "./benches/example.json");
|
||||
let ret = json!([
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "J. R. R. Tolkien",
|
||||
"title" : "The Lord of the Rings",
|
||||
"isbn" : "0-395-19395-8",
|
||||
"price" : 22.99
|
||||
}
|
||||
]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$.store.book[?(@.price < 10)]", "./benches/example.json");
|
||||
let ret = json!([
|
||||
{
|
||||
"category" : "reference",
|
||||
"author" : "Nigel Rees",
|
||||
"title" : "Sayings of the Century",
|
||||
"price" : 8.95
|
||||
},
|
||||
{
|
||||
"category" : "fiction",
|
||||
"author" : "Herman Melville",
|
||||
"title" : "Moby Dick",
|
||||
"isbn" : "0-553-21311-3",
|
||||
"price" : 8.99
|
||||
}
|
||||
]);
|
||||
assert_eq!(ret, jf.current_value().into_value());
|
||||
|
||||
let jf = do_filter("$..*", "./benches/example.json");
|
||||
let json: Value = serde_json::from_str(read_json("./benches/giveme_every_thing_result.json").as_str()).unwrap();
|
||||
assert_eq!(json, jf.current_value().into_value());
|
||||
}
|
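The tests above drive the internal value filter through the `do_filter` helper; the public one-shot API reaches the same results directly. A minimal sketch, assuming only the `select` entry point and `json!` macro exercised in tests/lib.rs below, where `select` returns the matched elements as a JSON array:

extern crate jsonpath_lib as jsonpath;
#[macro_use]
extern crate serde_json;

use std::io::Read;
use serde_json::Value;

// Same file-loading helper as tests/lib.rs below.
fn read_json(path: &str) -> Value {
    let mut contents = String::new();
    std::fs::File::open(path).unwrap().read_to_string(&mut contents).unwrap();
    serde_json::from_str(contents.as_str()).unwrap()
}

#[test]
fn cheap_books_via_public_api() {
    let json_obj = read_json("./benches/example.json");
    let found = jsonpath::select(&json_obj, "$.store.book[?(@.price < 10)]").unwrap();
    // The same two books the example() test above expects.
    assert_eq!(found, json!([
        {"category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95},
        {"category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99}
    ]));
}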
69
tests/lib.rs
Normal file
@ -0,0 +1,69 @@
extern crate env_logger;
extern crate jsonpath_lib as jsonpath;
extern crate log;
#[macro_use]
extern crate serde_json;

use std::io::Read;

use serde_json::Value;

fn read_json(path: &str) -> Value {
    let mut f = std::fs::File::open(path).unwrap();
    let mut contents = String::new();
    f.read_to_string(&mut contents).unwrap();
    serde_json::from_str(contents.as_str()).unwrap()
}

#[test]
fn compile() {
    let mut template = jsonpath::compile("$..friends[2]");
    let json_obj = read_json("./benches/data_obj.json");
    let json = template(&json_obj).unwrap();
    let ret = json!([
        {"id": 2, "name": "Gray Berry"},
        {"id": 2, "name": "Gray Berry"}
    ]);
    assert_eq!(json, ret);

    let json_obj = read_json("./benches/data_array.json");
    let json = template(&json_obj).unwrap();
    let ret = json!([
        {"id": 2, "name": "Gray Berry"},
        {"id": 2, "name": "Rosetta Erickson"}
    ]);
    assert_eq!(json, ret);
}

#[test]
fn selector() {
    let json_obj = read_json("./benches/data_obj.json");
    let mut reader = jsonpath::selector(&json_obj);
    let json = reader("$..friends[2]").unwrap();
    let ret = json!([
        {"id": 2, "name": "Gray Berry"},
        {"id": 2, "name": "Gray Berry"}
    ]);
    assert_eq!(json, ret);

    let json = reader("$..friends[0]").unwrap();
    let ret = json!([
        {"id": 0},
        {"id": 0, "name": "Millicent Norman"}
    ]);
    assert_eq!(json, ret);
}

#[test]
fn select() {
    let json_obj = read_json("./benches/example.json");
    let json = jsonpath::select(&json_obj, "$..book[2]").unwrap();
    let ret = json!([{
        "category": "fiction",
        "author": "Herman Melville",
        "title": "Moby Dick",
        "isbn": "0-553-21311-3",
        "price": 8.99
    }]);
    assert_eq!(json, ret);
}
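The three entry points trade setup cost for reuse: `compile` fixes the path and accepts many documents, `selector` fixes the document and accepts many paths, and `select` is the one-shot form. A minimal sketch under the same imports and `read_json` helper as the file above:

fn reuse_patterns() {
    // One path, many documents: compile once, run per document.
    let mut friends_of_two = jsonpath::compile("$..friends[2]");
    for path in &["./benches/data_obj.json", "./benches/data_array.json"] {
        let doc = read_json(*path);
        let _ = friends_of_two(&doc).unwrap();
    }

    // One document, many paths: build one selector, query it repeatedly.
    let doc = read_json("./benches/data_obj.json");
    let mut reader = jsonpath::selector(&doc);
    let _ = reader("$..friends[2]").unwrap();
    let _ = reader("$..friends[0]").unwrap();
}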
313
tests/parser.rs
Normal file
@ -0,0 +1,313 @@
extern crate env_logger;
extern crate jsonpath_lib as jsonpath;

use std::result;

use jsonpath::prelude::*;

struct NodeVisitorTestImpl<'a> {
    input: &'a str,
    stack: Vec<ParseToken>,
}

impl<'a> NodeVisitorTestImpl<'a> {
    fn new(input: &'a str) -> Self {
        NodeVisitorTestImpl { input, stack: Vec::new() }
    }

    fn visit(&mut self) -> result::Result<Vec<ParseToken>, String> {
        let mut parser = Parser::new(self.input);
        parser.parse(self)?;
        Ok(self.stack.split_off(0))
    }
}

impl<'a> NodeVisitor for NodeVisitorTestImpl<'a> {
    fn visit_token(&mut self, token: ParseToken) {
        self.stack.push(token);
    }
}

fn setup() {
    let _ = env_logger::try_init();
}

fn run(input: &str) -> result::Result<Vec<ParseToken>, String> {
    let mut interpreter = NodeVisitorTestImpl::new(input);
    interpreter.visit()
}

#[test]
fn parse_path() {
    setup();

    assert_eq!(run("$.aa"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::In,
        ParseToken::Key("aa".to_owned())
    ]));

    assert_eq!(run("$.00.a"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::In,
        ParseToken::Key("00".to_owned()),
        ParseToken::In,
        ParseToken::Key("a".to_owned())
    ]));

    assert_eq!(run("$.00.韓창.seok"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::In,
        ParseToken::Key("00".to_owned()),
        ParseToken::In,
        ParseToken::Key("韓창".to_owned()),
        ParseToken::In,
        ParseToken::Key("seok".to_owned())
    ]));

    assert_eq!(run("$.*"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::In,
        ParseToken::All
    ]));

    assert_eq!(run("$..*"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::Leaves,
        ParseToken::All
    ]));

    assert_eq!(run("$..[0]"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::Leaves,
        ParseToken::Array,
        ParseToken::Number(0.0),
        ParseToken::ArrayEof
    ]));

    assert!(run("$.").is_err());
    assert!(run("$..").is_err());
    assert!(run("$. a").is_err());
}

#[test]
fn parse_array_syntax() {
    setup();

    assert_eq!(run("$.book[?(@.isbn)]"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::In,
        ParseToken::Key("book".to_string()),
        ParseToken::Array,
        ParseToken::Relative,
        ParseToken::In,
        ParseToken::Key("isbn".to_string()),
        ParseToken::ArrayEof
    ]));
    //
    // An Array is treated as an `In` context too, so when they are nested only one is emitted.
    //
    assert_eq!(run("$.[*]"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::Array,
        ParseToken::All,
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$.a[*]"), Ok(vec![
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Array,
        ParseToken::All,
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$.a[*].가"), Ok(vec![
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Array,
        ParseToken::All,
        ParseToken::ArrayEof,
        ParseToken::In, ParseToken::Key("가".to_owned())
    ]));

    assert_eq!(run("$.a[0][1]"), Ok(vec![
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Array,
        ParseToken::Number(0_f64),
        ParseToken::ArrayEof,
        ParseToken::Array,
        ParseToken::Number(1_f64),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$.a[1,2]"), Ok(vec![
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Array,
        ParseToken::Union(vec![1, 2]),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$.a[10:]"), Ok(vec![
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Array,
        ParseToken::Range(Some(10), None),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$.a[:11]"), Ok(vec![
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Array,
        ParseToken::Range(None, Some(11)),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$.a[-12:13]"), Ok(vec![
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Array,
        ParseToken::Range(Some(-12), Some(13)),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$.a[?(1>2)]"), Ok(vec![
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Array,
        ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Greater),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$.a[?($.b>3)]"), Ok(vec![
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Array,
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("b".to_owned()), ParseToken::Number(3_f64), ParseToken::Filter(FilterToken::Greater),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$[?($.c>@.d && 1==2)]"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::Array,
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
        ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
        ParseToken::Filter(FilterToken::Greater),
        ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
        ParseToken::Filter(FilterToken::And),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$[?($.c>@.d&&(1==2||3>=4))]"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::Array,
        ParseToken::Absolute, ParseToken::In, ParseToken::Key("c".to_owned()),
        ParseToken::Relative, ParseToken::In, ParseToken::Key("d".to_owned()),
        ParseToken::Filter(FilterToken::Greater),
        ParseToken::Number(1_f64), ParseToken::Number(2_f64), ParseToken::Filter(FilterToken::Equal),
        ParseToken::Number(3_f64), ParseToken::Number(4_f64), ParseToken::Filter(FilterToken::GreaterOrEqual),
        ParseToken::Filter(FilterToken::Or),
        ParseToken::Filter(FilterToken::And),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$[?(@.a<@.b)]"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::Array,
        ParseToken::Relative, ParseToken::In, ParseToken::Key("a".to_owned()),
        ParseToken::Relative, ParseToken::In, ParseToken::Key("b".to_owned()),
        ParseToken::Filter(FilterToken::Little),
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$[*][*][*]"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::Array,
        ParseToken::All,
        ParseToken::ArrayEof,
        ParseToken::Array,
        ParseToken::All,
        ParseToken::ArrayEof,
        ParseToken::Array,
        ParseToken::All,
        ParseToken::ArrayEof
    ]));

    assert_eq!(run("$['a']['bb']"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::Array,
        ParseToken::Key("a".to_string()),
        ParseToken::ArrayEof,
        ParseToken::Array,
        ParseToken::Key("bb".to_string()),
        ParseToken::ArrayEof
    ]));

    assert!(run("$[").is_err());
    assert!(run("$[]").is_err());
    assert!(run("$[a]").is_err());
    assert!(run("$[?($.a)]").is_err());
    assert!(run("$[?(@.a > @.b]").is_err());
    assert!(run("$[?(@.a < @.b&&(@.c < @.d)]").is_err());
}

#[test]
fn parse_array_float() {
    setup();

    assert_eq!(run("$[?(1.1<2.1)]"), Ok(vec![
        ParseToken::Absolute,
        ParseToken::Array,
        ParseToken::Number(1.1), ParseToken::Number(2.1), ParseToken::Filter(FilterToken::Little),
        ParseToken::ArrayEof
    ]));

    assert!(run("$[1.1]").is_err());
    assert!(run("$[?(1.1<.2)]").is_err());
    assert!(run("$[?(1.1<2.)]").is_err());
    assert!(run("$[?(1.1<2.a)]").is_err());
}
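The parser is push-based: `Parser::parse` walks the path and feeds every `ParseToken` to the `NodeVisitor` it is handed, as `NodeVisitorTestImpl` above shows. A minimal sketch of another visitor under the same `jsonpath::prelude` imports; `LeavesDetector` and `has_recursive_descent` are illustrative names, not crate API:

// Hypothetical visitor: flag whether a path uses recursive descent (`..`),
// which the parser reports as ParseToken::Leaves.
struct LeavesDetector {
    found: bool,
}

impl NodeVisitor for LeavesDetector {
    fn visit_token(&mut self, token: ParseToken) {
        if let ParseToken::Leaves = token {
            self.found = true;
        }
    }
}

fn has_recursive_descent(path: &str) -> Result<bool, String> {
    let mut visitor = LeavesDetector { found: false };
    let mut parser = Parser::new(path);
    parser.parse(&mut visitor)?;
    Ok(visitor.found)
}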
194
tests/tokenizer.rs
Normal file
@ -0,0 +1,194 @@
extern crate jsonpath_lib as jsonpath;

use jsonpath::prelude::*;

fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
    let mut tokenizer = Tokenizer::new(input);
    let mut vec = vec![];
    loop {
        match tokenizer.next_token() {
            Ok(t) => vec.push(t),
            Err(e) => return (vec, Some(e)),
        }
    }
}

fn run(input: &str, expected: (Vec<Token>, Option<TokenError>)) {
    let (vec, err) = collect_token(input);
    assert_eq!((vec, err), expected, "\"{}\"", input);
}

#[test]
fn peek() {
    let mut tokenizer = PreloadedTokenizer::new("$.a");
    match tokenizer.next_token() {
        Ok(t) => assert_eq!(Token::Absolute(0), t),
        _ => panic!()
    }

    // peek_token does not consume: two peeks in a row see the same token.
    match tokenizer.peek_token() {
        Ok(t) => assert_eq!(&Token::Dot(1), t),
        _ => panic!()
    }

    match tokenizer.peek_token() {
        Ok(t) => assert_eq!(&Token::Dot(1), t),
        _ => panic!()
    }

    match tokenizer.next_token() {
        Ok(t) => assert_eq!(Token::Dot(1), t),
        _ => panic!()
    }
}

#[test]
fn token() {
    run("$.01.a",
        (
            vec![
                Token::Absolute(0),
                Token::Dot(1),
                Token::Key(2, "01".to_string()),
                Token::Dot(4),
                Token::Key(5, "a".to_string())
            ],
            Some(TokenError::Eof)
        ));

    run("$. []",
        (
            vec![
                Token::Absolute(0),
                Token::Dot(1),
                Token::Whitespace(2, 2),
                Token::OpenArray(5),
                Token::CloseArray(6)
            ],
            Some(TokenError::Eof)
        ));

    run("$..",
        (
            vec![
                Token::Absolute(0),
                Token::Dot(1),
                Token::Dot(2),
            ],
            Some(TokenError::Eof)
        ));

    run("$..ab",
        (
            vec![
                Token::Absolute(0),
                Token::Dot(1),
                Token::Dot(2),
                Token::Key(3, "ab".to_string())
            ],
            Some(TokenError::Eof)
        ));

    run("$..가 [",
        (
            vec![
                Token::Absolute(0),
                Token::Dot(1),
                Token::Dot(2),
                Token::Key(3, "가".to_string()),
                Token::Whitespace(6, 0),
                Token::OpenArray(7),
            ],
            Some(TokenError::Eof)
        ));

    run("[-1, 2 ]",
        (
            vec![
                Token::OpenArray(0),
                Token::Key(1, "-1".to_string()),
                Token::Comma(3),
                Token::Whitespace(4, 0),
                Token::Key(5, "2".to_string()),
                Token::Whitespace(6, 0),
                Token::CloseArray(7),
            ],
            Some(TokenError::Eof)
        ));

    run("[ 1 2 , 3 \"abc\" : -10 ]",
        (
            vec![
                Token::OpenArray(0),
                Token::Whitespace(1, 0),
                Token::Key(2, "1".to_string()),
                Token::Whitespace(3, 0),
                Token::Key(4, "2".to_string()),
                Token::Whitespace(5, 0),
                Token::Comma(6),
                Token::Whitespace(7, 0),
                Token::Key(8, "3".to_string()),
                Token::Whitespace(9, 0),
                Token::DoubleQuoted(10, "abc".to_string()),
                Token::Whitespace(15, 0),
                Token::Split(16),
                Token::Whitespace(17, 0),
                Token::Key(18, "-10".to_string()),
                Token::Whitespace(21, 0),
                Token::CloseArray(22),
            ],
            Some(TokenError::Eof)
        ));

    run("?(@.a가 <41.01)",
        (
            vec![
                Token::Question(0),
                Token::OpenParenthesis(1),
                Token::At(2),
                Token::Dot(3),
                Token::Key(4, "a가".to_string()),
                Token::Whitespace(8, 0),
                Token::Little(9),
                Token::Key(10, "41".to_string()),
                Token::Dot(12),
                Token::Key(13, "01".to_string()),
                Token::CloseParenthesis(15),
            ],
            Some(TokenError::Eof)
        ));

    run("?(@.a <4a.01)",
        (
            vec![
                Token::Question(0),
                Token::OpenParenthesis(1),
                Token::At(2),
                Token::Dot(3),
                Token::Key(4, "a".to_string()),
                Token::Whitespace(5, 0),
                Token::Little(6),
                Token::Key(7, "4a".to_string()),
                Token::Dot(9),
                Token::Key(10, "01".to_string()),
                Token::CloseParenthesis(12),
            ],
            Some(TokenError::Eof)
        ));

    run("?($.c>@.d)",
        (
            vec![
                Token::Question(0),
                Token::OpenParenthesis(1),
                Token::Absolute(2),
                Token::Dot(3),
                Token::Key(4, "c".to_string()),
                Token::Greater(5),
                Token::At(6),
                Token::Dot(7),
                Token::Key(8, "d".to_string()),
                Token::CloseParenthesis(9)
            ],
            Some(TokenError::Eof)
        ));
}
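Every expectation above ends with `Some(TokenError::Eof)` because `collect_token` pulls tokens until `next_token` errors, and a clean end of input is reported as `Eof`. A minimal sketch that leans on the same convention to drop whitespace tokens; `significant_tokens` is an illustrative helper, not crate API:

// Hypothetical helper: collect tokens but skip whitespace, stopping at the
// Eof error that marks a normal end of input in the tests above.
fn significant_tokens(input: &str) -> Vec<Token> {
    let mut tokenizer = Tokenizer::new(input);
    let mut tokens = vec![];
    loop {
        match tokenizer.next_token() {
            Ok(Token::Whitespace(_, _)) => {} // layout-only, drop it
            Ok(t) => tokens.push(t),
            Err(_) => return tokens, // TokenError::Eof (or a real error)
        }
    }
}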
@ -2,11 +2,8 @@
name = "jsonpath-wasm"
version = "0.1.3"
authors = ["Changseok Han <freestrings@gmail.com>"]
description = "JsonPath Webassembly version compiled by Rust - Demo: https://freestrings.github.io/jsonpath"
keywords = ["library", "jsonpath", "json", "webassembly"]
repository = "https://github.com/freestrings/jsonpath"
license = "MIT"
@ -1,79 +0,0 @@
#!/bin/bash

set -e

# project_root/wasm
DIR="$(pwd)"

cd "${DIR}"/www && \
rm -rf "${DIR}"/www/dist && \
rm -rf "${DIR}"/www/node_modules && \
rm -rf "${DIR}"/www_bench/dist && \
rm -rf "${DIR}"/www_bench/node_modules && \
npm install && \
cd "${DIR}"

echo "-------------------- start build nodejs pkg --------------------"
echo

rm -rf "${DIR}"/wasm/nodejs_pkg && \
wasm-pack build --target=nodejs --scope nodejs --out-dir nodejs_pkg && \
cd "${DIR}"/nodejs_pkg && npm link && \
rm -rf "${DIR}"/../benches/javascript/node_modules && \
cd "${DIR}"/../benches/javascript && npm install && \
npm link @nodejs/jsonpath-wasm
echo "-------------------- build nodejs pkg done --------------------"

cd "${DIR}"

echo
echo
echo
echo
echo
echo
echo "-------------------- start build browser pkg --------------------"
echo
rm -rf "${DIR}"/wasm/browser_pkg && \
wasm-pack build --target=browser --scope browser --out-dir browser_pkg && \
cd "${DIR}"/browser_pkg && npm link && \
cd "${DIR}"/www && npm link @browser/jsonpath-wasm
echo "-------------------- build browser pkg done --------------------"

echo
echo
echo
echo
echo
echo
echo "-------------------- start build browser bench pkg --------------------"
echo
rm -rf "${DIR}"/www_bench/node_modules && \
cd "${DIR}"/www_bench && npm install && npm link @browser/jsonpath-wasm
echo "-------------------- build browser bench pkg done --------------------"

echo
echo
echo
echo
echo
echo
echo "-------------------- start build docs --------------------"
cd "${DIR}"/www && \
npm run build && \
rm -f "${DIR}"/../docs/*.js && rm -f "${DIR}"/../docs/*.wasm && rm -f "${DIR}"/../docs/*.html && \
cp "${DIR}"/www/dist/*.* "${DIR}"/../docs/
echo "-------------------- build docs done --------------------"

echo
echo
echo
echo
echo
echo
echo "-------------------- start build docs bench --------------------"
cd "${DIR}"/www_bench && \
npm run build && \
rm -f "${DIR}"/../docs/bench/*.js && rm -f "${DIR}"/../docs/bench/*.wasm && rm -f "${DIR}"/../docs/bench/*.html && \
cp "${DIR}"/www_bench/dist/*.* "${DIR}"/../docs/bench/
echo "-------------------- build docs bench done --------------------"
@ -11,9 +11,7 @@ use std::result::Result;
use std::sync::Mutex;

use cfg_if::cfg_if;
-use jsonpath::filter::value_filter::*;
-use jsonpath::parser::parser::*;
-use jsonpath::ref_value::*;
+use jsonpath::prelude::*;
use serde_json::Value;
use wasm_bindgen::prelude::*;
use web_sys::console;

@ -54,7 +52,7 @@ fn into_serde_json(js_value: &JsValue) -> Result<Value, String> {
fn into_ref_value(js_value: &JsValue, node: Node) -> JsValue {
    match into_serde_json(js_value) {
-       Ok(json) => filter_ref_value(json.into(), node),
+       Ok(json) => filter_ref_value((&json).into(), node),
        Err(e) => JsValue::from_str(&format!("Json serialize error: {}", e))
    }
}
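This hunk and the two below change `json.into()` to `(&json).into()`, which suggests the `ref_value` conversion is now implemented for `&Value` rather than for an owned `Value`. A minimal sketch of that pattern, assuming nothing about the real wrapper beyond the call shape; `RefValueLike` is a stand-in name, not the crate's type:

use serde_json::Value;

// Stand-in for the crate's ref_value wrapper (illustrative only).
struct RefValueLike(Value);

// With the impl on &Value, callers convert through a borrow and keep
// ownership of their Value for later use.
impl From<&Value> for RefValueLike {
    fn from(v: &Value) -> Self {
        RefValueLike(v.clone())
    }
}

fn demo(json: Value) -> RefValueLike {
    // `(&json).into()` matches the From<&Value> impl; a bare `json.into()`
    // would only compile against a From<Value> impl.
    (&json).into()
}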
@ -87,7 +85,7 @@ pub fn alloc_json(js_value: JsValue) -> usize {
            let mut idx = CACHE_JSON_IDX.lock().unwrap();
            *idx += 1;
-           map.insert(*idx, json.into());
+           map.insert(*idx, (&json).into());
            *idx
        }
        Err(e) => {

@ -138,7 +136,7 @@ pub fn selector(js_value: JsValue) -> JsValue {
        }
        _ => {
            match into_serde_json(&js_value) {
-               Ok(json) => json.into(),
+               Ok(json) => (&json).into(),
                Err(e) => return JsValue::from_str(e.as_str())
            }
        }