Support numbers and booleans (#64)
https://github.com/fluencelabs/aquavm
Cargo.lock (generated): 7 changed lines
@@ -11,7 +11,7 @@ dependencies = [
 
 [[package]]
 name = "air-parser"
-version = "0.3.0"
+version = "0.4.0"
 dependencies = [
  "codespan",
  "codespan-reporting",
@@ -21,6 +21,7 @@ dependencies = [
  "lalrpop-util",
  "regex",
  "serde",
+ "serde_json",
  "thiserror",
 ]
 
@@ -57,7 +58,7 @@ dependencies = [
 
 [[package]]
 name = "aquamarine"
-version = "0.4.1"
+version = "0.5.0"
 dependencies = [
  "fluence",
  "log",
@@ -1693,7 +1694,7 @@ dependencies = [
 
 [[package]]
 name = "stepper-lib"
-version = "0.4.1"
+version = "0.5.0"
 dependencies = [
  "air-parser",
  "aqua-test-utils",
crates/air-parser/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "air-parser"
-version = "0.3.0"
+version = "0.4.0"
 authors = ["Fluence Labs"]
 edition = "2018"
 license = "Apache-2.0"
@@ -16,6 +16,7 @@ codespan-reporting = "0.9.5"
 
 # TODO: hide serde behind a feature
 serde = { version = "=1.0.118", features = ["rc"] }
+serde_json = "=1.0.61"
 
 thiserror = "1.0.23"
 
crates/air-parser/src/parser/air.lalrpop
@@ -2,6 +2,7 @@ use crate::parser::ast::*;
 use crate::parser::into_variable_and_path;
 use crate::parser::lexer::LexerError;
 use crate::parser::lexer::Token;
+use crate::parser::lexer::Number;
 
 use lalrpop_util::ErrorRecovery;
 use std::rc::Rc;
@@ -86,6 +87,8 @@ CallInstrArgValue: CallInstrArgValue<'input> = {
         let (variable, path) = into_variable_and_path(v.0, v.1);
         CallInstrArgValue::JsonPath { variable, path }
     },
+    <n:Number> => CallInstrArgValue::Number(n),
+    <b:Boolean> => CallInstrArgValue::Boolean(b),
     InitPeerId => CallInstrArgValue::InitPeerId,
     LastError => CallInstrArgValue::LastError,
 }
@@ -121,6 +124,8 @@ extern {
     Literal => Token::StringLiteral(<&'input str>),
     JsonPath => Token::JsonPath(<&'input str>, <usize>),
     Accumulator => Token::Accumulator(<&'input str>),
+    Number => Token::Number(<Number>),
+    Boolean => Token::Boolean(<bool>),
 
     InitPeerId => Token::InitPeerId,
     LastError => Token::LastError,
crates/air-parser/src/parser/air.rs (generated): file diff suppressed because it is too large
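
With the grammar alternatives above in place, call arguments can be written as bare numbers and booleans in addition to string literals, variables, and json paths. A hypothetical AIR snippet (peer, service, and function names invented for illustration):

    (call "example_peer" ("example_service" "example_function") [42 -3.14 true "still a string"] result)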
@@ -126,8 +126,26 @@ fn lexical_error_to_label(file_id: usize, error: LexerError) -> Label<usize> {
         EmptyAccName(start, end) => {
             Label::primary(file_id, start..end).with_message(error.to_string())
         }
+        EmptyVariableOrConst(start, end) => {
+            Label::primary(file_id, start..end).with_message(error.to_string())
+        }
         InvalidJsonPath(start, end) => {
             Label::primary(file_id, start..end).with_message(error.to_string())
         }
+        UnallowedCharInNumber(start, end) => {
+            Label::primary(file_id, start..end).with_message(error.to_string())
+        }
+        ParseIntError(start, end, _) => {
+            Label::primary(file_id, start..end).with_message(error.to_string())
+        }
+        ParseFloatError(start, end, _) => {
+            Label::primary(file_id, start..end).with_message(error.to_string())
+        }
+        TooBigFloat(start, end) => {
+            Label::primary(file_id, start..end).with_message(error.to_string())
+        }
+        LeadingDot(start, end) => {
+            Label::primary(file_id, start..end).with_message(error.to_string())
+        }
     }
 }
crates/air-parser/src/parser/ast.rs
@@ -1,18 +1,4 @@
-/*
- * Copyright 2020 Fluence Labs Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+pub use crate::parser::lexer::Number;
 use serde::Deserialize;
 use serde::Serialize;
@@ -20,7 +6,7 @@ use serde::Serialize;
 use std::rc::Rc;
 
 #[allow(clippy::large_enum_variant)] // for Null and Error variants
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub enum Instruction<'i> {
     Null(Null),
     Call(Call<'i>),
@@ -34,19 +20,19 @@ pub enum Instruction<'i> {
     Error,
 }
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub enum PeerPart<'i> {
     PeerPk(CallInstrValue<'i>),
     PeerPkWithServiceId(CallInstrValue<'i>, CallInstrValue<'i>),
 }
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub enum FunctionPart<'i> {
     FuncName(CallInstrValue<'i>),
     ServiceIdWithFuncName(CallInstrValue<'i>, CallInstrValue<'i>),
 }
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub struct Call<'i> {
     pub peer_part: PeerPart<'i>,
     pub function_part: FunctionPart<'i>,
@@ -54,7 +40,7 @@ pub struct Call<'i> {
     pub output: CallOutputValue<'i>,
 }
 
-#[derive(Debug, PartialEq, Eq, Clone, Hash, Serialize, Deserialize)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum CallInstrValue<'i> {
     InitPeerId,
     Literal(&'i str),
@@ -62,67 +48,71 @@ pub enum CallInstrValue<'i> {
     JsonPath { variable: &'i str, path: &'i str },
 }
 
-#[derive(Debug, PartialEq, Eq, Clone, Hash, Serialize, Deserialize)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum CallInstrArgValue<'i> {
     InitPeerId,
     LastError,
     Literal(&'i str),
+    Number(Number),
+    Boolean(bool),
     Variable(&'i str),
     JsonPath { variable: &'i str, path: &'i str },
 }
 
-#[derive(Debug, PartialEq, Eq, Clone, Hash, Serialize, Deserialize)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum IterableValue<'i> {
     Variable(&'i str),
     JsonPath { variable: &'i str, path: &'i str },
 }
 
-#[derive(Debug, PartialEq, Eq, Clone, Hash, Serialize, Deserialize)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum MatchableValue<'i> {
     Literal(&'i str),
+    Number(Number),
+    Boolean(bool),
     Variable(&'i str),
     JsonPath { variable: &'i str, path: &'i str },
 }
 
-#[derive(Serialize, Debug, Hash, PartialEq, Eq, Clone)]
+#[derive(Serialize, Debug, PartialEq, Clone)]
 pub enum CallOutputValue<'i> {
     Scalar(&'i str),
     Accumulator(&'i str),
     None,
 }
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub struct Seq<'i>(pub Box<Instruction<'i>>, pub Box<Instruction<'i>>);
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub struct Par<'i>(pub Box<Instruction<'i>>, pub Box<Instruction<'i>>);
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub struct Xor<'i>(pub Box<Instruction<'i>>, pub Box<Instruction<'i>>);
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub struct Match<'i> {
     pub left_value: MatchableValue<'i>,
     pub right_value: MatchableValue<'i>,
     pub instruction: Box<Instruction<'i>>,
 }
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub struct MisMatch<'i> {
     pub left_value: MatchableValue<'i>,
     pub right_value: MatchableValue<'i>,
     pub instruction: Box<Instruction<'i>>,
 }
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub struct Fold<'i> {
     pub iterable: IterableValue<'i>,
     pub iterator: &'i str,
     pub instruction: Rc<Instruction<'i>>,
 }
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub struct Next<'i>(pub &'i str);
 
-#[derive(Serialize, Debug, PartialEq, Eq)]
+#[derive(Serialize, Debug, PartialEq)]
 pub struct Null;
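
Every derive change in this file follows from one fact: the new Number variant can carry an f64, and f64 implements PartialEq but neither Eq nor Hash (NaN is not equal to itself, which breaks the reflexivity Eq requires), so every type that now transitively contains Number loses those derives. A minimal standalone sketch of the constraint (illustrative, not from the commit):

    // Deriving Eq or Hash on this enum would fail to compile,
    // because f64 provides only PartialEq.
    #[derive(Debug, PartialEq, Clone)]
    enum N {
        Int(i64),
        Float(f64),
    }

    fn main() {
        assert!(f64::NAN != f64::NAN);
        assert_eq!(N::Int(1), N::Int(1));
        assert!(N::Float(f64::NAN) != N::Float(f64::NAN));
    }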
crates/air-parser/src/parser/lexer/air_lexer.rs
@@ -15,7 +15,9 @@
 */
 
 use super::errors::LexerError;
+use super::is_aqua_alphanumeric;
 use super::token::Token;
+use super::LexerResult;
 
 use std::iter::Peekable;
 use std::str::CharIndices;
@@ -169,7 +171,7 @@ fn should_stop(ch: char, round_brackets_balance: i64, open_square_brackets_balance
     ch.is_whitespace() || round_brackets_balance < 0 || open_square_brackets_balance < 0
 }
 
-fn string_to_token(input: &str, start_pos: usize) -> Result<Token, LexerError> {
+fn string_to_token(input: &str, start_pos: usize) -> LexerResult<Token> {
     match input {
         "" => Err(LexerError::EmptyString(start_pos, start_pos)),
 
@@ -186,12 +188,15 @@ fn string_to_token(input: &str, start_pos: usize) -> Result<Token, LexerError> {
         INIT_PEER_ID => Ok(Token::InitPeerId),
         LAST_ERROR => Ok(Token::LastError),
 
+        TRUE_VALUE => Ok(Token::Boolean(true)),
+        FALSE_VALUE => Ok(Token::Boolean(false)),
+
         str if str.ends_with(ACC_END_TAG) => try_parse_accumulator(str, start_pos),
-        str => try_parse_call_variable(str, start_pos),
+        str => super::call_variable_parser::try_parse_call_variable(str, start_pos),
     }
 }
 
-fn try_parse_accumulator(maybe_acc: &str, start: usize) -> Result<Token, LexerError> {
+fn try_parse_accumulator(maybe_acc: &str, start: usize) -> LexerResult<Token> {
     const ACC_END_TAG_SIZE: usize = 2;
 
     let str_len = maybe_acc.len();
@@ -211,25 +216,6 @@ fn try_parse_accumulator(maybe_acc: &str, start: usize) -> LexerResult<Token> {
     Ok(Token::Accumulator(maybe_acc))
 }
 
-fn try_parse_call_variable(maybe_var: &str, start: usize) -> Result<Token, LexerError> {
-    let mut json_path_start_pos = None;
-
-    for (pos, ch) in maybe_var.chars().enumerate() {
-        if !json_path_started(json_path_start_pos) && is_json_path_start_point(ch) {
-            json_path_start_pos = Some(pos);
-        } else if !json_path_started(json_path_start_pos) && !is_aqua_alphanumeric(ch) {
-            return Err(LexerError::IsNotAlphanumeric(start + pos, start + pos));
-        } else if json_path_started(json_path_start_pos) & !json_path_allowed_char(ch) {
-            return Err(LexerError::InvalidJsonPath(start + pos, start + pos));
-        }
-    }
-
-    match json_path_start_pos {
-        Some(pos) => Ok(Token::JsonPath(maybe_var, pos)),
-        None => Ok(Token::Alphanumeric(maybe_var)),
-    }
-}
-
 const CALL_INSTR: &str = "call";
 const SEQ_INSTR: &str = "seq";
 const PAR_INSTR: &str = "par";
@@ -243,40 +229,7 @@ const MISMATCH_INSTR: &str = "mismatch";
 const INIT_PEER_ID: &str = "%init_peer_id%";
 const LAST_ERROR: &str = "%last_error%";
 
+const TRUE_VALUE: &str = "true";
+const FALSE_VALUE: &str = "false";
+
 const ACC_END_TAG: &str = "[]";
 
-fn is_json_path_start_point(ch: char) -> bool {
-    ch == '.'
-}
-
-fn json_path_started(first_dot_pos: Option<usize>) -> bool {
-    first_dot_pos.is_some()
-}
-
-fn json_path_allowed_char(ch: char) -> bool {
-    // we don't have spec for json path now, but some possible example could be found here
-    // https://packagist.org/packages/softcreatr/jsonpath
-
-    // good old switch faster here than hash set
-    match ch {
-        '$' => true,
-        '@' => true,
-        '[' => true,
-        ']' => true,
-        '(' => true,
-        ')' => true,
-        ':' => true,
-        '?' => true,
-        '.' => true,
-        '*' => true,
-        ',' => true,
-        '"' => true,
-        '\'' => true,
-        '!' => true,
-        ch => is_aqua_alphanumeric(ch),
-    }
-}
-
-fn is_aqua_alphanumeric(ch: char) -> bool {
-    ch.is_alphanumeric() || ch == '_' || ch == '-'
-}
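
The dispatch order in string_to_token matters: exact keywords ("call", "true", "%init_peer_id%", ...) win first, then the "[]" accumulator suffix, and only then the combined variable/number parser. A rough standalone model of that precedence (a simplified sketch, not the real lexer API):

    // Simplified model of the lexer's token dispatch order (assumed behavior).
    fn classify(s: &str) -> &'static str {
        match s {
            "call" | "seq" | "par" | "null" | "fold" | "next" | "match" | "mismatch" => "instruction",
            "true" | "false" => "boolean",
            "%init_peer_id%" | "%last_error%" => "const",
            s if s.ends_with("[]") => "accumulator",
            s if s.parse::<i64>().is_ok() || s.parse::<f64>().is_ok() => "number",
            s if s.contains('.') => "json path",
            _ => "variable",
        }
    }

    fn main() {
        assert_eq!(classify("true"), "boolean");
        assert_eq!(classify("true1"), "variable"); // keyword match is exact
        assert_eq!(classify("-12.5"), "number");
        assert_eq!(classify("acc[]"), "accumulator");
        assert_eq!(classify("var.$.field"), "json path");
    }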
crates/air-parser/src/parser/lexer/call_variable_parser.rs (new file, 262 lines)
@@ -0,0 +1,262 @@
+/*
+ * Copyright 2020 Fluence Labs Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use super::LexerError;
+use super::LexerResult;
+use super::Token;
+
+use std::convert::TryInto;
+use std::iter::Peekable;
+use std::str::CharIndices;
+
+pub(super) fn try_parse_call_variable(
+    string_to_parse: &str,
+    start_pos: usize,
+) -> LexerResult<Token<'_>> {
+    CallVariableParser::try_parse(string_to_parse, start_pos)
+}
+
+#[derive(Debug)]
+struct ParserState {
+    pub(self) first_dot_met_pos: Option<usize>,
+    pub(self) non_numeric_met: bool,
+    pub(self) digit_met: bool,
+    pub(self) is_first_char: bool,
+    pub(self) current_char: char,
+    pub(self) current_pos: usize,
+}
+
+struct CallVariableParser<'input> {
+    string_to_parse_iter: Peekable<CharIndices<'input>>,
+    string_to_parse: &'input str,
+    start_pos: usize,
+    state: ParserState,
+}
+
+impl<'input> CallVariableParser<'input> {
+    fn new(string_to_parse: &'input str, start_pos: usize) -> LexerResult<Self> {
+        let mut string_to_parse_iter = string_to_parse.char_indices().peekable();
+        let (current_pos, current_char) = match string_to_parse_iter.next() {
+            Some(pos_and_ch) => pos_and_ch,
+            None => return Err(LexerError::EmptyVariableOrConst(start_pos, start_pos)),
+        };
+
+        let state = ParserState {
+            first_dot_met_pos: None,
+            non_numeric_met: false,
+            digit_met: false,
+            is_first_char: true,
+            current_char,
+            current_pos,
+        };
+
+        let parser = Self {
+            string_to_parse_iter,
+            string_to_parse,
+            start_pos,
+            state,
+        };
+
+        Ok(parser)
+    }
+
+    pub(self) fn try_parse(
+        string_to_parse: &'input str,
+        start_pos: usize,
+    ) -> LexerResult<Token<'input>> {
+        let mut parser = Self::new(string_to_parse, start_pos)?;
+
+        loop {
+            if parser.is_possible_to_parse_as_number() {
+                parser.try_parse_as_number()?;
+            } else {
+                parser.try_parse_as_variable()?;
+            }
+
+            if !parser.next_char() {
+                break;
+            }
+        }
+
+        parser.to_token()
+    }
+
+    fn next_char(&mut self) -> bool {
+        let (pos, ch) = match self.string_to_parse_iter.next() {
+            Some(pos_and_ch) => pos_and_ch,
+            None => return false,
+        };
+
+        self.state.current_char = ch;
+        self.state.current_pos = pos;
+        self.state.is_first_char = false;
+
+        true
+    }
+
+    fn is_possible_to_parse_as_number(&self) -> bool {
+        !self.state.non_numeric_met
+    }
+
+    fn try_parse_as_number(&mut self) -> LexerResult<()> {
+        if self.try_parse_as_sign() || self.try_parse_as_digit() || self.try_parse_as_float_dot()? {
+            return Ok(());
+        }
+
+        self.handle_non_digit()
+    }
+
+    fn try_parse_as_sign(&self) -> bool {
+        let ch = self.current_char();
+        self.state.is_first_char && (ch == '-' || ch == '+')
+    }
+
+    fn try_parse_as_digit(&mut self) -> bool {
+        if self.current_char().is_numeric() {
+            self.state.digit_met = true;
+            return true;
+        }
+
+        false
+    }
+
+    fn try_parse_as_float_dot(&mut self) -> LexerResult<bool> {
+        let is_first_dot = self.try_parse_first_met_dot()?;
+
+        // filter out +.12 -.2315 variants
+        if is_first_dot && !self.state.digit_met {
+            let error_pos = self.pos_in_string_to_parse();
+            return Err(LexerError::LeadingDot(error_pos, error_pos));
+        }
+
+        Ok(is_first_dot)
+    }
+
+    fn handle_non_digit(&mut self) -> LexerResult<()> {
+        self.check_fallback_to_variable()?;
+
+        self.state.non_numeric_met = true;
+        self.try_parse_as_variable()
+    }
+
+    fn check_fallback_to_variable(&self) -> LexerResult<()> {
+        if self.dot_met() {
+            let error_pos = self.pos_in_string_to_parse();
+            return Err(LexerError::UnallowedCharInNumber(error_pos, error_pos));
+        }
+
+        Ok(())
+    }
+
+    fn try_parse_as_variable(&mut self) -> LexerResult<()> {
+        if self.try_parse_as_json_path_start()? {
+            return Ok(());
+        } else if self.is_json_path_started() {
+            self.try_parse_as_json_path()?;
+        } else {
+            self.try_parse_as_alphanumeric()?;
+        }
+
+        Ok(())
+    }
+
+    fn try_parse_as_json_path_start(&mut self) -> LexerResult<bool> {
+        self.try_parse_first_met_dot()
+    }
+
+    fn try_parse_as_alphanumeric(&self) -> LexerResult<()> {
+        if !self.aqua_alphanumeric() {
+            let error_pos = self.pos_in_string_to_parse();
+            return Err(LexerError::IsNotAlphanumeric(error_pos, error_pos));
+        }
+
+        Ok(())
+    }
+
+    fn try_parse_as_json_path(&self) -> LexerResult<()> {
+        if !self.json_path_allowed_char() {
+            let error_pos = self.pos_in_string_to_parse();
+            return Err(LexerError::InvalidJsonPath(error_pos, error_pos));
+        }
+
+        Ok(())
+    }
+
+    fn try_parse_first_met_dot(&mut self) -> LexerResult<bool> {
+        if !self.dot_met() && self.current_char() == '.' {
+            if self.current_pos() == 0 {
+                return Err(LexerError::LeadingDot(
+                    self.start_pos,
+                    self.pos_in_string_to_parse(),
+                ));
+            }
+            self.state.first_dot_met_pos = Some(self.current_pos());
+            return Ok(true);
+        }
+
+        Ok(false)
+    }
+
+    fn is_json_path_started(&self) -> bool {
+        self.dot_met()
+    }
+
+    fn dot_met(&self) -> bool {
+        self.state.first_dot_met_pos.is_some()
+    }
+
+    fn aqua_alphanumeric(&self) -> bool {
+        super::is_aqua_alphanumeric(self.current_char())
+    }
+
+    fn json_path_allowed_char(&self) -> bool {
+        super::is_json_path_allowed_char(self.current_char())
+    }
+
+    fn pos_in_string_to_parse(&self) -> usize {
+        self.start_pos + self.current_pos()
+    }
+
+    fn current_pos(&self) -> usize {
+        self.state.current_pos
+    }
+
+    fn current_char(&self) -> char {
+        self.state.current_char
+    }
+
+    fn to_token(&self) -> LexerResult<Token<'input>> {
+        use super::token::UnparsedNumber;
+
+        match (self.is_possible_to_parse_as_number(), self.dot_met()) {
+            (true, false) => {
+                let number = UnparsedNumber::Int(self.string_to_parse, self.start_pos);
+                let number: super::Number = number.try_into()?;
+                Ok(number.into())
+            }
+            (true, true) => {
+                let number = UnparsedNumber::Float(self.string_to_parse, self.start_pos);
+                let number: super::Number = number.try_into()?;
+                Ok(number.into())
+            }
+            (false, false) => Ok(Token::Alphanumeric(self.string_to_parse)),
+            (false, true) => Ok(Token::JsonPath(
+                self.string_to_parse,
+                self.state.first_dot_met_pos.unwrap(),
+            )),
+        }
+    }
+}
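
The parser above runs the number and the variable interpretation over a single pass: while only a sign, digits, and at most one dot have been seen, the input can still be a number; the first non-numeric character flips it to the variable/json-path track, unless a dot was already consumed on the number track (which makes it an invalid number rather than a variable). Expected classifications, following the to_token table and the tests further down (an illustrative summary, not code from the commit):

    // "123"        -> Token::Number(Number::Int(123))      numeric, no dot
    // "-12.3"      -> Token::Number(Number::Float(-12.3))  numeric, dot seen
    // "value"      -> Token::Alphanumeric("value")         non-numeric, no dot
    // "value.$.x"  -> Token::JsonPath("value.$.x", 5)      non-numeric, first dot at 5
    // "12345.$[x]" -> Err(UnallowedCharInNumber)           dot consumed on the number track
    // ".111"       -> Err(LeadingDot)                      dot with no digits before it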
crates/air-parser/src/parser/lexer/errors.rs
@@ -16,7 +16,10 @@
 
 use thiserror::Error as ThisError;
 
-#[derive(ThisError, Debug, Clone, PartialEq, Eq, Hash)]
+use std::num::ParseFloatError;
+use std::num::ParseIntError;
+
+#[derive(ThisError, Debug, Clone, PartialEq, Eq)]
 pub enum LexerError {
     #[error("this string literal has unclosed quote")]
     UnclosedQuote(usize, usize),
@@ -24,14 +27,32 @@ pub enum LexerError {
     #[error("empty string aren't allowed in this position")]
     EmptyString(usize, usize),
 
-    #[error("only alphanumeric and _, - characters are allowed in this position")]
+    #[error("only alphanumeric, '_', and '-' characters are allowed in this position")]
     IsNotAlphanumeric(usize, usize),
 
     #[error("an accumulator name should be non empty")]
     EmptyAccName(usize, usize),
 
+    #[error("this variable or constant shouldn't have empty name")]
+    EmptyVariableOrConst(usize, usize),
+
     #[error("invalid character in json path")]
     InvalidJsonPath(usize, usize),
 
+    #[error("a digit could contain only digits or one dot")]
+    UnallowedCharInNumber(usize, usize),
+
+    #[error("{2}")]
+    ParseIntError(usize, usize, #[source] ParseIntError),
+
+    #[error("{2}")]
+    ParseFloatError(usize, usize, #[source] ParseFloatError),
+
+    #[error("this float is too big, a float could contain less than 12 digits")]
+    TooBigFloat(usize, usize),
+
+    #[error("leading dot without any symbols before - please write 0 if it's float or variable name if it's json path")]
+    LeadingDot(usize, usize),
 }
 
 impl From<std::convert::Infallible> for LexerError {
crates/air-parser/src/parser/lexer/mod.rs
@@ -15,12 +15,20 @@
 */
 
 mod air_lexer;
+mod call_variable_parser;
 mod errors;
 mod token;
+mod utils;
 
 #[cfg(test)]
 pub mod tests;
 
 pub use air_lexer::AIRLexer;
 pub use errors::LexerError;
+pub use token::Number;
 pub use token::Token;
 
+pub(super) type LexerResult<T> = std::result::Result<T, LexerError>;
+
+pub(self) use utils::is_aqua_alphanumeric;
+pub(self) use utils::is_json_path_allowed_char;
crates/air-parser/src/parser/lexer/tests.rs
@@ -17,6 +17,7 @@
 use super::air_lexer::Spanned;
 use super::AIRLexer;
 use super::LexerError;
+use super::Number;
 use super::Token;
 
 fn run_lexer(input: &str) -> Vec<Spanned<Token<'_>, usize, LexerError>> {
@@ -24,110 +25,119 @@ fn run_lexer(input: &str) -> Vec<Spanned<Token<'_>, usize, LexerError>> {
     lexer.collect()
 }
 
+#[allow(dead_code)]
+enum TokenCompareStrategy<'token> {
+    All(Vec<Spanned<Token<'token>, usize, LexerError>>),
+    Some(Vec<usize>, Vec<Spanned<Token<'token>, usize, LexerError>>),
+    One(usize, Spanned<Token<'token>, usize, LexerError>),
+    Single(Spanned<Token<'token>, usize, LexerError>),
+}
+
+use TokenCompareStrategy::*;
+
+fn lexer_test(input: &str, expected_tokens: TokenCompareStrategy) {
+    let actual_tokens = run_lexer(input);
+
+    match expected_tokens {
+        All(expected_tokens) => assert_eq!(actual_tokens, expected_tokens),
+        Some(token_ids, expected_tokens) => {
+            for (&id, token) in token_ids.iter().zip(expected_tokens) {
+                assert_eq!(actual_tokens[id], token);
+            }
+        }
+        One(id, token) => assert_eq!(actual_tokens[id], token),
+        Single(token) => assert_eq!(actual_tokens, vec![token]),
+    }
+}
+
 #[test]
 fn air_instructions() {
-    let call_tokens = run_lexer("call");
-    assert_eq!(call_tokens, vec![Ok((0, Token::Call, 4))]);
+    lexer_test("call", Single(Ok((0, Token::Call, 4))));
 
-    let call_tokens = run_lexer("(call)");
-    assert_eq!(
-        call_tokens,
-        vec![
+    lexer_test(
+        "(call)",
+        All(vec![
             Ok((0, Token::OpenRoundBracket, 1)),
             Ok((1, Token::Call, 5)),
-            Ok((5, Token::CloseRoundBracket, 6))
-        ]
+            Ok((5, Token::CloseRoundBracket, 6)),
+        ]),
     );
 
-    let par_tokens = run_lexer("par");
-    assert_eq!(par_tokens, vec![Ok((0, Token::Par, 3))]);
+    lexer_test("par", Single(Ok((0, Token::Par, 3))));
 
-    let par_tokens = run_lexer("(par)");
-    assert_eq!(
-        par_tokens,
-        vec![
+    lexer_test(
+        "(par)",
+        All(vec![
             Ok((0, Token::OpenRoundBracket, 1)),
             Ok((1, Token::Par, 4)),
-            Ok((4, Token::CloseRoundBracket, 5))
-        ]
+            Ok((4, Token::CloseRoundBracket, 5)),
+        ]),
    );
 
-    let seq_tokens = run_lexer("seq");
-    assert_eq!(seq_tokens, vec![Ok((0, Token::Seq, 3))]);
+    lexer_test("seq", Single(Ok((0, Token::Seq, 3))));
 
-    let seq_tokens = run_lexer("(seq)");
-    assert_eq!(
-        seq_tokens,
-        vec![
+    lexer_test(
+        "(seq)",
+        All(vec![
             Ok((0, Token::OpenRoundBracket, 1)),
             Ok((1, Token::Seq, 4)),
-            Ok((4, Token::CloseRoundBracket, 5))
-        ]
+            Ok((4, Token::CloseRoundBracket, 5)),
+        ]),
    );
 
-    let null_tokens = run_lexer("null");
-    assert_eq!(null_tokens, vec![Ok((0, Token::Null, 4))]);
+    lexer_test("null", Single(Ok((0, Token::Null, 4))));
 
-    let null_tokens = run_lexer("(null)");
-    assert_eq!(
-        null_tokens,
-        vec![
+    lexer_test(
+        "(null)",
+        All(vec![
             Ok((0, Token::OpenRoundBracket, 1)),
             Ok((1, Token::Null, 5)),
-            Ok((5, Token::CloseRoundBracket, 6))
-        ]
+            Ok((5, Token::CloseRoundBracket, 6)),
+        ]),
    );
 
-    let fold_tokens = run_lexer("fold");
-    assert_eq!(fold_tokens, vec![Ok((0, Token::Fold, 4))]);
+    lexer_test("fold", Single(Ok((0, Token::Fold, 4))));
 
-    let fold_tokens = run_lexer("(fold)");
-    assert_eq!(
-        fold_tokens,
-        vec![
+    lexer_test(
+        "(fold)",
+        All(vec![
             Ok((0, Token::OpenRoundBracket, 1)),
             Ok((1, Token::Fold, 5)),
-            Ok((5, Token::CloseRoundBracket, 6))
-        ]
+            Ok((5, Token::CloseRoundBracket, 6)),
+        ]),
    );
 
-    let next_tokens = run_lexer("next");
-    assert_eq!(next_tokens, vec![Ok((0, Token::Next, 4))]);
+    lexer_test("next", Single(Ok((0, Token::Next, 4))));
 
-    let next_tokens = run_lexer("(next)");
-    assert_eq!(
-        next_tokens,
-        vec![
+    lexer_test(
+        "(next)",
+        All(vec![
             Ok((0, Token::OpenRoundBracket, 1)),
             Ok((1, Token::Next, 5)),
-            Ok((5, Token::CloseRoundBracket, 6))
-        ]
+            Ok((5, Token::CloseRoundBracket, 6)),
+        ]),
    );
 
-    let match_tokens = run_lexer("match");
-    assert_eq!(match_tokens, vec![Ok((0, Token::Match, 5))]);
+    lexer_test("match", Single(Ok((0, Token::Match, 5))));
 
-    let match_tokens = run_lexer("(match)");
-    assert_eq!(
-        match_tokens,
-        vec![
+    lexer_test(
+        "(match)",
+        All(vec![
             Ok((0, Token::OpenRoundBracket, 1)),
             Ok((1, Token::Match, 6)),
-            Ok((6, Token::CloseRoundBracket, 7))
-        ]
+            Ok((6, Token::CloseRoundBracket, 7)),
+        ]),
    );
 
-    let mismatch_tokens = run_lexer("mismatch");
-    assert_eq!(mismatch_tokens, vec![Ok((0, Token::MisMatch, 8))]);
+    lexer_test("mismatch", Single(Ok((0, Token::MisMatch, 8))));
 
-    let mismatch_tokens = run_lexer("(mismatch)");
-    assert_eq!(
-        mismatch_tokens,
-        vec![
+    lexer_test(
+        "(mismatch)",
+        All(vec![
             Ok((0, Token::OpenRoundBracket, 1)),
             Ok((1, Token::MisMatch, 9)),
-            Ok((9, Token::CloseRoundBracket, 10))
-        ]
+            Ok((9, Token::CloseRoundBracket, 10)),
+        ]),
    );
 }
 
@@ -135,10 +145,9 @@ fn air_instructions() {
 fn init_peer_id() {
     const INIT_PEER_ID: &str = "%init_peer_id%";
 
-    let init_peer_id_tokens = run_lexer(INIT_PEER_ID);
-    assert_eq!(
-        init_peer_id_tokens,
-        vec![Ok((0, Token::InitPeerId, INIT_PEER_ID.len()))]
+    lexer_test(
+        INIT_PEER_ID,
+        Single(Ok((0, Token::InitPeerId, INIT_PEER_ID.len()))),
     );
 }
 
@@ -146,14 +155,13 @@ fn init_peer_id() {
 fn accumulator() {
     const ACC: &str = "accumulator____asdasd[]";
 
-    let init_peer_id_tokens = run_lexer(ACC);
-    assert_eq!(
-        init_peer_id_tokens,
-        vec![Ok((
+    lexer_test(
+        ACC,
+        Single(Ok((
             0,
             Token::Accumulator(&ACC[0..ACC.len() - 2]),
-            ACC.len()
-        ))]
+            ACC.len(),
+        ))),
    );
 }
 
@@ -161,14 +169,100 @@ fn accumulator() {
 fn string_literal() {
     const STRING_LITERAL: &str = r#""some_string""#;
 
-    let string_literal_tokens = run_lexer(STRING_LITERAL);
-    assert_eq!(
-        string_literal_tokens,
-        vec![Ok((
+    lexer_test(
+        STRING_LITERAL,
+        Single(Ok((
             0,
             Token::StringLiteral(&STRING_LITERAL[1..STRING_LITERAL.len() - 1]),
-            STRING_LITERAL.len()
-        ))]
+            STRING_LITERAL.len(),
+        ))),
+    );
+}
+
+#[test]
+fn integer_numbers() {
+    const NUMBER_WITH_PLUS_SIGN: &str = "+123";
+    let number = Number::Int(123);
+
+    lexer_test(
+        NUMBER_WITH_PLUS_SIGN,
+        Single(Ok((
+            0,
+            Token::Number(number.clone()),
+            NUMBER_WITH_PLUS_SIGN.len(),
+        ))),
+    );
+
+    const NUMBER: &str = "123";
+
+    lexer_test(
+        NUMBER,
+        Single(Ok((0, Token::Number(number.clone()), NUMBER.len()))),
+    );
+
+    const NUMBER_WITH_MINUS_SIGN: &str = "-123";
+    let number = Number::Int(-123);
+
+    lexer_test(
+        NUMBER_WITH_MINUS_SIGN,
+        Single(Ok((0, Token::Number(number), NUMBER_WITH_MINUS_SIGN.len()))),
+    );
+}
+
+#[test]
+fn float_number() {
+    const FNUMBER_WITH_PLUS_SIGN: &str = "+123.123";
+    let number = Number::Float(123.123);
+
+    lexer_test(
+        FNUMBER_WITH_PLUS_SIGN,
+        Single(Ok((
+            0,
+            Token::Number(number.clone()),
+            FNUMBER_WITH_PLUS_SIGN.len(),
+        ))),
+    );
+
+    const FNUMBER: &str = "123.123";
+
+    lexer_test(
+        FNUMBER,
+        Single(Ok((0, Token::Number(number), FNUMBER.len()))),
+    );
+
+    const FNUMBER_WITH_MINUS_SIGN: &str = "-123.123";
+    let number = Number::Float(-123.123);
+
+    lexer_test(
+        FNUMBER_WITH_MINUS_SIGN,
+        Single(Ok((
+            0,
+            Token::Number(number),
+            FNUMBER_WITH_MINUS_SIGN.len(),
+        ))),
+    );
+}
+
+#[test]
+fn too_big_number() {
+    const NUMBER: &str = "1231231564564545684564646515313546547682131";
+
+    let number_tokens = run_lexer(NUMBER);
+
+    assert!(matches!(
+        number_tokens[0],
+        Err(LexerError::ParseIntError(..))
+    ));
+}
+
+#[test]
+fn too_big_float_number() {
+    const FNUMBER: &str =
+        "10000000000000000000000000000001.1231564564545684564646515313546547682131";
+
+    lexer_test(
+        FNUMBER,
+        Single(Err(LexerError::TooBigFloat(0, FNUMBER.len()))),
    );
 }
 
@@ -177,10 +271,40 @@ fn json_path() {
     // this json path contains all characters allowed in json path
     const JSON_PATH: &str = r#"value.$[$@[]():?.*,"!]"#;
 
-    let json_path_tokens = run_lexer(JSON_PATH);
-    assert_eq!(
-        json_path_tokens,
-        vec![Ok((0, Token::JsonPath(JSON_PATH, 5), JSON_PATH.len()))]
+    lexer_test(
+        JSON_PATH,
+        Single(Ok((0, Token::JsonPath(JSON_PATH, 5), JSON_PATH.len()))),
    );
}
+
+#[test]
+fn json_path_numbers() {
+    const JSON_PATH: &str = r#"12345.$[$@[]():?.*,"!]"#;
+
+    lexer_test(
+        JSON_PATH,
+        Single(Err(LexerError::UnallowedCharInNumber(6, 6))),
+    );
+
+    const JSON_PATH1: &str = r#"+12345.$[$@[]():?.*,"!]"#;
+
+    lexer_test(
+        JSON_PATH1,
+        Single(Err(LexerError::UnallowedCharInNumber(7, 7))),
+    );
+}
+
+#[test]
+fn leading_dot() {
+    const LEADING_DOT: &str = ".111";
+
+    lexer_test(LEADING_DOT, Single(Err(LexerError::LeadingDot(0, 0))));
+
+    const LEADING_DOT_AFTER_SIGN: &str = "+.1111";
+
+    lexer_test(
+        LEADING_DOT_AFTER_SIGN,
+        Single(Err(LexerError::LeadingDot(1, 1))),
+    );
+}
 
@@ -188,10 +312,9 @@ fn json_path() {
 fn unclosed_quote() {
     const UNCLOSED_QUOTE_AIR: &str = r#"(call ("peer_name) ("service_name" "function_name") [])"#;
 
-    let unclosed_quote_air_tokens = run_lexer(UNCLOSED_QUOTE_AIR);
-    assert_eq!(
-        unclosed_quote_air_tokens[4],
-        Err(LexerError::IsNotAlphanumeric(33, 33))
+    lexer_test(
+        UNCLOSED_QUOTE_AIR,
+        One(4, Err(LexerError::IsNotAlphanumeric(33, 33))),
    );
 }
 
@@ -200,10 +323,9 @@ fn bad_value() {
     // value contains ! that is only allowed in json path
     const INVALID_VALUE: &str = r#"val!ue.$[$@[]():?.*,"\!]"#;
 
-    let invalid_value_tokens = run_lexer(INVALID_VALUE);
-    assert_eq!(
-        invalid_value_tokens,
-        vec![Err(LexerError::IsNotAlphanumeric(3, 3))]
+    lexer_test(
+        INVALID_VALUE,
+        Single(Err(LexerError::IsNotAlphanumeric(3, 3))),
    );
 }
 
@@ -211,9 +333,44 @@ fn bad_value() {
 fn invalid_json_path() {
     const INVALID_JSON_PATH: &str = r#"value.$%"#;
 
-    let invalid_json_path_tokens = run_lexer(INVALID_JSON_PATH);
-    assert_eq!(
-        invalid_json_path_tokens,
-        vec![Err(LexerError::InvalidJsonPath(7, 7))]
+    lexer_test(
+        INVALID_JSON_PATH,
+        Single(Err(LexerError::InvalidJsonPath(7, 7))),
    );
}
+
+#[test]
+fn invalid_json_path_numbers() {
+    const JSON_PATH: &str = r#"+12345$[$@[]():?.*,"!]"#;
+
+    lexer_test(JSON_PATH, Single(Err(LexerError::IsNotAlphanumeric(6, 6))));
+}
+
+#[test]
+fn booleans() {
+    const TRUE_BOOL_CONST: &str = "true";
+
+    lexer_test(
+        TRUE_BOOL_CONST,
+        Single(Ok((0, Token::Boolean(true), TRUE_BOOL_CONST.len()))),
+    );
+
+    const FALSE_BOOL_CONST: &str = "false";
+
+    lexer_test(
+        FALSE_BOOL_CONST,
+        Single(Ok((0, Token::Boolean(false), FALSE_BOOL_CONST.len()))),
+    );
+
+    const NON_BOOL_CONST: &str = "true1";
+
+    lexer_test(
+        NON_BOOL_CONST,
+        Single(Ok((
+            0,
+            Token::Alphanumeric(NON_BOOL_CONST),
+            NON_BOOL_CONST.len(),
+        ))),
    );
 }
crates/air-parser/src/parser/lexer/token.rs
@@ -14,7 +14,10 @@
 * limitations under the License.
 */
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+use serde::Deserialize;
+use serde::Serialize;
+
+#[derive(Debug, Clone, PartialEq)]
 pub enum Token<'input> {
     OpenRoundBracket,
     CloseRoundBracket,
@@ -25,6 +28,8 @@ pub enum Token<'input> {
     Alphanumeric(&'input str),
     JsonPath(&'input str, usize),
     Accumulator(&'input str),
+    Number(Number),
+    Boolean(bool),
 
     InitPeerId,
     LastError,
@@ -39,3 +44,73 @@ pub enum Token<'input> {
     Match,
     MisMatch,
 }
+
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
+pub enum Number {
+    Int(i64),
+    Float(f64),
+}
+
+impl From<Number> for Token<'_> {
+    fn from(value: Number) -> Self {
+        Token::Number(value)
+    }
+}
+
+impl From<Number> for serde_json::Value {
+    fn from(number: Number) -> Self {
+        // note: `number.into()` here would resolve back to this very impl
+        // and recurse forever; delegating to the `&Number` impl below avoids that
+        (&number).into()
+    }
+}
+
+impl From<&Number> for serde_json::Value {
+    fn from(number: &Number) -> Self {
+        match number {
+            Number::Int(value) => (*value).into(),
+            Number::Float(value) => (*value).into(),
+        }
+    }
+}
+
+use super::LexerError;
+use super::LexerResult;
+use std::convert::TryFrom;
+
+pub(crate) enum UnparsedNumber<'input> {
+    // raw value and starting pos
+    Int(&'input str, usize),
+    Float(&'input str, usize),
+}
+
+impl TryFrom<UnparsedNumber<'_>> for Number {
+    type Error = LexerError;
+
+    fn try_from(value: UnparsedNumber<'_>) -> LexerResult<Number> {
+        match value {
+            UnparsedNumber::Int(raw_value, start_pos) => {
+                let number = raw_value.parse::<i64>().map_err(|e| {
+                    LexerError::ParseIntError(start_pos, start_pos + raw_value.len(), e)
+                })?;
+
+                let number = Self::Int(number);
+                Ok(number)
+            }
+
+            UnparsedNumber::Float(raw_value, start_pos) => {
+                if raw_value.len() > 11 {
+                    return Err(LexerError::TooBigFloat(
+                        start_pos,
+                        start_pos + raw_value.len(),
+                    ));
+                }
+
+                let number = raw_value.parse::<f64>().map_err(|e| {
+                    LexerError::ParseFloatError(start_pos, start_pos + raw_value.len(), e)
+                })?;
+
+                let number = Self::Float(number);
+                Ok(number)
+            }
+        }
+    }
+}
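
A sketch of how the two-stage number path fits together, assuming it sits inside the lexer module where these types are visible: UnparsedNumber carries the raw slice plus its source offset, and TryFrom performs the actual parse, mapping failures to position-carrying lexer errors.

    use std::convert::TryInto;

    fn demo() -> Result<(), LexerError> {
        let int: Number = UnparsedNumber::Int("-42", 0).try_into()?;
        assert_eq!(int, Number::Int(-42));

        let float: Number = UnparsedNumber::Float("3.14", 10).try_into()?;
        assert_eq!(float, Number::Float(3.14));

        // 36 characters > 11: rejected by the length guard before parsing
        let too_big: Result<Number, _> =
            UnparsedNumber::Float("10000000000000000000000000000001.123", 0).try_into();
        assert!(matches!(too_big, Err(LexerError::TooBigFloat(0, 36))));
        Ok(())
    }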
crates/air-parser/src/parser/lexer/utils.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2020 Fluence Labs Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+pub(super) fn is_aqua_alphanumeric(ch: char) -> bool {
+    ch.is_alphanumeric() || ch == '_' || ch == '-'
+}
+
+pub(super) fn is_json_path_allowed_char(ch: char) -> bool {
+    // we don't have spec for json path now, but some possible example could be found here
+    // https://packagist.org/packages/softcreatr/jsonpath
+
+    // good old switch faster here than hash set
+    match ch {
+        '$' => true,
+        '@' => true,
+        '[' => true,
+        ']' => true,
+        '(' => true,
+        ')' => true,
+        ':' => true,
+        '?' => true,
+        '.' => true,
+        '*' => true,
+        ',' => true,
+        '"' => true,
+        '\'' => true,
+        '!' => true,
+        ch => is_aqua_alphanumeric(ch),
+    }
+}
stepper-lib/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "stepper-lib"
-version = "0.4.1"
+version = "0.5.0"
 authors = ["Fluence Labs"]
 edition = "2018"
 
@@ -35,14 +35,14 @@ use air_parser::ast::{CallInstrArgValue, CallOutputValue};
 use std::rc::Rc;
 
 /// Represents Call instruction with resolved internal parts.
-#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+#[derive(Debug, Clone, PartialEq)]
 pub(super) struct ResolvedCall<'i> {
     triplet: Rc<ResolvedTriplet>,
     function_arg_paths: Rc<Vec<CallInstrArgValue<'i>>>,
     output: CallOutputValue<'i>,
 }
 
-#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+#[derive(Debug, Clone, PartialEq)]
 struct ResolvedArguments {
     call_arguments: String,
     tetraplets: Vec<Vec<SecurityTetraplet>>,
@@ -19,6 +19,7 @@ use crate::execution::air::ExecutionResult;
 use crate::execution::utils::resolve_to_jvaluable;
 use crate::JValue;
 
+use air_parser::ast;
 use air_parser::ast::MatchableValue;
 
 pub(crate) fn are_matchable_eq<'ctx>(
@@ -29,8 +30,17 @@ pub(crate) fn are_matchable_eq<'ctx>(
     use MatchableValue::*;
 
     match (left, right) {
-        (Literal(name), matchable) => compare_matchable_and_literal(matchable, name, exec_ctx),
-        (matchable, Literal(name)) => compare_matchable_and_literal(matchable, name, exec_ctx),
+        (Literal(left_name), Literal(right_name)) => Ok(left_name == right_name),
+
+        (Literal(value), matchable) => compare_matchable(matchable, exec_ctx, make_string_comparator(value)),
+        (matchable, Literal(value)) => compare_matchable(matchable, exec_ctx, make_string_comparator(value)),
+
+        (Boolean(value), matchable) => compare_matchable(matchable, exec_ctx, make_bool_comparator(value)),
+        (matchable, Boolean(value)) => compare_matchable(matchable, exec_ctx, make_bool_comparator(value)),
+
+        (Number(value), matchable) => compare_matchable(matchable, exec_ctx, make_number_comparator(value)),
+        (matchable, Number(value)) => compare_matchable(matchable, exec_ctx, make_number_comparator(value)),
+
         (Variable(left_name), Variable(right_name)) => {
             let left_jvaluable = resolve_to_jvaluable(left_name, exec_ctx)?;
             let left_value = left_jvaluable.as_jvalue();
@@ -53,29 +63,33 @@ pub(crate) fn are_matchable_eq<'ctx>(
     }
 }
 
-fn compare_matchable_and_literal<'ctx>(
+use std::borrow::Cow;
+type Comparator<'a> = Box<dyn Fn(Cow<'_, JValue>) -> bool + 'a>;
+
+fn compare_matchable<'ctx>(
     matchable: &MatchableValue<'_>,
-    string_literal: &str,
     exec_ctx: &'ctx ExecutionCtx<'_>,
+    comparator: Comparator<'ctx>,
 ) -> ExecutionResult<bool> {
-    use std::borrow::Cow;
     use MatchableValue::*;
 
-    fn compare_jvalue_and_literal(jvalue: Cow<'_, JValue>, string_literal: &str) -> bool {
-        use std::ops::Deref;
-
-        match jvalue.deref() {
-            JValue::String(value) => value == string_literal,
-            _ => false,
-        }
-    }
-
     match matchable {
-        Literal(name) => Ok(name == &string_literal),
+        Literal(str) => {
+            let jvalue = str.to_string().into();
+            Ok(comparator(Cow::Owned(jvalue)))
+        }
+        Number(number) => {
+            let jvalue = number.clone().into();
+            Ok(comparator(Cow::Owned(jvalue)))
+        }
+        Boolean(bool) => {
+            let jvalue = (*bool).into();
+            Ok(comparator(Cow::Owned(jvalue)))
+        }
         Variable(name) => {
             let jvaluable = resolve_to_jvaluable(name, exec_ctx)?;
             let jvalue = jvaluable.as_jvalue();
-            Ok(compare_jvalue_and_literal(jvalue, string_literal))
+            Ok(comparator(jvalue))
         }
         JsonPath { variable, path } => {
             let jvaluable = resolve_to_jvaluable(variable, exec_ctx)?;
@@ -84,7 +98,38 @@ fn compare_matchable<'ctx>(
             return Ok(false);
         }
 
-            Ok(compare_jvalue_and_literal(Cow::Borrowed(jvalues[0]), string_literal))
+            Ok(comparator(Cow::Borrowed(jvalues[0])))
         }
     }
 }
+
+fn make_string_comparator(comparable_string: &str) -> Comparator<'_> {
+    use std::ops::Deref;
+
+    Box::new(move |jvalue: Cow<'_, JValue>| -> bool {
+        match jvalue.deref() {
+            JValue::String(value) => value == comparable_string,
+            _ => false,
+        }
+    })
+}
+
+fn make_bool_comparator(comparable_bool: &bool) -> Comparator<'_> {
+    use std::ops::Deref;
+
+    let comparable_bool = *comparable_bool;
+    Box::new(move |jvalue: Cow<'_, JValue>| -> bool {
+        match jvalue.deref() {
+            JValue::Bool(jvalue) => jvalue == &comparable_bool,
+            _ => false,
+        }
+    })
+}
+
+fn make_number_comparator(comparable_number: &ast::Number) -> Comparator<'_> {
+    use std::ops::Deref;
+
+    let comparable_jvalue: JValue = comparable_number.into();
+
+    Box::new(move |jvalue: Cow<'_, JValue>| -> bool { jvalue.deref() == &comparable_jvalue })
+}
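
The refactor replaces a literal-only comparison with one generic walk (compare_matchable) parameterized by a boxed closure, so each constant kind only supplies its own JValue predicate. The shape of that pattern in isolation (a minimal sketch assuming serde_json, not the stepper's real types):

    use std::borrow::Cow;

    type Value = serde_json::Value;
    type Comparator<'a> = Box<dyn Fn(Cow<'_, Value>) -> bool + 'a>;

    // Builds a predicate that is true only for a JSON boolean equal to `expected`.
    fn make_bool_comparator(expected: bool) -> Comparator<'static> {
        Box::new(move |v: Cow<'_, Value>| matches!(v.as_ref(), Value::Bool(b) if *b == expected))
    }

    fn main() {
        let cmp = make_bool_comparator(true);
        assert!(cmp(Cow::Owned(Value::Bool(true))));
        assert!(!cmp(Cow::Owned(Value::String("true".into()))));
    }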
@@ -26,3 +26,10 @@ pub(super) use errors::ExecutionError;
 use std::rc::Rc;
 
 pub(self) type ExecutionResult<T> = std::result::Result<T, Rc<ExecutionError>>;
+
+#[macro_export]
+macro_rules! exec_err {
+    ($err:expr) => {
+        Err(std::rc::Rc::new($err))
+    };
+}
@@ -30,16 +30,21 @@ pub(crate) fn resolve_to_args<'i>(
     ctx: &ExecutionCtx<'i>,
 ) -> ExecutionResult<(JValue, Vec<SecurityTetraplet>)> {
     match value {
-        CallInstrArgValue::InitPeerId => prepare_string_arg(ctx.init_peer_id.as_str(), ctx),
+        CallInstrArgValue::InitPeerId => prepare_consts(ctx.init_peer_id.clone(), ctx),
         CallInstrArgValue::LastError => prepare_last_error(ctx),
-        CallInstrArgValue::Literal(value) => prepare_string_arg(value, ctx),
+        CallInstrArgValue::Literal(value) => prepare_consts(value.to_string(), ctx),
+        CallInstrArgValue::Boolean(value) => prepare_consts(*value, ctx),
+        CallInstrArgValue::Number(value) => prepare_consts(value, ctx),
         CallInstrArgValue::Variable(name) => prepare_variable(name, ctx),
         CallInstrArgValue::JsonPath { variable, path } => prepare_json_path(variable, path, ctx),
     }
 }
 
-fn prepare_string_arg<'i>(arg: &str, ctx: &ExecutionCtx<'i>) -> ExecutionResult<(JValue, Vec<SecurityTetraplet>)> {
-    let jvalue = JValue::String(arg.to_string());
+fn prepare_consts<'i>(
+    arg: impl Into<JValue>,
+    ctx: &ExecutionCtx<'i>,
+) -> ExecutionResult<(JValue, Vec<SecurityTetraplet>)> {
+    let jvalue = arg.into();
     let tetraplet = SecurityTetraplet::literal_tetraplet(ctx.init_peer_id.clone());
 
     Ok((jvalue, vec![tetraplet]))
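
prepare_consts generalizes the old string-only prepare_string_arg by accepting any `impl Into<JValue>`, which is what lets booleans, numbers (via the From<&Number> impl in token.rs), and strings share one code path. The conversions it relies on, in miniature (a sketch assuming serde_json):

    fn to_jvalue(arg: impl Into<serde_json::Value>) -> serde_json::Value {
        arg.into()
    }

    fn main() {
        assert_eq!(to_jvalue(true), serde_json::Value::Bool(true));
        assert_eq!(to_jvalue(42i64), serde_json::json!(42));
        assert_eq!(to_jvalue("s".to_string()), serde_json::json!("s"));
    }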
@@ -57,10 +57,3 @@ pub mod parser {
 }
 
 pub(crate) type JValue = serde_json::Value;
-
-#[macro_export]
-macro_rules! exec_err {
-    ($err:expr) => {
-        Err(std::rc::Rc::new($err))
-    };
-}
@@ -19,6 +19,7 @@ dependencies = [
 "lalrpop-util",
 "regex",
 "serde",
+"serde_json",
 "thiserror",
 ]
 
@@ -672,7 +673,7 @@ dependencies = [
 
 [[package]]
 name = "stepper-lib"
-version = "0.4.0"
+version = "0.4.1"
 dependencies = [
 "air-parser",
 "boolinator",

@@ -19,6 +19,7 @@ dependencies = [
 "lalrpop-util",
 "regex",
 "serde",
+"serde_json",
 "thiserror",
 ]
 
@@ -672,7 +673,7 @@ dependencies = [
 
 [[package]]
 name = "stepper-lib"
-version = "0.4.0"
+version = "0.4.1"
 dependencies = [
 "air-parser",
 "boolinator",
aquamarine Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "aquamarine"
-version = "0.4.1"
+version = "0.5.0"
 authors = ["Fluence Labs"]
 edition = "2018"
 