Mirror of https://github.com/fluencelabs/aquavm
refactor(parser): use AirPos type for AIR script position (#341)

Use a dedicated wrapper type for better type safety and self-documenting code.

parent c85b2e2fbf
commit 4a2e8be178
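In short: positions into the AIR script text used to be passed around as bare `usize` values, which type-check against any other integer (generation counters, iteration numbers, file ids). Wrapping the position in a transparent newtype turns such mix-ups into compile errors and makes every conversion back to `usize` explicit. A condensed sketch of the pattern as the new text_pos.rs below defines it (the `demo` function is illustrative only, not part of the commit):

    use std::ops::Add;

    /// Character position in the AIR script text (condensed from text_pos.rs).
    #[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
    #[repr(transparent)]
    pub struct AirPos(usize);

    impl From<usize> for AirPos {
        fn from(value: usize) -> Self {
            Self(value)
        }
    }

    impl Add<usize> for AirPos {
        type Output = Self;

        fn add(self, rhs: usize) -> Self::Output {
            Self(self.0 + rhs)
        }
    }

    fn demo() {
        let pos = AirPos::from(282);
        let next = pos + 1; // shifting by a byte count stays an AirPos
        assert_eq!(next, AirPos::from(283));
        // let n: usize = next; // rejected by the compiler: unwrapping must be explicit
    }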
Cargo.lock (generated): 1 change
@@ -90,6 +90,7 @@ dependencies = [
 name = "air-interpreter-data"
 version = "0.3.0"
 dependencies = [
  "air-parser",
+ "air-utils",
  "once_cell",
  "semver 1.0.14",
@@ -66,9 +66,7 @@ fn epilog<'i>(new: &New<'i>, exec_ctx: &mut ExecutionCtx<'i>) -> ExecutionResult
     let position = new.span.left;
     match &new.argument {
         NewArgument::Stream(stream) => {
-            exec_ctx
-                .streams
-                .meet_scope_end(stream.name.to_string(), position as u32);
+            exec_ctx.streams.meet_scope_end(stream.name.to_string(), position);
             Ok(())
         }
         NewArgument::Scalar(scalar) => exec_ctx.scalars.meet_new_end_scalar(scalar.name),
@@ -15,7 +15,7 @@
  */

 use super::Generation;
-use air_parser::ast;
+use air_parser::{ast, AirPos};

 #[derive(Clone, Copy, Debug)]
 pub(crate) enum Variable<'i> {

@@ -25,7 +25,7 @@ pub(crate) enum Variable<'i> {
     Stream {
         name: &'i str,
         generation: Generation,
-        position: usize,
+        position: AirPos,
     },
     CanonStream {
         name: &'i str,

@@ -37,7 +37,7 @@ impl<'i> Variable<'i> {
         Self::Scalar { name }
     }

-    pub(crate) fn stream(name: &'i str, generation: Generation, position: usize) -> Self {
+    pub(crate) fn stream(name: &'i str, generation: Generation, position: AirPos) -> Self {
         Self::Stream {
             name,
             generation,
@@ -21,6 +21,7 @@ use crate::execution_step::ValueAggregate;

 use air_interpreter_data::GlobalStreamGens;
 use air_interpreter_data::RestrictedStreamGens;
+use air_parser::AirPos;

 use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::collections::HashMap;

@@ -66,13 +67,13 @@ impl Streams {
         }
     }

-    pub(crate) fn get(&self, name: &str, position: usize) -> Option<&Stream> {
+    pub(crate) fn get(&self, name: &str, position: AirPos) -> Option<&Stream> {
         self.streams
             .get(name)
             .and_then(|descriptors| find_closest(descriptors.iter(), position))
     }

-    pub(crate) fn get_mut(&mut self, name: &str, position: usize) -> Option<&mut Stream> {
+    pub(crate) fn get_mut(&mut self, name: &str, position: AirPos) -> Option<&mut Stream> {
         self.streams
             .get_mut(name)
             .and_then(|descriptors| find_closest_mut(descriptors.iter_mut(), position))

@@ -83,7 +84,7 @@ impl Streams {
         value: ValueAggregate,
         generation: Generation,
         stream_name: &str,
-        position: usize,
+        position: AirPos,
     ) -> ExecutionResult<u32> {
         match self.get_mut(stream_name, position) {
             Some(stream) => stream.add_value(value, generation),

@@ -107,7 +108,7 @@ impl Streams {
     pub(crate) fn meet_scope_start(&mut self, name: impl Into<String>, span: Span, iteration: u32) {
         let name = name.into();
         let generations_count = self
-            .stream_generation_from_data(&name, span.left as u32, iteration as usize)
+            .stream_generation_from_data(&name, span.left, iteration as usize)
             .unwrap_or_default();

         let new_stream = Stream::from_generations_count(generations_count as usize);

@@ -122,7 +123,7 @@ impl Streams {
         }
     }

-    pub(crate) fn meet_scope_end(&mut self, name: String, position: u32) {
+    pub(crate) fn meet_scope_end(&mut self, name: String, position: AirPos) {
         // unwraps are safe here because met_scope_end must be called after met_scope_start
         let stream_descriptors = self.streams.get_mut(&name).unwrap();
         // delete a stream after exit from a scope

@@ -155,14 +156,14 @@ impl Streams {
         (global_streams, self.collected_restricted_stream_gens)
     }

-    fn stream_generation_from_data(&self, name: &str, position: u32, iteration: usize) -> Option<u32> {
+    fn stream_generation_from_data(&self, name: &str, position: AirPos, iteration: usize) -> Option<u32> {
         self.data_restricted_stream_gens
             .get(name)
             .and_then(|scopes| scopes.get(&position).and_then(|iterations| iterations.get(iteration)))
             .copied()
     }

-    fn collect_stream_generation(&mut self, name: String, position: u32, generation: u32) {
+    fn collect_stream_generation(&mut self, name: String, position: AirPos, generation: u32) {
         match self.collected_restricted_stream_gens.entry(name) {
             Occupied(mut streams) => match streams.get_mut().entry(position) {
                 Occupied(mut iterations) => iterations.get_mut().push(generation),

@@ -183,7 +184,7 @@ impl Streams {
 impl StreamDescriptor {
     pub(self) fn global(stream: Stream) -> Self {
         Self {
-            span: Span::new(0, usize::MAX),
+            span: Span::new(0.into(), usize::MAX.into()),
             stream,
         }
     }

@@ -195,7 +196,7 @@ impl StreamDescriptor {

 fn find_closest<'d>(
     descriptors: impl DoubleEndedIterator<Item = &'d StreamDescriptor>,
-    position: usize,
+    position: AirPos,
 ) -> Option<&'d Stream> {
     // descriptors are placed in a order of decreasing scopes, so it's enough to get the latest suitable
     for descriptor in descriptors.rev() {

@@ -209,7 +210,7 @@ fn find_closest<'d>(

 fn find_closest_mut<'d>(
     descriptors: impl DoubleEndedIterator<Item = &'d mut StreamDescriptor>,
-    position: usize,
+    position: AirPos,
 ) -> Option<&'d mut Stream> {
     // descriptors are placed in a order of decreasing scopes, so it's enough to get the latest suitable
     for descriptor in descriptors.rev() {
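The comment repeated in find_closest and find_closest_mut carries the invariant this code relies on: stream descriptors are stored in order of decreasing scope, so scanning from the back finds the innermost scope containing the position. The loop bodies fall outside the context lines of this diff; a hypothetical reconstruction of the shape of the scan, for illustration only:

    // Hypothetical sketch; only the signature and the reversed iteration
    // are taken from the hunks above.
    fn find_closest_sketch<'d>(
        descriptors: impl DoubleEndedIterator<Item = &'d StreamDescriptor>,
        position: AirPos,
    ) -> Option<&'d Stream> {
        // innermost scopes come last, so the first match from the back wins
        for descriptor in descriptors.rev() {
            if descriptor.span.contains_position(position) {
                return Some(&descriptor.stream);
            }
        }
        None
    }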
@@ -14,6 +14,7 @@
  * limitations under the License.
  */

+use air_parser::AirPos;
 use air_test_utils::prelude::*;

 #[test]

@@ -91,7 +92,7 @@ fn new_with_global_streams_seq() {
     let actual_restricted_streams = data.restricted_streams;
     let expected_restricted_streams = maplit::hashmap! {
         "$stream".to_string() => maplit::hashmap! {
-            282 => vec![1,1]
+            AirPos::from(282) => vec![1,1]
         }
     };
     assert_eq!(actual_restricted_streams, expected_restricted_streams);

@@ -218,7 +219,7 @@ fn new_in_fold_with_ap() {
     let actual_restricted_streams = data.restricted_streams;
     let expected_restricted_streams = maplit::hashmap! {
         "$s1".to_string() => maplit::hashmap! {
-            146 => vec![1,1,1,1,1]
+            AirPos::from(146) => vec![1,1,1,1,1]
         }
     };
     assert_eq!(actual_restricted_streams, expected_restricted_streams);

@@ -264,10 +265,10 @@ fn new_with_streams_with_errors() {
     let actual_restricted_streams = data.restricted_streams;
     let expected_restricted_streams = maplit::hashmap! {
         "$restricted_stream_2".to_string() => maplit::hashmap! {
-            216 => vec![1]
+            AirPos::from(216) => vec![1]
         },
         "$restricted_stream_1".to_string() => maplit::hashmap! {
-            141 => vec![0]
+            AirPos::from(141) => vec![0]
         }
     };
     assert_eq!(actual_restricted_streams, expected_restricted_streams);
@@ -14,6 +14,7 @@
  * limitations under the License.
  */

+use air_parser::AirPos;
 use air_test_utils::prelude::*;

 #[test]

@@ -94,7 +95,7 @@ fn issue_173() {
     let actual_restricted_streams = data.restricted_streams;
     let expected_restricted_streams = maplit::hashmap! {
         "$stream".to_string() => maplit::hashmap! {
-            282 => vec![1,1]
+            AirPos::from(282) => vec![1,1]
         }
     };
     assert_eq!(actual_restricted_streams, expected_restricted_streams);
@@ -19,6 +19,7 @@ use super::CallOutputValue;
 use super::NewArgument;
 use super::Scalar;
 use super::Stream;
+use crate::parser::lexer::AirPos;

 impl<'i> NewArgument<'i> {
     pub fn name(&self) -> &'i str {

@@ -31,11 +32,11 @@ impl<'i> NewArgument<'i> {
 }

 impl<'i> ApResult<'i> {
-    pub fn scalar(name: &'i str, position: usize) -> Self {
+    pub fn scalar(name: &'i str, position: AirPos) -> Self {
         Self::Scalar(Scalar { name, position })
     }

-    pub fn stream(name: &'i str, position: usize) -> Self {
+    pub fn stream(name: &'i str, position: AirPos) -> Self {
         Self::Stream(Stream { name, position })
     }

@@ -48,11 +49,11 @@ impl<'i> ApResult<'i> {
 }

 impl<'i> CallOutputValue<'i> {
-    pub fn scalar(name: &'i str, position: usize) -> Self {
+    pub fn scalar(name: &'i str, position: AirPos) -> Self {
         Self::Scalar(Scalar { name, position })
     }

-    pub fn stream(name: &'i str, position: usize) -> Self {
+    pub fn stream(name: &'i str, position: AirPos) -> Self {
         Self::Stream(Stream { name, position })
     }
 }
@@ -17,8 +17,9 @@
 mod impls;
 mod traits;

-use air_lambda_parser::LambdaAST;
+use crate::parser::lexer::AirPos;
+
+use air_lambda_parser::LambdaAST;
 use serde::Deserialize;
 use serde::Serialize;

@@ -26,7 +27,7 @@ use serde::Serialize;
 #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub struct Scalar<'i> {
     pub name: &'i str,
-    pub position: usize,
+    pub position: AirPos,
 }

 /// A scalar value with possible lambda expression.

@@ -35,14 +36,14 @@ pub struct ScalarWithLambda<'i> {
     pub name: &'i str,
     #[serde(borrow)]
     pub lambda: Option<LambdaAST<'i>>,
-    pub position: usize,
+    pub position: AirPos,
 }

 /// A stream without lambda.
 #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub struct Stream<'i> {
     pub name: &'i str,
-    pub position: usize,
+    pub position: AirPos,
 }

 /// A stream with possible lambda expression.

@@ -51,14 +52,14 @@ pub struct StreamWithLambda<'i> {
     pub name: &'i str,
     #[serde(borrow)]
     pub lambda: Option<LambdaAST<'i>>,
-    pub position: usize,
+    pub position: AirPos,
 }

 /// A canonicalized stream without lambda.
 #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub struct CanonStream<'i> {
     pub name: &'i str,
-    pub position: usize,
+    pub position: AirPos,
 }

 /// A canonicalized stream with lambda.

@@ -67,7 +68,7 @@ pub struct CanonStreamWithLambda<'i> {
     pub name: &'i str,
     #[serde(borrow)]
     pub lambda: Option<LambdaAST<'i>>,
-    pub position: usize,
+    pub position: AirPos,
 }

 /// A variable that could be either scalar or stream without lambda.
@@ -19,7 +19,7 @@ use air_lambda_parser::LambdaAST;
 use air_lambda_parser::ValueAccessor;

 impl<'i> ScalarWithLambda<'i> {
-    pub fn new(name: &'i str, lambda: Option<LambdaAST<'i>>, position: usize) -> Self {
+    pub fn new(name: &'i str, lambda: Option<LambdaAST<'i>>, position: AirPos) -> Self {
         Self {
             name,
             lambda,

@@ -30,7 +30,7 @@ impl<'i> ScalarWithLambda<'i> {
     pub(crate) fn from_value_path(
         name: &'i str,
         accessors: Vec<ValueAccessor<'i>>,
-        position: usize,
+        position: AirPos,
     ) -> Self {
         let lambda = LambdaAST::try_from_accessors(accessors).ok();
         Self {

@@ -42,7 +42,7 @@ impl<'i> ScalarWithLambda<'i> {
 }

 impl<'i> StreamWithLambda<'i> {
-    pub fn new(name: &'i str, lambda: Option<LambdaAST<'i>>, position: usize) -> Self {
+    pub fn new(name: &'i str, lambda: Option<LambdaAST<'i>>, position: AirPos) -> Self {
         Self {
             name,
             lambda,

@@ -54,7 +54,7 @@ impl<'i> StreamWithLambda<'i> {
     pub(crate) fn from_value_path(
         name: &'i str,
         accessors: Vec<ValueAccessor<'i>>,
-        position: usize,
+        position: AirPos,
     ) -> Self {
         let lambda = LambdaAST::try_from_accessors(accessors).ok();
         Self {

@@ -66,13 +66,13 @@ impl<'i> StreamWithLambda<'i> {
 }

 impl<'i> CanonStream<'i> {
-    pub fn new(name: &'i str, position: usize) -> Self {
+    pub fn new(name: &'i str, position: AirPos) -> Self {
         Self { name, position }
     }
 }

 impl<'i> CanonStreamWithLambda<'i> {
-    pub fn new(name: &'i str, lambda: Option<LambdaAST<'i>>, position: usize) -> Self {
+    pub fn new(name: &'i str, lambda: Option<LambdaAST<'i>>, position: AirPos) -> Self {
         Self {
             name,
             lambda,

@@ -82,23 +82,23 @@ impl<'i> CanonStreamWithLambda<'i> {
 }

 impl<'i> Scalar<'i> {
-    pub fn new(name: &'i str, position: usize) -> Self {
+    pub fn new(name: &'i str, position: AirPos) -> Self {
         Self { name, position }
     }
 }

 impl<'i> Stream<'i> {
-    pub fn new(name: &'i str, position: usize) -> Self {
+    pub fn new(name: &'i str, position: AirPos) -> Self {
         Self { name, position }
     }
 }

 impl<'i> Variable<'i> {
-    pub fn scalar(name: &'i str, position: usize) -> Self {
+    pub fn scalar(name: &'i str, position: AirPos) -> Self {
         Self::Scalar(Scalar::new(name, position))
     }

-    pub fn stream(name: &'i str, position: usize) -> Self {
+    pub fn stream(name: &'i str, position: AirPos) -> Self {
         Self::Stream(Stream::new(name, position))
     }

@@ -112,27 +112,27 @@ impl<'i> Variable<'i> {
 }

 impl<'i> VariableWithLambda<'i> {
-    pub fn scalar(name: &'i str, position: usize) -> Self {
+    pub fn scalar(name: &'i str, position: AirPos) -> Self {
         Self::Scalar(ScalarWithLambda::new(name, None, position))
     }

-    pub fn scalar_wl(name: &'i str, lambda: LambdaAST<'i>, position: usize) -> Self {
+    pub fn scalar_wl(name: &'i str, lambda: LambdaAST<'i>, position: AirPos) -> Self {
         Self::Scalar(ScalarWithLambda::new(name, Some(lambda), position))
     }

-    pub fn stream(name: &'i str, position: usize) -> Self {
+    pub fn stream(name: &'i str, position: AirPos) -> Self {
         Self::Stream(StreamWithLambda::new(name, None, position))
     }

-    pub fn stream_wl(name: &'i str, lambda: LambdaAST<'i>, position: usize) -> Self {
+    pub fn stream_wl(name: &'i str, lambda: LambdaAST<'i>, position: AirPos) -> Self {
         Self::Stream(StreamWithLambda::new(name, Some(lambda), position))
     }

-    pub fn canon_stream(name: &'i str, position: usize) -> Self {
+    pub fn canon_stream(name: &'i str, position: AirPos) -> Self {
         Self::CanonStream(CanonStreamWithLambda::new(name, None, position))
     }

-    pub fn canon_stream_wl(name: &'i str, lambda: LambdaAST<'i>, position: usize) -> Self {
+    pub fn canon_stream_wl(name: &'i str, lambda: LambdaAST<'i>, position: AirPos) -> Self {
         Self::CanonStream(CanonStreamWithLambda::new(name, Some(lambda), position))
     }

@@ -156,7 +156,7 @@ impl<'i> VariableWithLambda<'i> {
     pub(crate) fn from_raw_value_path(
         name: &'i str,
         lambda: Vec<ValueAccessor<'i>>,
-        position: usize,
+        position: AirPos,
     ) -> Self {
         let scalar = ScalarWithLambda::from_value_path(name, lambda, position);
         Self::Scalar(scalar)

@@ -166,7 +166,7 @@ impl<'i> VariableWithLambda<'i> {
     pub(crate) fn from_raw_lambda_stream(
         name: &'i str,
         lambda: Vec<ValueAccessor<'i>>,
-        position: usize,
+        position: AirPos,
     ) -> Self {
         let stream = StreamWithLambda::from_value_path(name, lambda, position);
         Self::Stream(stream)
@@ -28,6 +28,7 @@
 pub mod ast;
 mod parser;

+pub use parser::lexer::AirPos;
 pub use parser::parse;
 pub use parser::AIRLexer;
 pub use parser::AIRParser;
@@ -2,14 +2,14 @@ use crate::ast::*;
 use crate::parser::ParserError;
 use crate::parser::VariableValidator;
 use crate::parser::Span;
-use crate::parser::lexer::Token;
+use crate::parser::lexer::{AirPos, Token};

 use air_lambda_parser::LambdaAST;
 use lalrpop_util::ErrorRecovery;
 use std::rc::Rc;

 // the only thing why input matters here is just introducing lifetime for Token
-grammar<'err, 'input, 'v>(input: &'input str, errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>, validator: &'v mut VariableValidator<'input>);
+grammar<'err, 'input, 'v>(input: &'input str, errors: &'err mut Vec<ErrorRecovery<AirPos, Token<'input>, ParserError>>, validator: &'v mut VariableValidator<'input>);

 pub AIR = Instr;

@@ -230,7 +230,7 @@ CanonStreamArgument: CanonStream<'input> = {
 }

 extern {
-    type Location = usize;
+    type Location = AirPos;
     type Error = ParserError;

     enum Token<'input> {

@@ -239,12 +239,12 @@ extern {
         "[" => Token::OpenSquareBracket,
         "]" => Token::CloseSquareBracket,

-        Scalar => Token::Scalar { name:<&'input str>, position: <usize> },
-        ScalarWithLambda => Token::ScalarWithLambda { name: <&'input str>, lambda: <LambdaAST<'input>>, position: <usize> },
-        Stream => Token::Stream { name: <&'input str>, position: <usize> },
-        StreamWithLambda => Token::StreamWithLambda {name: <&'input str>, lambda:<LambdaAST<'input>>, position: <usize>},
-        CanonStream => Token::CanonStream { name: <&'input str>, position: <usize> },
-        CanonStreamWithLambda => Token::CanonStreamWithLambda {name: <&'input str>, lambda:<LambdaAST<'input>>, position: <usize>},
+        Scalar => Token::Scalar { name:<&'input str>, position: <AirPos> },
+        ScalarWithLambda => Token::ScalarWithLambda { name: <&'input str>, lambda: <LambdaAST<'input>>, position: <AirPos> },
+        Stream => Token::Stream { name: <&'input str>, position: <AirPos> },
+        StreamWithLambda => Token::StreamWithLambda {name: <&'input str>, lambda:<LambdaAST<'input>>, position: <AirPos>},
+        CanonStream => Token::CanonStream { name: <&'input str>, position: <AirPos> },
+        CanonStreamWithLambda => Token::CanonStreamWithLambda {name: <&'input str>, lambda:<LambdaAST<'input>>, position: <AirPos>},

         Literal => Token::StringLiteral(<&'input str>),
         I64 => Token::I64(<i64>),
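For context on the grammar change: LALRPOP takes the location type of an external lexer from the `extern` block, so switching `type Location` from `usize` to `AirPos` is what forces every `Spanned` item produced by the lexer, and every `ErrorRecovery` value collected during parsing, to carry `AirPos` too. The `Spanned` alias used throughout the lexer follows the usual LALRPOP convention; its exact definition in this crate is assumed here:

    // Conventional LALRPOP external-lexer item type: (start, token, end).
    pub type Spanned<Token, Loc, Error> = Result<(Loc, Token, Loc), Error>;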
File diff suppressed because it is too large.
@@ -16,6 +16,7 @@

 use super::air;
 use super::lexer::AIRLexer;
+use super::lexer::AirPos;
 use super::lexer::Token;
 use super::ParserError;
 use crate::ast::Instruction;

@@ -39,7 +40,7 @@ pub fn parse(air_script: &str) -> Result<Box<Instruction<'_>>, String> {
     let file_id = files.add("script.air", air_script);

     PARSER.with(|parser| {
-        let mut errors = Vec::new();
+        let mut errors: Vec<ErrorRecovery<AirPos, Token<'_>, ParserError>> = Vec::new();
         let lexer = AIRLexer::new(air_script);
         let mut validator = VariableValidator::new();
         let result = parser.parse(air_script, &mut errors, &mut validator, lexer);

@@ -65,7 +66,7 @@ pub fn parse(air_script: &str) -> Result<Box<Instruction<'_>>, String> {
 fn report_errors(
     file_id: usize,
     files: SimpleFiles<&str, &str>,
-    errors: Vec<ErrorRecovery<usize, Token<'_>, ParserError>>,
+    errors: Vec<ErrorRecovery<AirPos, Token<'_>, ParserError>>,
 ) -> String {
     let labels = errors_to_labels(file_id, errors);
     let diagnostic = Diagnostic::error().with_labels(labels);

@@ -85,7 +86,7 @@ fn report_errors(

 fn errors_to_labels(
     file_id: usize,
-    errors: Vec<ErrorRecovery<usize, Token<'_>, ParserError>>,
+    errors: Vec<ErrorRecovery<AirPos, Token<'_>, ParserError>>,
 ) -> Vec<Label<usize>> {
     errors
         .into_iter()

@@ -93,16 +94,17 @@ fn errors_to_labels(
             ParseError::UnrecognizedToken {
                 token: (start, _, end),
                 expected,
-            } => Label::primary(file_id, start..end)
+            } => Label::primary(file_id, start.into()..end.into())
                 .with_message(format!("expected {}", pretty_expected(expected))),
             ParseError::InvalidToken { location } => {
-                Label::primary(file_id, location..(location + 1)).with_message("unexpected token")
+                Label::primary(file_id, location.into()..(location + 1).into())
+                    .with_message("unexpected token")
             }
             ParseError::ExtraToken {
                 token: (start, _, end),
-            } => Label::primary(file_id, start..end).with_message("extra token"),
+            } => Label::primary(file_id, start.into()..end.into()).with_message("extra token"),
             ParseError::UnrecognizedEOF { location, expected } => {
-                Label::primary(file_id, location..(location + 1))
+                Label::primary(file_id, location.into()..(location + 1).into())
                     .with_message(format!("expected {}", pretty_expected(expected)))
             }
             ParseError::User { error } => parser_error_to_label(file_id, error),

@@ -120,5 +122,5 @@ fn pretty_expected(expected: Vec<String>) -> String {

 fn parser_error_to_label(file_id: usize, error: ParserError) -> Label<usize> {
     let span = error.span();
-    Label::primary(file_id, span.left..span.right).with_message(error.to_string())
+    Label::primary(file_id, span.left.into()..span.right.into()).with_message(error.to_string())
 }
@@ -16,7 +16,7 @@

 use super::errors::LexerError;
 use super::token::Token;
-use super::LexerResult;
+use super::{AirPos, LexerResult};

 use std::iter::Peekable;
 use std::str::CharIndices;

@@ -29,7 +29,7 @@ pub struct AIRLexer<'input> {
 }

 impl<'input> Iterator for AIRLexer<'input> {
-    type Item = Spanned<Token<'input>, usize, LexerError>;
+    type Item = Spanned<Token<'input>, AirPos, LexerError>;

     fn next(&mut self) -> Option<Self::Item> {
         self.next_token()

@@ -44,8 +44,9 @@ impl<'input> AIRLexer<'input> {
        }
    }

-    pub fn next_token(&mut self) -> Option<Spanned<Token<'input>, usize, LexerError>> {
+    pub fn next_token(&mut self) -> Option<Spanned<Token<'input>, AirPos, LexerError>> {
         while let Some((start_pos, ch)) = self.chars.next() {
+            let start_pos = AirPos::from(start_pos);
             match ch {
                 '(' => return Some(Ok((start_pos, Token::OpenRoundBracket, start_pos + 1))),
                 ')' => return Some(Ok((start_pos, Token::CloseRoundBracket, start_pos + 1))),

@@ -79,34 +80,37 @@ impl<'input> AIRLexer<'input> {
     #[allow(clippy::unnecessary_wraps)]
     fn tokenize_string_literal(
         &mut self,
-        start_pos: usize,
-    ) -> Option<Spanned<Token<'input>, usize, LexerError>> {
+        start_pos: AirPos,
+    ) -> Option<Spanned<Token<'input>, AirPos, LexerError>> {
         for (pos, ch) in &mut self.chars {
+            let pos = AirPos::from(pos);
             if ch == '"' {
                 // + 1 to count an open double quote
                 let string_size = pos - start_pos + 1;

                 return Some(Ok((
                     start_pos,
-                    Token::StringLiteral(&self.input[start_pos + 1..pos]),
+                    Token::StringLiteral(&self.input[(start_pos + 1).into()..pos.into()]),
                     start_pos + string_size,
                 )));
             }
         }

-        Some(Err(LexerError::unclosed_quote(start_pos..self.input.len())))
+        Some(Err(LexerError::unclosed_quote(
+            start_pos..self.input.len().into(),
+        )))
     }

     #[allow(clippy::unnecessary_wraps)]
     fn tokenize_string(
         &mut self,
-        start_pos: usize,
+        start_pos: AirPos,
         open_square_bracket_met: bool,
-    ) -> Option<Spanned<Token<'input>, usize, LexerError>> {
+    ) -> Option<Spanned<Token<'input>, AirPos, LexerError>> {
         let end_pos = self.advance_to_token_end(start_pos, open_square_bracket_met);

         // this slicing is safe here because borders come from the chars iterator
-        let token_str = &self.input[start_pos..end_pos];
+        let token_str = &self.input[start_pos.into()..end_pos.into()];

         let token = match string_to_token(token_str, start_pos) {
             Ok(token) => token,

@@ -117,13 +121,13 @@ impl<'input> AIRLexer<'input> {
         Some(Ok((start_pos, token, start_pos + token_str_len)))
     }

-    fn advance_to_token_end(&mut self, start_pos: usize, square_met: bool) -> usize {
+    fn advance_to_token_end(&mut self, start_pos: AirPos, square_met: bool) -> AirPos {
         let mut end_pos = start_pos;
         let mut round_brackets_balance: i64 = 0;
         let mut square_brackets_balance = i64::from(square_met);

         while let Some((pos, ch)) = self.chars.peek() {
-            end_pos = *pos;
+            end_pos = (*pos).into();
             let ch = *ch;

             update_brackets_count(

@@ -144,9 +148,9 @@ impl<'input> AIRLexer<'input> {
     }

     // if it was the last char, advance the end position.
-    fn advance_end_pos(&mut self, end_pos: &mut usize) {
+    fn advance_end_pos(&mut self, end_pos: &mut AirPos) {
         if self.chars.peek().is_none() {
-            *end_pos = self.input.len();
+            *end_pos = self.input.len().into();
         }
     }
 }

@@ -171,7 +175,7 @@ fn should_stop(ch: char, round_brackets_balance: i64, open_square_brackets_balan
     ch.is_whitespace() || round_brackets_balance < 0 || open_square_brackets_balance < 0
 }

-fn string_to_token(input: &str, start_pos: usize) -> LexerResult<Token> {
+fn string_to_token(input: &str, start_pos: AirPos) -> LexerResult<Token> {
     match input {
         "" => Err(LexerError::empty_string(start_pos..start_pos)),

@@ -202,7 +206,7 @@ fn string_to_token(input: &str, start_pos: usize) -> LexerResult<Token> {
     }
 }

-fn parse_last_error(input: &str, start_pos: usize) -> LexerResult<Token<'_>> {
+fn parse_last_error(input: &str, start_pos: AirPos) -> LexerResult<Token<'_>> {
     let last_error_size = LAST_ERROR.len();
     if input.len() == last_error_size {
         return Ok(Token::LastError);
@@ -14,6 +14,7 @@
  * limitations under the License.
  */

+use super::AirPos;
 use super::LexerError;
 use super::LexerResult;
 use super::Token;

@@ -24,7 +25,7 @@ use std::str::CharIndices;

 pub(super) fn try_parse_call_variable(
     string_to_parse: &str,
-    start_pos: usize,
+    start_pos: AirPos,
 ) -> LexerResult<Token<'_>> {
     CallVariableParser::try_parse(string_to_parse, start_pos)
 }

@@ -45,20 +46,20 @@ struct ParserState {
     pub(self) met_tag: MetTag,
     pub(self) is_first_char: bool,
     pub(self) current_char: char,
-    pub(self) current_pos: usize,
+    pub(self) current_offset: usize,
 }

 struct CallVariableParser<'input> {
     string_to_parse_iter: Peekable<CharIndices<'input>>,
     string_to_parse: &'input str,
-    start_pos: usize,
+    start_pos: AirPos,
     state: ParserState,
 }

 impl<'input> CallVariableParser<'input> {
-    fn new(string_to_parse: &'input str, start_pos: usize) -> LexerResult<Self> {
+    fn new(string_to_parse: &'input str, start_pos: AirPos) -> LexerResult<Self> {
         let mut string_to_parse_iter = string_to_parse.char_indices().peekable();
-        let (current_pos, current_char) = match string_to_parse_iter.next() {
+        let (current_offset, current_char) = match string_to_parse_iter.next() {
             Some(pos_and_ch) => pos_and_ch,
             None => return Err(LexerError::empty_variable_or_const(start_pos..start_pos)),
         };

@@ -71,7 +72,7 @@ impl<'input> CallVariableParser<'input> {
             is_first_char: true,
             met_tag: MetTag::None,
             current_char,
-            current_pos,
+            current_offset,
         };

         let parser = Self {

@@ -86,7 +87,7 @@ impl<'input> CallVariableParser<'input> {

     pub(self) fn try_parse(
         string_to_parse: &'input str,
-        start_pos: usize,
+        start_pos: AirPos,
     ) -> LexerResult<Token<'input>> {
         let mut parser = Self::new(string_to_parse, start_pos)?;

@@ -112,7 +113,7 @@ impl<'input> CallVariableParser<'input> {
         };

         self.state.current_char = ch;
-        self.state.current_pos = pos;
+        self.state.current_offset = pos;
         self.state.is_first_char = false;

         true

@@ -186,7 +187,7 @@ impl<'input> CallVariableParser<'input> {

     fn try_parse_as_stream_start(&mut self) -> LexerResult<bool> {
         let stream_tag = MetTag::from_tag(self.current_char());
-        if self.current_pos() == 0 && stream_tag.is_tag() {
+        if self.current_offset() == 0 && stream_tag.is_tag() {
             if self.string_to_parse.len() == 1 {
                 let error_pos = self.pos_in_string_to_parse();
                 return Err(LexerError::empty_stream_name(error_pos..error_pos));

@@ -232,12 +233,12 @@ impl<'input> CallVariableParser<'input> {

     fn try_parse_first_met_dot(&mut self) -> LexerResult<bool> {
         if !self.dot_met() && self.current_char() == '.' {
-            if self.current_pos() == 0 {
+            if self.current_offset() == 0 {
                 return Err(LexerError::leading_dot(
                     self.start_pos..self.pos_in_string_to_parse(),
                 ));
             }
-            self.state.first_dot_met_pos = Some(self.current_pos());
+            self.state.first_dot_met_pos = Some(self.current_offset());
             return Ok(true);
         }

@@ -260,12 +261,12 @@ impl<'input> CallVariableParser<'input> {
         super::is_json_path_allowed_char(self.current_char())
     }

-    fn pos_in_string_to_parse(&self) -> usize {
-        self.start_pos + self.current_pos()
+    fn pos_in_string_to_parse(&self) -> AirPos {
+        self.start_pos + self.current_offset()
     }

-    fn current_pos(&self) -> usize {
-        self.state.current_pos
+    fn current_offset(&self) -> usize {
+        self.state.current_offset
     }

     fn current_char(&self) -> char {

@@ -273,7 +274,7 @@ impl<'input> CallVariableParser<'input> {
     }

     fn is_last_char(&self) -> bool {
-        self.current_pos() == self.string_to_parse.len() - 1
+        self.current_offset() == self.string_to_parse.len() - 1
     }

     fn to_variable_token<'v>(&self, name: &'v str) -> Token<'v> {

@@ -313,17 +314,18 @@ impl<'input> CallVariableParser<'input> {
         }
     }

-    fn try_to_variable_and_lambda(&self, lambda_start_pos: usize) -> LexerResult<Token<'input>> {
+    fn try_to_variable_and_lambda(&self, lambda_start_offset: usize) -> LexerResult<Token<'input>> {
         let lambda =
-            crate::parse_lambda(&self.string_to_parse[lambda_start_pos..]).map_err(|e| {
+            crate::parse_lambda(&self.string_to_parse[lambda_start_offset..]).map_err(|e| {
                 LexerError::lambda_parser_error(
-                    self.start_pos + lambda_start_pos..self.start_pos + self.string_to_parse.len(),
+                    self.start_pos + lambda_start_offset
+                        ..self.start_pos + self.string_to_parse.len(),
                     e.to_string(),
                 )
             })?;

-        let token =
-            self.to_variable_token_with_lambda(&self.string_to_parse[0..lambda_start_pos], lambda);
+        let token = self
+            .to_variable_token_with_lambda(&self.string_to_parse[0..lambda_start_offset], lambda);
         Ok(token)
     }

@@ -364,7 +366,9 @@ impl<'input> CallVariableParser<'input> {
             (true, None) => self.try_to_i64(),
             (true, Some(_)) => self.try_to_f64(),
             (false, None) => Ok(self.to_variable_token(self.string_to_parse)),
-            (false, Some(lambda_start_pos)) => self.try_to_variable_and_lambda(lambda_start_pos),
+            (false, Some(lambda_start_offset)) => {
+                self.try_to_variable_and_lambda(lambda_start_offset)
+            }
         }
     }
 }
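The rename of `current_pos` to `current_offset` in this file marks the distinction the commit enforces: an offset inside the substring being parsed stays a plain `usize`, while an absolute position in the whole script is an `AirPos`. The two meet in `pos_in_string_to_parse`, where `Add<usize> for AirPos` combines them. A small illustration with made-up values:

    let start_pos = AirPos::from(40); // where the token starts in the script
    let current_offset: usize = 3;    // how far the parser is into the token's text
    let absolute = start_pos + current_offset; // AirPos + usize -> AirPos
    assert_eq!(absolute, AirPos::from(43));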
@@ -14,7 +14,9 @@
  * limitations under the License.
  */

+use super::AirPos;
 use crate::parser::Span;
+
 use thiserror::Error as ThisError;

 use std::num::ParseFloatError;

@@ -88,44 +90,44 @@ impl LexerError {
         *span
     }

-    pub fn unclosed_quote(range: Range<usize>) -> Self {
+    pub fn unclosed_quote(range: Range<AirPos>) -> Self {
         Self::UnclosedQuote(range.into())
     }

-    pub fn empty_string(range: Range<usize>) -> Self {
+    pub fn empty_string(range: Range<AirPos>) -> Self {
         Self::EmptyString(range.into())
     }

-    pub fn is_not_alphanumeric(range: Range<usize>) -> Self {
+    pub fn is_not_alphanumeric(range: Range<AirPos>) -> Self {
         Self::IsNotAlphanumeric(range.into())
     }

-    pub fn empty_stream_name(range: Range<usize>) -> Self {
+    pub fn empty_stream_name(range: Range<AirPos>) -> Self {
         Self::EmptyStreamName(range.into())
     }

-    pub fn empty_variable_or_const(range: Range<usize>) -> Self {
+    pub fn empty_variable_or_const(range: Range<AirPos>) -> Self {
         Self::EmptyVariableOrConst(range.into())
     }

-    pub fn invalid_lambda(range: Range<usize>) -> Self {
+    pub fn invalid_lambda(range: Range<AirPos>) -> Self {
         Self::InvalidLambda(range.into())
     }

-    pub fn unallowed_char_in_number(range: Range<usize>) -> Self {
+    pub fn unallowed_char_in_number(range: Range<AirPos>) -> Self {
         Self::UnallowedCharInNumber(range.into())
     }

-    pub fn parse_int_error(range: Range<usize>, parse_int_error: ParseIntError) -> Self {
+    pub fn parse_int_error(range: Range<AirPos>, parse_int_error: ParseIntError) -> Self {
         Self::ParseIntError(range.into(), parse_int_error)
     }

-    pub fn parse_float_error(range: Range<usize>, parse_float_error: ParseFloatError) -> Self {
+    pub fn parse_float_error(range: Range<AirPos>, parse_float_error: ParseFloatError) -> Self {
         Self::ParseFloatError(range.into(), parse_float_error)
     }

     pub fn lambda_parser_error(
-        range: Range<usize>,
+        range: Range<AirPos>,
         se_lambda_parser_error: impl Into<String>,
     ) -> Self {
         Self::LambdaParserError {

@@ -134,18 +136,18 @@ impl LexerError {
         }
     }

-    pub fn last_error_path_error(range: Range<usize>, error_path: String) -> Self {
+    pub fn last_error_path_error(range: Range<AirPos>, error_path: String) -> Self {
         Self::LastErrorPathError {
             span: range.into(),
             error_path,
         }
     }

-    pub fn too_big_float(range: Range<usize>) -> Self {
+    pub fn too_big_float(range: Range<AirPos>) -> Self {
         Self::TooBigFloat(range.into())
     }

-    pub fn leading_dot(range: Range<usize>) -> Self {
+    pub fn leading_dot(range: Range<AirPos>) -> Self {
         Self::LeadingDot(range.into())
     }
 }

@@ -155,14 +157,14 @@ use crate::parser::air::__ToTriple;
 use crate::parser::ParserError;

 impl<'err, 'input, 'i> __ToTriple<'err, 'input, 'i>
-    for Result<(usize, Token<'input>, usize), LexerError>
+    for Result<(AirPos, Token<'input>, AirPos), LexerError>
 {
     #[allow(clippy::wrong_self_convention)]
     fn to_triple(
         value: Self,
     ) -> Result<
-        (usize, Token<'input>, usize),
-        lalrpop_util::ParseError<usize, Token<'input>, ParserError>,
+        (AirPos, Token<'input>, AirPos),
+        lalrpop_util::ParseError<AirPos, Token<'input>, ParserError>,
     > {
         match value {
             Ok(v) => Ok(v),
@@ -22,9 +22,11 @@ mod utils;

 #[cfg(test)]
 mod tests;
+pub mod text_pos;

 pub use air_lexer::AIRLexer;
 pub use errors::LexerError;
+pub use text_pos::AirPos;
 pub use token::Token;

 pub(super) type LexerResult<T> = std::result::Result<T, LexerError>;
@@ -14,6 +14,8 @@
  * limitations under the License.
  */

+use crate::AirPos;
+
 use super::air_lexer::Spanned;
 use super::AIRLexer;
 use super::LexerError;

@@ -26,17 +28,17 @@ use air_lambda_ast::Functor;
 use fstrings::f;
 use fstrings::format_args_f;

-fn run_lexer(input: &str) -> Vec<Spanned<Token<'_>, usize, LexerError>> {
+fn run_lexer(input: &str) -> Vec<Spanned<Token<'_>, AirPos, LexerError>> {
     let lexer = AIRLexer::new(input);
     lexer.collect()
 }

 #[allow(dead_code)]
 enum TokenCompareStrategy<'token> {
-    All(Vec<Spanned<Token<'token>, usize, LexerError>>),
-    Some(Vec<usize>, Vec<Spanned<Token<'token>, usize, LexerError>>),
-    One(usize, Spanned<Token<'token>, usize, LexerError>),
-    Single(Spanned<Token<'token>, usize, LexerError>),
+    All(Vec<Spanned<Token<'token>, AirPos, LexerError>>),
+    Some(Vec<usize>, Vec<Spanned<Token<'token>, AirPos, LexerError>>),
+    One(usize, Spanned<Token<'token>, AirPos, LexerError>),
+    Single(Spanned<Token<'token>, AirPos, LexerError>),
 }

 use TokenCompareStrategy::*;
@@ -58,93 +60,96 @@ fn lexer_test(input: &str, expected_tokens: TokenCompareStrategy) {

 #[test]
 fn air_instructions() {
-    lexer_test("call", Single(Ok((0, Token::Call, 4))));
+    lexer_test("call", Single(Ok((0.into(), Token::Call, 4.into()))));

     lexer_test(
         "(call)",
         All(vec![
-            Ok((0, Token::OpenRoundBracket, 1)),
-            Ok((1, Token::Call, 5)),
-            Ok((5, Token::CloseRoundBracket, 6)),
+            Ok((0.into(), Token::OpenRoundBracket, 1.into())),
+            Ok((1.into(), Token::Call, 5.into())),
+            Ok((5.into(), Token::CloseRoundBracket, 6.into())),
         ]),
     );

-    lexer_test("par", Single(Ok((0, Token::Par, 3))));
+    lexer_test("par", Single(Ok((0.into(), Token::Par, 3.into()))));

     lexer_test(
         "(par)",
         All(vec![
-            Ok((0, Token::OpenRoundBracket, 1)),
-            Ok((1, Token::Par, 4)),
-            Ok((4, Token::CloseRoundBracket, 5)),
+            Ok((0.into(), Token::OpenRoundBracket, 1.into())),
+            Ok((1.into(), Token::Par, 4.into())),
+            Ok((4.into(), Token::CloseRoundBracket, 5.into())),
         ]),
     );

-    lexer_test("seq", Single(Ok((0, Token::Seq, 3))));
+    lexer_test("seq", Single(Ok((0.into(), Token::Seq, 3.into()))));

     lexer_test(
         "(seq)",
         All(vec![
-            Ok((0, Token::OpenRoundBracket, 1)),
-            Ok((1, Token::Seq, 4)),
-            Ok((4, Token::CloseRoundBracket, 5)),
+            Ok((0.into(), Token::OpenRoundBracket, 1.into())),
+            Ok((1.into(), Token::Seq, 4.into())),
+            Ok((4.into(), Token::CloseRoundBracket, 5.into())),
        ]),
     );

-    lexer_test("null", Single(Ok((0, Token::Null, 4))));
+    lexer_test("null", Single(Ok((0.into(), Token::Null, 4.into()))));

     lexer_test(
         "(null)",
         All(vec![
-            Ok((0, Token::OpenRoundBracket, 1)),
-            Ok((1, Token::Null, 5)),
-            Ok((5, Token::CloseRoundBracket, 6)),
+            Ok((0.into(), Token::OpenRoundBracket, 1.into())),
+            Ok((1.into(), Token::Null, 5.into())),
+            Ok((5.into(), Token::CloseRoundBracket, 6.into())),
         ]),
     );

-    lexer_test("fail", Single(Ok((0, Token::Fail, 4))));
+    lexer_test("fail", Single(Ok((0.into(), Token::Fail, 4.into()))));

-    lexer_test("fold", Single(Ok((0, Token::Fold, 4))));
+    lexer_test("fold", Single(Ok((0.into(), Token::Fold, 4.into()))));

     lexer_test(
         "(fold)",
         All(vec![
-            Ok((0, Token::OpenRoundBracket, 1)),
-            Ok((1, Token::Fold, 5)),
-            Ok((5, Token::CloseRoundBracket, 6)),
+            Ok((0.into(), Token::OpenRoundBracket, 1.into())),
+            Ok((1.into(), Token::Fold, 5.into())),
+            Ok((5.into(), Token::CloseRoundBracket, 6.into())),
         ]),
     );

-    lexer_test("next", Single(Ok((0, Token::Next, 4))));
+    lexer_test("next", Single(Ok((0.into(), Token::Next, 4.into()))));

     lexer_test(
         "(next)",
         All(vec![
-            Ok((0, Token::OpenRoundBracket, 1)),
-            Ok((1, Token::Next, 5)),
-            Ok((5, Token::CloseRoundBracket, 6)),
+            Ok((0.into(), Token::OpenRoundBracket, 1.into())),
+            Ok((1.into(), Token::Next, 5.into())),
+            Ok((5.into(), Token::CloseRoundBracket, 6.into())),
         ]),
     );

-    lexer_test("match", Single(Ok((0, Token::Match, 5))));
+    lexer_test("match", Single(Ok((0.into(), Token::Match, 5.into()))));

     lexer_test(
         "(match)",
         All(vec![
-            Ok((0, Token::OpenRoundBracket, 1)),
-            Ok((1, Token::Match, 6)),
-            Ok((6, Token::CloseRoundBracket, 7)),
+            Ok((0.into(), Token::OpenRoundBracket, 1.into())),
+            Ok((1.into(), Token::Match, 6.into())),
+            Ok((6.into(), Token::CloseRoundBracket, 7.into())),
         ]),
     );

-    lexer_test("mismatch", Single(Ok((0, Token::MisMatch, 8))));
+    lexer_test(
+        "mismatch",
+        Single(Ok((0.into(), Token::MisMatch, 8.into()))),
+    );

     lexer_test(
         "(mismatch)",
         All(vec![
-            Ok((0, Token::OpenRoundBracket, 1)),
-            Ok((1, Token::MisMatch, 9)),
-            Ok((9, Token::CloseRoundBracket, 10)),
+            Ok((0.into(), Token::OpenRoundBracket, 1.into())),
+            Ok((1.into(), Token::MisMatch, 9.into())),
+            Ok((9.into(), Token::CloseRoundBracket, 10.into())),
         ]),
     );
 }
@@ -155,7 +160,7 @@ fn init_peer_id() {

     lexer_test(
         INIT_PEER_ID,
-        Single(Ok((0, Token::InitPeerId, INIT_PEER_ID.len()))),
+        Single(Ok((0.into(), Token::InitPeerId, INIT_PEER_ID.len().into()))),
     );
 }

@@ -165,7 +170,7 @@ fn timestamp() {

     lexer_test(
         TIMESTAMP,
-        Single(Ok((0, Token::Timestamp, TIMESTAMP.len()))),
+        Single(Ok((0.into(), Token::Timestamp, TIMESTAMP.len().into()))),
     );
 }

@@ -173,7 +178,7 @@ fn timestamp() {
 fn ttl() {
     const TTL: &str = "%ttl%";

-    lexer_test(TTL, Single(Ok((0, Token::TTL, TTL.len()))));
+    lexer_test(TTL, Single(Ok((0.into(), Token::TTL, TTL.len().into()))));
 }

 #[test]

@@ -183,12 +188,12 @@ fn stream() {
     lexer_test(
         STREAM,
         Single(Ok((
-            0,
+            0.into(),
             Token::Stream {
                 name: STREAM,
-                position: 0,
+                position: 0.into(),
             },
-            STREAM.len(),
+            STREAM.len().into(),
         ))),
     );
 }

@@ -200,12 +205,12 @@ fn canon_stream() {
     lexer_test(
         CANON_STREAM,
         Single(Ok((
-            0,
+            0.into(),
             Token::CanonStream {
                 name: CANON_STREAM,
-                position: 0,
+                position: 0.into(),
             },
-            CANON_STREAM.len(),
+            CANON_STREAM.len().into(),
         ))),
     );
 }
@@ -218,13 +223,13 @@ fn stream_with_functor() {
     lexer_test(
         &stream_with_functor,
         Single(Ok((
-            0,
+            0.into(),
             Token::StreamWithLambda {
                 name: stream_name,
                 lambda: LambdaAST::Functor(Functor::Length),
-                position: 0,
+                position: 0.into(),
             },
-            stream_with_functor.len(),
+            stream_with_functor.len().into(),
         ))),
     );
 }

@@ -237,13 +242,13 @@ fn canon_stream_with_functor() {
     lexer_test(
         &canon_stream_with_functor,
         Single(Ok((
-            0,
+            0.into(),
             Token::CanonStreamWithLambda {
                 name: canon_stream_name,
                 lambda: LambdaAST::Functor(Functor::Length),
-                position: 0,
+                position: 0.into(),
             },
-            canon_stream_with_functor.len(),
+            canon_stream_with_functor.len().into(),
         ))),
     );
 }

@@ -256,13 +261,13 @@ fn scalar_with_functor() {
     lexer_test(
         &scalar_with_functor,
         Single(Ok((
-            0,
+            0.into(),
             Token::ScalarWithLambda {
                 name: scalar_name,
                 lambda: LambdaAST::Functor(Functor::Length),
-                position: 0,
+                position: 0.into(),
             },
-            scalar_with_functor.len(),
+            scalar_with_functor.len().into(),
         ))),
     );
 }
@@ -274,9 +279,9 @@ fn string_literal() {
     lexer_test(
         STRING_LITERAL,
         Single(Ok((
-            0,
+            0.into(),
             Token::StringLiteral(&STRING_LITERAL[1..STRING_LITERAL.len() - 1]),
-            STRING_LITERAL.len(),
+            STRING_LITERAL.len().into(),
         ))),
     );
 }

@@ -289,9 +294,9 @@ fn integer_numbers() {
     lexer_test(
         &number_with_plus_sign,
         Single(Ok((
-            0,
+            0.into(),
             Token::I64(test_integer),
-            number_with_plus_sign.len(),
+            number_with_plus_sign.len().into(),
         ))),
     );

@@ -299,7 +304,11 @@ fn integer_numbers() {

     lexer_test(
         &number,
-        Single(Ok((0, Token::I64(test_integer), number.len()))),
+        Single(Ok((
+            0.into(),
+            Token::I64(test_integer),
+            number.len().into(),
+        ))),
     );

     let number_with_minus_sign = f!("-{test_integer}");
@@ -307,9 +316,9 @@ fn integer_numbers() {
     lexer_test(
         &number_with_minus_sign,
         Single(Ok((
-            0,
+            0.into(),
             Token::I64(-test_integer),
-            number_with_minus_sign.len(),
+            number_with_minus_sign.len().into(),
         ))),
     );
 }

@@ -322,9 +331,9 @@ fn float_number() {
     lexer_test(
         &float_number_with_plus_sign,
         Single(Ok((
-            0,
+            0.into(),
             Token::F64(test_float),
-            float_number_with_plus_sign.len(),
+            float_number_with_plus_sign.len().into(),
         ))),
     );

@@ -332,7 +341,11 @@ fn float_number() {

     lexer_test(
         &float_number,
-        Single(Ok((0, Token::F64(test_float), float_number.len()))),
+        Single(Ok((
+            0.into(),
+            Token::F64(test_float),
+            float_number.len().into(),
+        ))),
     );

     let float_number_with_minus_sign = f!("-{test_float}");

@@ -340,9 +353,9 @@ fn float_number() {
     lexer_test(
         &float_number_with_minus_sign,
         Single(Ok((
-            0,
+            0.into(),
             Token::F64(-test_float),
-            float_number_with_minus_sign.len(),
+            float_number_with_minus_sign.len().into(),
         ))),
     );
 }
@@ -366,7 +379,9 @@ fn too_big_float_number() {

     lexer_test(
         FNUMBER,
-        Single(Err(LexerError::too_big_float(0..FNUMBER.len()))),
+        Single(Err(LexerError::too_big_float(
+            0.into()..FNUMBER.len().into(),
+        ))),
     );
 }

@@ -378,7 +393,7 @@ fn lambda() {
     lexer_test(
         LAMBDA,
         Single(Ok((
-            0,
+            0.into(),
             Token::ScalarWithLambda {
                 name: "value",
                 lambda: LambdaAST::try_from_accessors(vec![

@@ -388,9 +403,9 @@ fn lambda() {
                     ValueAccessor::ArrayAccess { idx: 1 },
                 ])
                 .unwrap(),
-                position: 0,
+                position: 0.into(),
             },
-            LAMBDA.len(),
+            LAMBDA.len().into(),
         ))),
     );
 }

@@ -401,14 +416,18 @@ fn lambda_path_numbers() {

     lexer_test(
         LAMBDA,
-        Single(Err(LexerError::unallowed_char_in_number(6..6))),
+        Single(Err(LexerError::unallowed_char_in_number(
+            6.into()..6.into(),
+        ))),
     );

     const LAMBDA1: &str = r#"+12345.$[$@[]():?.*,"]"#;

     lexer_test(
         LAMBDA1,
-        Single(Err(LexerError::unallowed_char_in_number(7..7))),
+        Single(Err(LexerError::unallowed_char_in_number(
+            7.into()..7.into(),
+        ))),
     );
 }
@@ -416,13 +435,16 @@
 fn leading_dot() {
     const LEADING_DOT: &str = ".111";

-    lexer_test(LEADING_DOT, Single(Err(LexerError::leading_dot(0..0))));
+    lexer_test(
+        LEADING_DOT,
+        Single(Err(LexerError::leading_dot(0.into()..0.into()))),
+    );

     const LEADING_DOT_AFTER_SIGN: &str = "+.1111";

     lexer_test(
         LEADING_DOT_AFTER_SIGN,
-        Single(Err(LexerError::leading_dot(1..1))),
+        Single(Err(LexerError::leading_dot(1.into()..1.into()))),
     );
 }

@@ -432,7 +454,10 @@ fn unclosed_quote() {

     lexer_test(
         UNCLOSED_QUOTE_AIR,
-        One(4, Err(LexerError::is_not_alphanumeric(33..33))),
+        One(
+            4,
+            Err(LexerError::is_not_alphanumeric(33.into()..33.into())),
+        ),
     );
 }

@@ -443,7 +468,7 @@ fn bad_value() {

     lexer_test(
         INVALID_VALUE,
-        Single(Err(LexerError::is_not_alphanumeric(3..3))),
+        Single(Err(LexerError::is_not_alphanumeric(3.into()..3.into()))),
     );

     // value contains ! that only allowed at the end of a lambda expression

@@ -451,7 +476,7 @@ fn bad_value() {

     lexer_test(
         INVALID_VALUE2,
-        Single(Err(LexerError::invalid_lambda(7..7))),
+        Single(Err(LexerError::invalid_lambda(7.into()..7.into()))),
     );
 }

@@ -461,7 +486,7 @@ fn invalid_lambda() {

     lexer_test(
         INVALID_LAMBDA,
-        Single(Err(LexerError::invalid_lambda(7..7))),
+        Single(Err(LexerError::invalid_lambda(7.into()..7.into()))),
     );
 }

@@ -470,7 +495,10 @@ fn invalid_lambda_numbers() {
     // this lambda contains all allowed in lambda characters
     const LAMBDA: &str = r#"+12345$[$@[]():?.*,"!]"#;

-    lexer_test(LAMBDA, Single(Err(LexerError::is_not_alphanumeric(6..6))));
+    lexer_test(
+        LAMBDA,
+        Single(Err(LexerError::is_not_alphanumeric(6.into()..6.into()))),
+    );
 }
#[test]
|
||||
@ -479,7 +507,7 @@ fn last_error() {
|
||||
|
||||
lexer_test(
|
||||
LAST_ERROR,
|
||||
Single(Ok((0, Token::LastError, LAST_ERROR.len()))),
|
||||
Single(Ok((0.into(), Token::LastError, LAST_ERROR.len().into()))),
|
||||
);
|
||||
}
|
||||
|
||||
@ -494,7 +522,10 @@ fn last_error_instruction() {
|
||||
.unwrap(),
|
||||
);
|
||||
|
||||
lexer_test(LAST_ERROR, Single(Ok((0, token, LAST_ERROR.len()))));
|
||||
lexer_test(
|
||||
LAST_ERROR,
|
||||
Single(Ok((0.into(), token, LAST_ERROR.len().into()))),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -507,7 +538,10 @@ fn last_error_message() {
|
||||
}])
|
||||
.unwrap(),
|
||||
);
|
||||
lexer_test(LAST_ERROR, Single(Ok((0, token, LAST_ERROR.len()))));
|
||||
lexer_test(
|
||||
LAST_ERROR,
|
||||
Single(Ok((0.into(), token, LAST_ERROR.len().into()))),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -520,7 +554,10 @@ fn last_error_peer_id() {
|
||||
}])
|
||||
.unwrap(),
|
||||
);
|
||||
lexer_test(LAST_ERROR, Single(Ok((0, token, LAST_ERROR.len()))));
|
||||
lexer_test(
|
||||
LAST_ERROR,
|
||||
Single(Ok((0.into(), token, LAST_ERROR.len().into()))),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -533,7 +570,10 @@ fn last_error_non_standard_field() {
|
||||
}])
|
||||
.unwrap(),
|
||||
);
|
||||
lexer_test(LAST_ERROR, Single(Ok((0, token, LAST_ERROR.len()))));
|
||||
lexer_test(
|
||||
LAST_ERROR,
|
||||
Single(Ok((0.into(), token, LAST_ERROR.len().into()))),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -542,14 +582,22 @@ fn booleans() {

     lexer_test(
         TRUE_BOOL_CONST,
-        Single(Ok((0, Token::Boolean(true), TRUE_BOOL_CONST.len()))),
+        Single(Ok((
+            0.into(),
+            Token::Boolean(true),
+            TRUE_BOOL_CONST.len().into(),
+        ))),
     );

     const FALSE_BOOL_CONST: &str = "false";

     lexer_test(
         FALSE_BOOL_CONST,
-        Single(Ok((0, Token::Boolean(false), FALSE_BOOL_CONST.len()))),
+        Single(Ok((
+            0.into(),
+            Token::Boolean(false),
+            FALSE_BOOL_CONST.len().into(),
+        ))),
     );

     const NON_BOOL_CONST: &str = "true1";

@@ -557,12 +605,12 @@ fn booleans() {
     lexer_test(
         NON_BOOL_CONST,
         Single(Ok((
-            0,
+            0.into(),
             Token::Scalar {
                 name: NON_BOOL_CONST,
-                position: 0,
+                position: 0.into(),
             },
-            NON_BOOL_CONST.len(),
+            NON_BOOL_CONST.len().into(),
         ))),
     );
 }

@@ -578,8 +626,8 @@ fn match_with_empty_array__() {
         Some(
             vec![3, 4],
             vec![
-                Ok((14, Token::OpenSquareBracket, 15)),
-                Ok((15, Token::CloseSquareBracket, 16)),
+                Ok((14.into(), Token::OpenSquareBracket, 15.into())),
+                Ok((15.into(), Token::CloseSquareBracket, 16.into())),
             ],
         ),
     );
crates/air-lib/air-parser/src/parser/lexer/text_pos.rs (new file, 75 lines)

@@ -0,0 +1,75 @@
+/*
+ * Copyright 2021 Fluence Labs Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use std::ops::{Add, Sub};
+
+use serde::{Deserialize, Serialize};
+
+/// Character position in the AIR script text.
+#[derive(
+    Clone, Copy, Debug, Default, Hash, PartialEq, Eq, Serialize, Deserialize, PartialOrd, Ord,
+)]
+#[serde(transparent)]
+#[repr(transparent)]
+pub struct AirPos(usize);
+
+impl From<usize> for AirPos {
+    fn from(value: usize) -> Self {
+        Self(value)
+    }
+}
+
+impl From<AirPos> for usize {
+    fn from(p: AirPos) -> Self {
+        p.0
+    }
+}
+
+impl Add<usize> for AirPos {
+    type Output = Self;
+
+    fn add(self, rhs: usize) -> Self::Output {
+        Self(self.0 + rhs)
+    }
+}
+
+impl Sub<usize> for AirPos {
+    type Output = AirPos;
+
+    fn sub(self, rhs: usize) -> Self::Output {
+        Self(self.0 - rhs)
+    }
+}
+
+impl Sub<AirPos> for AirPos {
+    type Output = usize;
+
+    fn sub(self, rhs: AirPos) -> Self::Output {
+        self.0 - rhs.0
+    }
+}
+
+impl PartialEq<usize> for AirPos {
+    fn eq(&self, other: &usize) -> bool {
+        self.0 == *other
+    }
+}
+
+impl std::fmt::Display for AirPos {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(&self.0, f)
+    }
+}
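A note on the operator impls above: positions shift by byte counts (`Add<usize>`, `Sub<usize>`), the distance between two positions comes out as a plain length (`Sub<AirPos>` returns `usize`), and `PartialEq<usize>` keeps comparisons against raw offsets terse. A sketch of how the rest of the commit leans on this (illustrative values; the input is assumed long enough to slice):

    fn airpos_ops_demo(input: &str) {
        let start = AirPos::from(10);
        let end = start + 5;          // Add<usize>: shift by a byte count
        let len: usize = end - start; // Sub<AirPos>: a distance is a usize
        assert_eq!(len, 5);
        assert!(end == 15usize);      // PartialEq<usize>
        let token = &input[start.into()..end.into()]; // slicing needs explicit usize
        println!("token `{token}` starts at {start}"); // Display prints the raw number
    }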
@@ -14,6 +14,7 @@
  * limitations under the License.
  */

+use super::AirPos;
 use crate::LambdaAST;

 use serde::Deserialize;

@@ -29,30 +30,30 @@ pub enum Token<'input> {

     Scalar {
         name: &'input str,
-        position: usize,
+        position: AirPos,
     },
     ScalarWithLambda {
         name: &'input str,
         lambda: LambdaAST<'input>,
-        position: usize,
+        position: AirPos,
     },
     Stream {
         name: &'input str,
-        position: usize,
+        position: AirPos,
     },
     StreamWithLambda {
         name: &'input str,
         lambda: LambdaAST<'input>,
-        position: usize,
+        position: AirPos,
     },
     CanonStream {
         name: &'input str,
-        position: usize,
+        position: AirPos,
     },
     CanonStreamWithLambda {
         name: &'input str,
         lambda: LambdaAST<'input>,
-        position: usize,
+        position: AirPos,
     },

     StringLiteral(&'input str),
@ -14,6 +14,8 @@
* limitations under the License.
*/

use super::lexer::AirPos;

use serde::Deserialize;
use serde::Serialize;

@ -21,16 +23,16 @@ use std::ops::Range;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Span {
pub left: usize,
pub right: usize,
pub left: AirPos,
pub right: AirPos,
}

impl Span {
pub fn new(left: usize, right: usize) -> Self {
pub fn new(left: AirPos, right: AirPos) -> Self {
Self { left, right }
}

pub fn contains_position(&self, position: usize) -> bool {
pub fn contains_position(&self, position: AirPos) -> bool {
self.left < position && position < self.right
}

@ -39,8 +41,8 @@ impl Span {
}
}

impl From<Range<usize>> for Span {
fn from(range: Range<usize>) -> Self {
impl From<Range<AirPos>> for Span {
fn from(range: Range<AirPos>) -> Self {
Self {
left: range.start,
right: range.end,
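Since Span is now measured in AirPos, spans are built from usize literals via the From conversion, and containment is strict on both ends (left < position < right). A short sketch of the updated API (illustrative only, not part of the diff):

    let span = Span::new(9.into(), 54.into());
    assert!(span.contains_position(24.into()));
    assert!(!span.contains_position(9.into())); // boundaries are excluded
    let same: Span = (AirPos::from(9)..AirPos::from(54)).into(); // via From<Range<AirPos>>

The test-file hunks that follow are the mechanical fallout of this change: every bare integer position, such as Stream::new("$stream", 27), becomes Stream::new("$stream", 27.into()).
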
@ -31,7 +31,7 @@ fn ap_with_literal() {
let actual = parse(source_code);
let expected = ap(
ApArgument::Literal("some_string"),
ApResult::Stream(Stream::new("$stream", 27)),
ApResult::Stream(Stream::new("$stream", 27.into())),
);

assert_eq!(actual, expected);
@ -46,7 +46,7 @@ fn ap_with_number() {
let actual = parse(source_code);
let expected = ap(
ApArgument::Number(Number::Int(-100)),
ApResult::Stream(Stream::new("$stream", 18)),
ApResult::Stream(Stream::new("$stream", 18.into())),
);

assert_eq!(actual, expected);
@ -61,7 +61,7 @@ fn ap_with_bool() {
let actual = parse(source_code);
let expected = ap(
ApArgument::Boolean(true),
ApResult::Stream(Stream::new("$stream", 18)),
ApResult::Stream(Stream::new("$stream", 18.into())),
);

assert_eq!(actual, expected);
@ -81,7 +81,7 @@ fn ap_with_last_error() {
}])
.unwrap(),
)),
ApResult::Stream(Stream::new("$stream", 37)),
ApResult::Stream(Stream::new("$stream", 37.into())),
);

assert_eq!(actual, expected);
@ -96,7 +96,7 @@ fn ap_with_empty_array() {
let actual = parse(source_code);
let expected = ap(
ApArgument::EmptyArray,
ApResult::Stream(Stream::new("$stream", 16)),
ApResult::Stream(Stream::new("$stream", 16.into())),
);

assert_eq!(actual, expected);
@ -111,7 +111,7 @@ fn ap_with_init_peer_id() {
let actual = parse(source_code);
let expected = ap(
ApArgument::InitPeerId,
ApResult::Stream(Stream::new("$stream", 28)),
ApResult::Stream(Stream::new("$stream", 28.into())),
);

assert_eq!(actual, expected);
@ -126,7 +126,7 @@ fn ap_with_timestamp() {
let actual = parse(source_code);
let expected = ap(
ApArgument::Timestamp,
ApResult::Stream(Stream::new("$stream", 25)),
ApResult::Stream(Stream::new("$stream", 25.into())),
);

assert_eq!(actual, expected);
@ -141,7 +141,7 @@ fn ap_with_ttl() {
let actual = parse(source_code);
let expected = ap(
ApArgument::TTL,
ApResult::Stream(Stream::new("$stream", 19)),
ApResult::Stream(Stream::new("$stream", 19.into())),
);

assert_eq!(actual, expected);
@ -157,8 +157,8 @@ fn ap_with_canon_stream() {

let actual = parse(&source_code);
let expected = ap(
ApArgument::CanonStream(CanonStreamWithLambda::new(canon_stream, None, 13)),
ApResult::Scalar(Scalar::new(scalar, 27)),
ApArgument::CanonStream(CanonStreamWithLambda::new(canon_stream, None, 13.into())),
ApResult::Scalar(Scalar::new(scalar, 27.into())),
);

assert_eq!(actual, expected);
@ -179,9 +179,9 @@ fn ap_with_canon_stream_with_lambda() {
Some(
LambdaAST::try_from_accessors(vec![ValueAccessor::ArrayAccess { idx: 0 }]).unwrap(),
),
13,
13.into(),
)),
ApResult::Scalar(Scalar::new(scalar, 33)),
ApResult::Scalar(Scalar::new(scalar, 33.into())),
);

assert_eq!(actual, expected);

@ -37,15 +37,15 @@ fn parse_json_path() {
CallInstrValue::Variable(VariableWithLambda::from_raw_value_path(
"peer_id",
vec![ValueAccessor::FieldAccessByName { field_name: "a" }],
15,
15.into(),
)),
CallInstrValue::Literal("service_id"),
CallInstrValue::Literal("function_name"),
Rc::new(vec![
Value::Literal("hello"),
Value::Variable(VariableWithLambda::scalar("name", 68)),
Value::Variable(VariableWithLambda::scalar("name", 68.into())),
]),
CallOutputValue::Stream(Stream::new("$void", 74)),
CallOutputValue::Stream(Stream::new("$void", 74.into())),
);
assert_eq!(instruction, expected);
}
@ -58,13 +58,13 @@ fn parse_empty_array() {

let actual = parse(source_code);
let expected = call(
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 15)),
CallInstrValue::Variable(VariableWithLambda::scalar("service_id", 24)),
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 15.into())),
CallInstrValue::Variable(VariableWithLambda::scalar("service_id", 24.into())),
CallInstrValue::Literal("function_name"),
Rc::new(vec![
Value::Literal(""),
Value::EmptyArray,
Value::Variable(VariableWithLambda::scalar("arg", 59)),
Value::Variable(VariableWithLambda::scalar("arg", 59.into())),
]),
CallOutputValue::None,
);
@ -80,11 +80,11 @@ fn parse_empty_array_2() {

let actual = parse(source_code);
let expected = call(
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 15)),
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 15.into())),
CallInstrValue::Literal("service_id"),
CallInstrValue::Literal("function_name"),
Rc::new(vec![
Value::Variable(VariableWithLambda::scalar("k", 55)),
Value::Variable(VariableWithLambda::scalar("k", 55.into())),
Value::EmptyArray,
Value::EmptyArray,
]),
@ -190,12 +190,12 @@ fn parse_lambda_complex() {
CallInstrValue::Variable(VariableWithLambda::from_raw_value_path(
"m",
vec![ValueAccessor::ArrayAccess { idx: 1 }],
32,
32.into(),
)),
CallInstrValue::Literal("service_id"),
CallInstrValue::Literal("function_name"),
Rc::new(vec![]),
CallOutputValue::Scalar(Scalar::new("void", 75)),
CallOutputValue::Scalar(Scalar::new("void", 75.into())),
),
call(
CallInstrValue::Variable(VariableWithLambda::from_raw_value_path(
@ -209,12 +209,12 @@ fn parse_lambda_complex() {
ValueAccessor::FieldAccessByName { field_name: "cde" },
ValueAccessor::ArrayAccess { idx: 1 },
],
99,
99.into(),
)),
CallInstrValue::Literal("service_id"),
CallInstrValue::Literal("function_name"),
Rc::new(vec![]),
CallOutputValue::Scalar(Scalar::new("void", 162)),
CallOutputValue::Scalar(Scalar::new("void", 162.into())),
),
);
assert_eq!(instruction, expected);
@ -242,12 +242,12 @@ fn parse_lambda_with_scalars_complex() {
scalar_name: "scalar_2",
},
],
32,
32.into(),
)),
CallInstrValue::Literal("service_id"),
CallInstrValue::Literal("function_name"),
Rc::new(vec![]),
CallOutputValue::Scalar(Scalar::new("void", 97)),
CallOutputValue::Scalar(Scalar::new("void", 97.into())),
),
call(
CallInstrValue::Variable(VariableWithLambda::from_raw_value_path(
@ -267,12 +267,12 @@ fn parse_lambda_with_scalars_complex() {
ValueAccessor::FieldAccessByName { field_name: "cde" },
ValueAccessor::ArrayAccess { idx: 1 },
],
121,
121.into(),
)),
CallInstrValue::Literal("service_id"),
CallInstrValue::Literal("function_name"),
Rc::new(vec![]),
CallOutputValue::Scalar(Scalar::new("void", 205)),
CallOutputValue::Scalar(Scalar::new("void", 205.into())),
),
);
assert_eq!(instruction, expected);
@ -290,7 +290,7 @@ fn json_path_square_braces() {
vec![ValueAccessor::FieldAccessByName {
field_name: "peer_id",
}],
15,
15.into(),
)),
CallInstrValue::Literal("return"),
CallInstrValue::Literal(""),
@ -304,15 +304,15 @@ fn json_path_square_braces() {
ValueAccessor::ArrayAccess { idx: 0 },
ValueAccessor::FieldAccessByName { field_name: "abc" },
],
43,
43.into(),
)),
Value::Variable(VariableWithLambda::from_raw_value_path(
"u",
vec![ValueAccessor::FieldAccessByName { field_name: "name" }],
64,
64.into(),
)),
]),
CallOutputValue::Stream(Stream::new("$void", 74)),
CallOutputValue::Stream(Stream::new("$void", 74.into())),
);

assert_eq!(instruction, expected);
@ -428,7 +428,7 @@ fn canon_stream_in_args() {
CallInstrValue::Literal(function_name),
Rc::new(vec![Value::Variable(VariableWithLambda::canon_stream(
canon_stream,
66,
66.into(),
))]),
CallOutputValue::None,
);
@ -447,7 +447,7 @@ fn canon_stream_in_triplet() {

let instruction = parse(&source_code);
let expected = call(
CallInstrValue::Variable(VariableWithLambda::canon_stream(canon_stream, 19)),
CallInstrValue::Variable(VariableWithLambda::canon_stream(canon_stream, 19.into())),
CallInstrValue::Literal(service_id),
CallInstrValue::Literal(function_name),
Rc::new(vec![]),
@ -476,7 +476,7 @@ fn canon_stream_with_lambda_in_triplet() {
ValueAccessor::FieldAccessByName { field_name: "path" },
])
.unwrap(),
19,
19.into(),
)),
CallInstrValue::Literal(service_id),
CallInstrValue::Literal(function_name),
@ -507,14 +507,14 @@ fn seq_par_call() {
CallInstrValue::Literal("local_service_id"),
CallInstrValue::Literal("local_fn_name"),
Rc::new(vec![]),
CallOutputValue::Scalar(Scalar::new("result_1", 108)),
CallOutputValue::Scalar(Scalar::new("result_1", 108.into())),
),
call(
CallInstrValue::Literal(peer_id),
CallInstrValue::Literal("service_id"),
CallInstrValue::Literal("fn_name"),
Rc::new(vec![]),
CallOutputValue::Scalar(Scalar::new("g", 183)),
CallOutputValue::Scalar(Scalar::new("g", 183.into())),
),
),
call(
@ -522,7 +522,7 @@ fn seq_par_call() {
CallInstrValue::Literal("local_service_id"),
CallInstrValue::Literal("local_fn_name"),
Rc::new(vec![]),
CallOutputValue::Scalar(Scalar::new("result_2", 273)),
CallOutputValue::Scalar(Scalar::new("result_2", 273.into())),
),
);

@ -562,14 +562,14 @@ fn seq_with_empty_and_dash() {
CallInstrValue::Literal(""),
CallInstrValue::Literal(""),
Rc::new(vec![Value::Literal("module-bytes")]),
CallOutputValue::Scalar(Scalar::new("module-bytes", 119)),
CallOutputValue::Scalar(Scalar::new("module-bytes", 119.into())),
),
call(
CallInstrValue::Literal("set_variables"),
CallInstrValue::Literal(""),
CallInstrValue::Literal(""),
Rc::new(vec![Value::Literal("module_config")]),
CallOutputValue::Scalar(Scalar::new("module_config", 201)),
CallOutputValue::Scalar(Scalar::new("module_config", 201.into())),
),
),
call(
@ -577,7 +577,7 @@ fn seq_with_empty_and_dash() {
CallInstrValue::Literal(""),
CallInstrValue::Literal(""),
Rc::new(vec![Value::Literal("blueprint")]),
CallOutputValue::Scalar(Scalar::new("blueprint", 294)),
CallOutputValue::Scalar(Scalar::new("blueprint", 294.into())),
),
),
seq(
@ -586,10 +586,10 @@ fn seq_with_empty_and_dash() {
CallInstrValue::Literal("add_module"),
CallInstrValue::Literal(""),
Rc::new(vec![
Value::Variable(VariableWithLambda::scalar("module-bytes", 381)),
Value::Variable(VariableWithLambda::scalar("module_config", 394)),
Value::Variable(VariableWithLambda::scalar("module-bytes", 381.into())),
Value::Variable(VariableWithLambda::scalar("module_config", 394.into())),
]),
CallOutputValue::Scalar(Scalar::new("module", 409)),
CallOutputValue::Scalar(Scalar::new("module", 409.into())),
),
seq(
Instruction::Call(Call {
@ -600,9 +600,9 @@ fn seq_with_empty_and_dash() {
},
args: Rc::new(vec![Value::Variable(VariableWithLambda::scalar(
"blueprint",
490,
490.into(),
))]),
output: CallOutputValue::Scalar(Scalar::new("blueprint_id", 501)),
output: CallOutputValue::Scalar(Scalar::new("blueprint_id", 501.into())),
}),
seq(
call(
@ -611,9 +611,9 @@ fn seq_with_empty_and_dash() {
CallInstrValue::Literal(""),
Rc::new(vec![Value::Variable(VariableWithLambda::scalar(
"blueprint_id",
589,
589.into(),
))]),
CallOutputValue::Scalar(Scalar::new("service_id", 603)),
CallOutputValue::Scalar(Scalar::new("service_id", 603.into())),
),
call(
CallInstrValue::Literal("remote_peer_id"),
@ -621,9 +621,9 @@ fn seq_with_empty_and_dash() {
CallInstrValue::Literal(""),
Rc::new(vec![Value::Variable(VariableWithLambda::scalar(
"service_id",
671,
671.into(),
))]),
CallOutputValue::Scalar(Scalar::new("client_result", 683)),
CallOutputValue::Scalar(Scalar::new("client_result", 683.into())),
),
),
),
@ -642,9 +642,9 @@ fn no_output() {
let actual = parse(source_code);

let expected = call(
CallInstrValue::Variable(VariableWithLambda::scalar("peer", 15)),
CallInstrValue::Variable(VariableWithLambda::scalar("service", 21)),
CallInstrValue::Variable(VariableWithLambda::scalar("fname", 29)),
CallInstrValue::Variable(VariableWithLambda::scalar("peer", 15.into())),
CallInstrValue::Variable(VariableWithLambda::scalar("service", 21.into())),
CallInstrValue::Variable(VariableWithLambda::scalar("fname", 29.into())),
Rc::new(vec![]),
CallOutputValue::None,
);

@ -33,8 +33,8 @@ fn canon_with_literal_peer_id() {
let actual = parse(&source_code);
let expected = canon(
CallInstrValue::Literal(peer_id),
Stream::new(stream, 26),
CanonStream::new(canon_stream, 34),
Stream::new(stream, 26.into()),
CanonStream::new(canon_stream, 34.into()),
);

assert_eq!(actual, expected);
@ -51,9 +51,9 @@ fn canon_with_variable_peer_id() {

let actual = parse(&source_code);
let expected = canon(
CallInstrValue::Variable(VariableWithLambda::scalar(peer_id, 16)),
Stream::new(stream, 24),
CanonStream::new(canon_stream, 32),
CallInstrValue::Variable(VariableWithLambda::scalar(peer_id, 16.into())),
Stream::new(stream, 24.into()),
CanonStream::new(canon_stream, 32.into()),
);

assert_eq!(actual, expected);

@ -47,7 +47,7 @@ fn parse_fail_scalars() {
(fail scalar)
"#;
let instruction = parse(source_code);
let expected = fail_scalar(ScalarWithLambda::new("scalar", None, 18));
let expected = fail_scalar(ScalarWithLambda::new("scalar", None, 18.into()));
assert_eq!(instruction, expected)
}

@ -65,7 +65,7 @@ fn parse_fail_scalar_with_lambda() {
}])
.unwrap(),
),
18,
18.into(),
));
assert_eq!(instruction, expected)
}

@ -272,10 +272,10 @@ fn parse_fold() {
"#;
let instruction = parse(&source_code);
let expected = fold_scalar_variable(
ScalarWithLambda::new("iterable", None, 15),
Scalar::new("i", 24),
ScalarWithLambda::new("iterable", None, 15.into()),
Scalar::new("i", 24.into()),
null(),
Span::new(9, 54),
Span::new(9.into(), 54.into()),
);
assert_eq!(instruction, expected);
}
@ -293,11 +293,11 @@ fn fold_json_path() {
ScalarWithLambda::from_value_path(
"members",
vec![ValueAccessor::ArrayAccess { idx: 123321 }],
33,
33.into(),
),
Scalar::new("m", 52),
Scalar::new("m", 52.into()),
null(),
Span::new(27, 61),
Span::new(27.into(), 61.into()),
);
assert_eq!(instruction, expected);
}
@ -311,7 +311,11 @@ fn fold_empty_array_iterable() {
"#;

let instruction = parse(source_code);
let expected = fold_scalar_empty_array(Scalar::new("m", 18), null(), Span::new(9, 48));
let expected = fold_scalar_empty_array(
Scalar::new("m", 18.into()),
null(),
Span::new(9.into(), 48.into()),
);
assert_eq!(instruction, expected);
}

@ -323,10 +327,10 @@ fn fold_on_stream() {

let instruction = parse(source_code);
let expected = fold_stream(
Stream::new("$stream", 15),
Scalar::new("iterator", 23),
Stream::new("$stream", 15.into()),
Scalar::new("iterator", 23.into()),
null(),
Span::new(9, 39),
Span::new(9.into(), 39.into()),
);
assert_eq!(instruction, expected);
}
@ -341,10 +345,10 @@ fn fold_on_canon_stream() {

let instruction = parse(&source_code);
let expected = fold_scalar_canon_stream(
CanonStream::new(canon_stream, 15),
Scalar::new(iterator, 29),
CanonStream::new(canon_stream, 15.into()),
Scalar::new(iterator, 29.into()),
null(),
Span::new(9, 45),
Span::new(9.into(), 45.into()),
);
assert_eq!(instruction, expected);
}
@ -366,11 +370,11 @@ fn comments() {
},
ValueAccessor::ArrayAccess { idx: 1 },
],
33,
33.into(),
),
Scalar::new("m", 52),
Scalar::new("m", 52.into()),
null(),
Span::new(27, 61),
Span::new(27.into(), 61.into()),
);
assert_eq!(instruction, expected);
}
@ -387,10 +391,10 @@ fn parse_fold_with_xor_par_seq() {
let instruction = parse(&source_code);
let instr = binary_instruction(*name);
let expected = fold_scalar_variable(
ScalarWithLambda::new("iterable", None, 6),
Scalar::new("i", 15),
ScalarWithLambda::new("iterable", None, 6.into()),
Scalar::new("i", 15.into()),
instr(null(), null()),
Span::new(0, 58),
Span::new(0.into(), 58.into()),
);
assert_eq!(instruction, expected);
}

@ -31,8 +31,8 @@ fn parse_match() {
"#;
let instruction = parse(&source_code);
let expected = match_(
Value::Variable(VariableWithLambda::scalar("v1", 16)),
Value::Variable(VariableWithLambda::scalar("v2", 19)),
Value::Variable(VariableWithLambda::scalar("v1", 16.into())),
Value::Variable(VariableWithLambda::scalar("v2", 19.into())),
null(),
);
assert_eq!(instruction, expected);
@ -53,9 +53,9 @@ fn parse_match_with_canon_stream() {
Value::Variable(VariableWithLambda::canon_stream_wl(
canon_stream,
LambdaAST::try_from_accessors(vec![ValueAccessor::ArrayAccess { idx: 0 }]).unwrap(),
16,
16.into(),
)),
Value::Variable(VariableWithLambda::scalar("v2", 36)),
Value::Variable(VariableWithLambda::scalar("v2", 36.into())),
null(),
);
assert_eq!(instruction, expected);
@ -70,7 +70,7 @@ fn parse_match_with_init_peer_id() {
"#;
let instruction = parse(&source_code);
let expected = match_(
Value::Variable(VariableWithLambda::scalar("v1", 16)),
Value::Variable(VariableWithLambda::scalar("v1", 16.into())),
Value::InitPeerId,
null(),
);
@ -87,7 +87,7 @@ fn parse_match_with_timestamp() {
let instruction = parse(source_code);
let expected = match_(
Value::Timestamp,
Value::Variable(VariableWithLambda::scalar("v1", 28)),
Value::Variable(VariableWithLambda::scalar("v1", 28.into())),
null(),
);
assert_eq!(instruction, expected);
@ -103,7 +103,7 @@ fn parse_match_with_ttl() {
let instruction = parse(source_code);
let expected = match_(
Value::TTL,
Value::Variable(VariableWithLambda::scalar("v1", 22)),
Value::Variable(VariableWithLambda::scalar("v1", 22.into())),
null(),
);
assert_eq!(instruction, expected);
@ -118,8 +118,8 @@ fn parse_mismatch() {
"#;
let instruction = parse(&source_code);
let expected = mismatch(
Value::Variable(VariableWithLambda::scalar("v1", 19)),
Value::Variable(VariableWithLambda::scalar("v2", 22)),
Value::Variable(VariableWithLambda::scalar("v1", 19.into())),
Value::Variable(VariableWithLambda::scalar("v2", 22.into())),
null(),
);
assert_eq!(instruction, expected);
@ -133,7 +133,7 @@ fn match_with_bool() {
)
"#;

let left_value = Value::Variable(VariableWithLambda::scalar("isOnline", 17));
let left_value = Value::Variable(VariableWithLambda::scalar("isOnline", 17.into()));
let right_value = Value::Boolean(true);
let null = null();
let expected = match_(left_value, right_value, null);
@ -151,7 +151,7 @@ fn mismatch_with_bool() {
"#;

let left_value = Value::Boolean(true);
let right_value = Value::Variable(VariableWithLambda::scalar("isOnline", 25));
let right_value = Value::Variable(VariableWithLambda::scalar("isOnline", 25.into()));
let null = null();
let expected = mismatch(left_value, right_value, null);

@ -167,7 +167,7 @@ fn match_with_empty_array() {
)
"#;

let left_value = Value::Variable(VariableWithLambda::scalar("variable", 17));
let left_value = Value::Variable(VariableWithLambda::scalar("variable", 17.into()));
let right_value = Value::EmptyArray;
let instr = null();
let expected = match_(left_value, right_value, instr);
@ -182,7 +182,7 @@ fn match_with_empty_array() {
"#;

let left_value = Value::EmptyArray;
let right_value = Value::Variable(VariableWithLambda::scalar("variable", 20));
let right_value = Value::Variable(VariableWithLambda::scalar("variable", 20.into()));
let instr = null();
let expected = match_(left_value, right_value, instr);

@ -198,7 +198,7 @@ fn mismatch_with_empty_array() {
)
"#;

let left_value = Value::Variable(VariableWithLambda::scalar("variable", 20));
let left_value = Value::Variable(VariableWithLambda::scalar("variable", 20.into()));
let right_value = Value::EmptyArray;
let instr = null();
let expected = mismatch(left_value, right_value, instr);
@ -213,7 +213,7 @@ fn mismatch_with_empty_array() {
"#;

let left_value = Value::EmptyArray;
let right_value = Value::Variable(VariableWithLambda::scalar("variable", 23));
let right_value = Value::Variable(VariableWithLambda::scalar("variable", 23.into()));
let instr = null();
let expected = mismatch(left_value, right_value, instr);

@ -30,9 +30,9 @@ fn parse_new_with_scalar() {

let instruction = parse(source_code);
let expected = new(
NewArgument::Scalar(Scalar::new("scalar", 5)),
NewArgument::Scalar(Scalar::new("scalar", 5.into())),
null(),
Span::new(0, 40),
Span::new(0.into(), 40.into()),
);
assert_eq!(instruction, expected);
}
@ -46,9 +46,9 @@ fn parse_new_with_stream() {

let instruction = parse(source_code);
let expected = new(
NewArgument::Stream(Stream::new("$stream", 5)),
NewArgument::Stream(Stream::new("$stream", 5.into())),
null(),
Span::new(0, 41),
Span::new(0.into(), 41.into()),
);
assert_eq!(instruction, expected);
}
@ -62,9 +62,9 @@ fn parse_new_with_canon_stream() {

let instruction = parse(source_code);
let expected = new(
NewArgument::CanonStream(CanonStream::new("#canon_stream", 5)),
NewArgument::CanonStream(CanonStream::new("#canon_stream", 5.into())),
null(),
Span::new(0, 47),
Span::new(0.into(), 47.into()),
);
assert_eq!(instruction, expected);
}

@ -32,11 +32,11 @@ fn parse_seq() {
let instruction = parse(source_code);
let expected = seq(
call(
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 32)),
CallInstrValue::Variable(VariableWithLambda::scalar("service_id", 41)),
CallInstrValue::Variable(VariableWithLambda::scalar("function_name", 52)),
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 32.into())),
CallInstrValue::Variable(VariableWithLambda::scalar("service_id", 41.into())),
CallInstrValue::Variable(VariableWithLambda::scalar("function_name", 52.into())),
Rc::new(vec![Value::EmptyArray, Value::EmptyArray]),
CallOutputValue::Scalar(Scalar::new("output", 75)),
CallOutputValue::Scalar(Scalar::new("output", 75.into())),
),
call(
CallInstrValue::Literal("peer_id"),
@ -45,7 +45,7 @@ fn parse_seq() {
Rc::new(vec![
Value::Literal("hello"),
Value::EmptyArray,
Value::Variable(VariableWithLambda::scalar("name", 154)),
Value::Variable(VariableWithLambda::scalar("name", 154.into())),
]),
CallOutputValue::None,
),
@ -68,16 +68,16 @@ fn parse_seq_seq() {
let expected = seq(
seq(
call(
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 53)),
CallInstrValue::Variable(VariableWithLambda::scalar("service_id", 62)),
CallInstrValue::Variable(VariableWithLambda::scalar("function_name", 73)),
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 53.into())),
CallInstrValue::Variable(VariableWithLambda::scalar("service_id", 62.into())),
CallInstrValue::Variable(VariableWithLambda::scalar("function_name", 73.into())),
Rc::new(vec![]),
CallOutputValue::None,
),
call(
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 114)),
CallInstrValue::Variable(VariableWithLambda::scalar("peer_id", 114.into())),
CallInstrValue::Literal("service_B"),
CallInstrValue::Variable(VariableWithLambda::scalar("function_name", 135)),
CallInstrValue::Variable(VariableWithLambda::scalar("function_name", 135.into())),
Rc::new(vec![]),
CallOutputValue::None,
),
@ -88,9 +88,9 @@ fn parse_seq_seq() {
CallInstrValue::Literal("function_name"),
Rc::new(vec![
Value::Literal("hello"),
Value::Variable(VariableWithLambda::scalar("name", 236)),
Value::Variable(VariableWithLambda::scalar("name", 236.into())),
]),
CallOutputValue::Stream(Stream::new("$output", 242)),
CallOutputValue::Stream(Stream::new("$output", 242.into())),
),
);
assert_eq!(instruction, expected);

@ -16,6 +16,7 @@

use crate::ast::*;

use super::lexer::AirPos;
use crate::parser::lexer::Token;
use crate::parser::ParserError;
use crate::parser::Span;
@ -152,7 +153,7 @@ impl<'i> VariableValidator<'i> {
self.met_variable_name_definition(ap.result.name(), span);
}

pub(super) fn finalize(self) -> Vec<ErrorRecovery<usize, Token<'i>, ParserError>> {
pub(super) fn finalize(self) -> Vec<ErrorRecovery<AirPos, Token<'i>, ParserError>> {
ValidatorErrorBuilder::new(self)
.check_undefined_variables()
.check_undefined_iterables()
@ -274,7 +275,7 @@ impl<'i> VariableValidator<'i> {
}

struct ValidatorErrorBuilder<'i> {
errors: Vec<ErrorRecovery<usize, Token<'i>, ParserError>>,
errors: Vec<ErrorRecovery<AirPos, Token<'i>, ParserError>>,
validator: VariableValidator<'i>,
}

@ -384,7 +385,7 @@ impl<'i> ValidatorErrorBuilder<'i> {
self
}

fn build(self) -> Vec<ErrorRecovery<usize, Token<'i>, ParserError>> {
fn build(self) -> Vec<ErrorRecovery<AirPos, Token<'i>, ParserError>> {
self.errors
}

@ -406,7 +407,7 @@ impl<'i> ValidatorErrorBuilder<'i> {
}

fn add_to_errors<'i>(
errors: &mut Vec<ErrorRecovery<usize, Token<'i>, ParserError>>,
errors: &mut Vec<ErrorRecovery<AirPos, Token<'i>, ParserError>>,
span: Span,
token: Token<'i>,
error: ParserError,
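ErrorRecovery here comes from lalrpop_util, whose first type parameter is the source location type, so this hunk threads AirPos through parser error recovery as well. A hedged sketch of what reading such an error could look like (error_position is a hypothetical helper, not part of the diff):

    // Assumes lalrpop_util::{ErrorRecovery, ParseError}; only the
    // UnrecognizedToken case is handled, the rest are ignored here.
    fn error_position(recovery: &ErrorRecovery<AirPos, Token<'_>, ParserError>) -> Option<AirPos> {
        match &recovery.error {
            lalrpop_util::ParseError::UnrecognizedToken { token: (start, _, _), .. } => Some(*start),
            _ => None,
        }
    }
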
@ -12,3 +12,6 @@ categories = ["wasm"]
[lib]
name = "air_execution_info_collector"
path = "src/lib.rs"

[dependencies]
air-parser = { path = "../air-parser" }

@ -14,6 +14,8 @@
* limitations under the License.
*/

use air_parser::AirPos;

use std::collections::HashMap;

/// Intended to track a number of executed instruction of each type. For instructions that
@ -63,7 +65,7 @@ pub struct NewTracker {
/// Mapping from a new instruction position in a script
/// to a number of their execution. This is needed to
/// support private stream generation mappings.
pub executed_count: HashMap<usize, u32>,
pub executed_count: HashMap<AirPos, u32>,
}

impl InstructionTracker {
@ -123,7 +125,7 @@ impl InstructionTracker {
self.xor_count += 1;
}

pub fn meet_new(&mut self, position: usize) {
pub fn meet_new(&mut self, position: AirPos) {
use std::collections::hash_map::Entry::{Occupied, Vacant};

match self.new_tracker.executed_count.entry(position) {
@ -136,7 +138,7 @@ impl InstructionTracker {
}
}

impl NewTracker {
pub fn get_iteration(&self, position: usize) -> u32 {
pub fn get_iteration(&self, position: AirPos) -> u32 {
self.executed_count
.get(&position)
.copied()
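Because AirPos derives Hash and Eq (see text_pos.rs above), it can key the executed_count map directly. A small sketch of how the tracker counts repeated executions of the same new instruction (illustrative only; it assumes InstructionTracker implements Default and that the entry arms elided from the hunk insert 1 on Vacant and increment on Occupied):

    let mut tracker = InstructionTracker::default(); // assumption: Default is derived
    let position: AirPos = 5.into();
    tracker.meet_new(position); // first execution at this position
    tracker.meet_new(position); // second one bumps the counter
    assert_eq!(tracker.new_tracker.get_iteration(position), 2);
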
@ -15,6 +15,7 @@ path = "src/lib.rs"

[dependencies]
air-utils = { path = "../utils" }
air-parser = { path = "../air-parser" }

serde = {version = "1.0.144", features = ["derive", "rc"]}
serde_json = "1.0.85"

@ -14,6 +14,8 @@
* limitations under the License.
*/

use air_parser::AirPos;

use std::collections::HashMap;

/// Mapping from a stream name to it's generation count.
@ -28,4 +30,4 @@ pub type GlobalStreamGens = HashMap<String, u32>;
/// so it could be met several times during script execution. This field anchors iteration
/// where it was met.
/// Similar to pi-calculus restricted names/channels.
pub type RestrictedStreamGens = HashMap<String, HashMap<u32, Vec<u32>>>;
pub type RestrictedStreamGens = HashMap<String, HashMap<AirPos, Vec<u32>>>;
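Since AirPos is #[serde(transparent)], swapping it in for the u32 keys should leave the wire format of RestrictedStreamGens unchanged: serde_json still renders the integer keys as JSON strings. A quick illustrative check (assuming the crate's existing serde_json dependency):

    use std::collections::HashMap;

    let mut inner = HashMap::new();
    inner.insert(AirPos::from(5), vec![1u32, 2]);
    let mut gens: RestrictedStreamGens = HashMap::new();
    gens.insert("$stream".to_string(), inner);
    // The AirPos key serializes exactly like a bare number.
    assert_eq!(serde_json::to_string(&gens).unwrap(), r#"{"$stream":{"5":[1,2]}}"#);
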
@ -60,34 +60,34 @@ impl<'input> LambdaASTLexer<'input> {
return Some(self.try_parse_first_token());
}

self.chars.next().map(|(start_pos, ch)| match ch {
'[' => Ok((start_pos, Token::OpenSquareBracket, start_pos + 1)),
']' => Ok((start_pos, Token::CloseSquareBracket, start_pos + 1)),
self.chars.next().map(|(start_offset, ch)| match ch {
'[' => Ok((start_offset, Token::OpenSquareBracket, start_offset + 1)),
']' => Ok((start_offset, Token::CloseSquareBracket, start_offset + 1)),

'.' => Ok((start_pos, Token::ValuePathSelector, start_pos + 1)),
'.' => Ok((start_offset, Token::ValuePathSelector, start_offset + 1)),

d if d.is_digit(ARRAY_IDX_BASE) => self.tokenize_arrays_idx(start_pos),
s if is_air_alphanumeric(s) => self.tokenize_field_name(start_pos),
d if d.is_digit(ARRAY_IDX_BASE) => self.tokenize_arrays_idx(start_offset),
s if is_air_alphanumeric(s) => self.tokenize_field_name(start_offset),

'!' => Ok((start_pos, Token::FlatteningSign, start_pos + 1)),
'!' => Ok((start_offset, Token::FlatteningSign, start_offset + 1)),

_ => Err(LexerError::UnexpectedSymbol(start_pos, start_pos + 1)),
_ => Err(LexerError::UnexpectedSymbol(start_offset, start_offset + 1)),
})
}

fn tokenize_arrays_idx(
&mut self,
start_pos: usize,
start_offset: usize,
) -> Spanned<Token<'input>, usize, LexerError> {
let array_idx = self.tokenize_until(start_pos, |ch| ch.is_digit(ARRAY_IDX_BASE));
let array_idx = self.tokenize_until(start_offset, |ch| ch.is_digit(ARRAY_IDX_BASE));
match array_idx
.parse::<u32>()
.map_err(|e| LexerError::ParseIntError(start_pos, start_pos + array_idx.len(), e))
.map_err(|e| LexerError::ParseIntError(start_offset, start_offset + array_idx.len(), e))
{
Ok(idx) => Ok((
start_pos,
start_offset,
Token::NumberAccessor(idx),
start_pos + array_idx.len(),
start_offset + array_idx.len(),
)),
Err(e) => Err(e),
}
@ -95,23 +95,23 @@ impl<'input> LambdaASTLexer<'input> {

fn tokenize_field_name(
&mut self,
start_pos: usize,
start_offset: usize,
) -> Spanned<Token<'input>, usize, LexerError> {
let field_name = self.tokenize_until(start_pos, is_air_alphanumeric);
let field_name = self.tokenize_until(start_offset, is_air_alphanumeric);

Ok((
start_pos,
start_offset,
Token::StringAccessor(field_name),
start_pos + field_name.len(),
start_offset + field_name.len(),
))
}

fn tokenize_until(
&mut self,
start_pos: usize,
start_offset: usize,
condition: impl Fn(char) -> bool,
) -> &'input str {
let mut end_pos = start_pos;
let mut end_pos = start_offset;
while let Some((pos, ch)) = self.chars.peek() {
if !condition(*ch) {
break;
@ -120,7 +120,7 @@ impl<'input> LambdaASTLexer<'input> {
self.chars.next();
}

&self.input[start_pos..end_pos + 1]
&self.input[start_offset..end_pos + 1]
}

fn try_parse_first_token(&mut self) -> Spanned<Token<'input>, usize, LexerError> {
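Note that this last file is a pure rename rather than a type change: the lambda lexer keeps plain usize offsets (hence start_offset), since they are relative to the lambda expression rather than absolute positions in the AIR script, and the Spanned return types still use usize. If a caller needed an absolute position, AirPos's Add<usize> impl composes the two (illustrative only, not part of the diff):

    let lambda_start: AirPos = 42.into(); // where the lambda begins in the script
    let start_offset: usize = 3;          // offset inside the lambda expression
    let absolute: AirPos = lambda_start + start_offset;
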