From 3bb9a4db7e43e86c382848ab42cc8b7bcb0d7eeb Mon Sep 17 00:00:00 2001
From: dmitry
Date: Tue, 2 Mar 2021 19:46:27 +0300
Subject: [PATCH] WIP: passers

---
 build.sbt                                     |   5 +-
 src/main/resources/experimental.aqua          |   4 +-
 src/main/resources/typecheck.aqua             |   2 +-
 src/main/scala/aqua/Aqua.scala                |  23 ++--
 .../aqua/{Error.scala => AquaError.scala}     |  12 +-
 src/main/scala/aqua/Main.scala                |   2 +-
 src/main/scala/aqua/ir/Air.scala              |  13 +-
 src/main/scala/aqua/model/Acc.scala           |  17 +--
 src/main/scala/aqua/model/DataAcc.scala       |  85 +++++++++++++
 src/main/scala/aqua/model/InOutAcc.scala      | 117 ++++--------------
 src/main/scala/aqua/model/Names.scala         |   7 +-
 src/main/scala/aqua/model/Passer.scala        | 106 ++++++++++++++++
 src/main/scala/aqua/model/ScopePasser.scala   |  38 ++++++
 .../scala/aqua/model/marker/ArrowMarker.scala |  10 ++
 .../scala/aqua/model/marker/DataMarker.scala  |  12 ++
 src/main/scala/aqua/model/marker/Marker.scala |   3 +
 .../scala/aqua/model/marker/TypeMarker.scala  |  10 ++
 src/main/scala/aqua/parser/ArrowMarker.scala  |  14 ---
 src/main/scala/aqua/parser/Block.scala        |  56 ++++-----
 src/main/scala/aqua/parser/Expression.scala   |   5 +
 src/main/scala/aqua/parser/FuncOp.scala       |  94 +++++++-------
 src/main/scala/aqua/parser/TypeMarker.scala   |  14 ---
 src/main/scala/aqua/parser/lexer/Token.scala  |   8 +-
 src/main/scala/aqua/parser/lexer/Type.scala   |   1 +
 src/main/scala/aqua/parser/lift/Span.scala    |   5 +
 .../scala/aqua/parser/lexer/TokenSpec.scala   |  24 ++--
 26 files changed, 433 insertions(+), 254 deletions(-)
 rename src/main/scala/aqua/{Error.scala => AquaError.scala} (71%)
 create mode 100644 src/main/scala/aqua/model/DataAcc.scala
 create mode 100644 src/main/scala/aqua/model/Passer.scala
 create mode 100644 src/main/scala/aqua/model/ScopePasser.scala
 create mode 100644 src/main/scala/aqua/model/marker/ArrowMarker.scala
 create mode 100644 src/main/scala/aqua/model/marker/DataMarker.scala
 create mode 100644 src/main/scala/aqua/model/marker/Marker.scala
 create mode 100644 src/main/scala/aqua/model/marker/TypeMarker.scala
 delete mode 100644 src/main/scala/aqua/parser/ArrowMarker.scala
 create mode 100644 src/main/scala/aqua/parser/Expression.scala
 delete mode 100644 src/main/scala/aqua/parser/TypeMarker.scala

diff --git a/build.sbt b/build.sbt
index 4ca830f9..b731c9cd 100644
--- a/build.sbt
+++ b/build.sbt
@@ -13,8 +13,11 @@ lazy val root = project
     libraryDependencies ++= Seq(
       "org.typelevel" %% "cats-effect" % "3.0.0-RC2",
       "org.typelevel" %% "cats-parse" % "0.3.1",
-      "org.typelevel" %% "cats-free" % catsV
+      "org.typelevel" %% "cats-free" % catsV,
+      "com.chuusai" %% "shapeless" % "2.3.3"
     ),
     libraryDependencies += "org.scalactic" %% "scalactic" % "3.2.5" % Test,
     libraryDependencies += "org.scalatest" %% "scalatest" % "3.2.5" % Test
   )
+
+addCompilerPlugin("org.typelevel" %% "kind-projector" % "0.11.3" cross CrossVersion.full)

diff --git a/src/main/resources/experimental.aqua b/src/main/resources/experimental.aqua
index f4228cd0..90110b98 100644
--- a/src/main/resources/experimental.aqua
+++ b/src/main/resources/experimental.aqua
@@ -11,7 +11,7 @@ service MySrv:
     pure: -> []Cdef

 func do_smth( a: X, b: -> Z ): -- And comments after the line
-    b()
+    a <- b()
     x(a)

 func do_smth2( a: X, b: -> Z ): -- And comments after the line
@@ -20,4 +20,4 @@ func do_smth2( a: X, b: -> Z ): -- And comments after the line

 alias Akaka : i32

-alias Akaka : bool
\ No newline at end of file
+alias Akaka : bool

diff --git a/src/main/resources/typecheck.aqua b/src/main/resources/typecheck.aqua
index e08f524b..8e8ff692 100644
--- a/src/main/resources/typecheck.aqua
+++ b/src/main/resources/typecheck.aqua
@@ -11,7 +11,7 @@ data Enclosing:
     arr: []Wrapping
     flag: bool

-func call(enc: Enclosing, oni32: i32 -> (), onString: string -> (), onBool: bool -> ()) -> ():
+func call(enc: Enclosing, oni32: i32 -> (), onString: string -> (), onBool: bool -> (), onInside: Inside -> Inside) -> Inside:
     onBool(enc.flag)
     onBool(enc.wrap)

diff --git a/src/main/scala/aqua/Aqua.scala b/src/main/scala/aqua/Aqua.scala
index 1e34eac0..048ecca6 100644
--- a/src/main/scala/aqua/Aqua.scala
+++ b/src/main/scala/aqua/Aqua.scala
@@ -1,28 +1,29 @@
 package aqua

-import aqua.model.Names
+import aqua.model.{DataAcc, Passer, ScopePasser}
 import aqua.parser.Block
 import cats.data.{NonEmptyList, Validated, ValidatedNel}
 import cats.parse.{Parser => P, Parser0 => P0}
 import aqua.parser.lift.Span
+import cats.data.Validated.Valid
+import shapeless.HNil

 object Aqua {

   import aqua.parser.lexer.Token._

-  val `parser`: P0[List[Block[Span.F]]] = P.repSep0(Block.`block`[Span.F], ` \n*`) <* ` \n*`
+  private val parser: P0[List[Block[Span.F, HNil]]] = P.repSep0(Block.`block`[Span.F], ` \n+`) <* ` \n+`

-  def parse(input: String): ValidatedNel[Error, List[Block[Span.F]]] =
+  val passer = Passer.hnil[Span.F].andThen(new ScopePasser(_)).andThen(new DataAcc.Pass(_))
+
+  def parse(input: String): ValidatedNel[AquaError, List[Block[Span.F, passer.Out]]] =
     Validated
       .fromEither(
-        `parser`
+        parser
           .parseAll(input)
           .left
-          .map(pe => NonEmptyList.one[Error](SyntaxError(pe.failedAtOffset, pe.expected)))
-      )
-      .andThen(blocks =>
-        Names
-          .foldVerify[Span.F](blocks.map(Names.blockNames(_)))
-          .leftMap(_.map(sp => NamesError(sp._1, sp._2)))
-          .map(_ => blocks)
+          .map(pe => NonEmptyList.one[AquaError](SyntaxError(pe.failedAtOffset, pe.expected)))
       )
+      .andThen { blocks =>
+        passer.pass(blocks).leftMap(_.map(sv => NamesError(sv._1, sv._2)))
+      }
 }

diff --git a/src/main/scala/aqua/Error.scala b/src/main/scala/aqua/AquaError.scala
similarity index 71%
rename from src/main/scala/aqua/Error.scala
rename to src/main/scala/aqua/AquaError.scala
index 559f37a2..aad8880c 100644
--- a/src/main/scala/aqua/Error.scala
+++ b/src/main/scala/aqua/AquaError.scala
@@ -4,22 +4,22 @@ import aqua.parser.lift.Span
 import cats.data.NonEmptyList
 import cats.parse.Parser.Expectation

-sealed trait Error {
+sealed trait AquaError {
   def showForConsole(script: String): String
 }

-case class SyntaxError(offset: Int, expectations: NonEmptyList[Expectation]) extends Error {
+case class SyntaxError(offset: Int, expectations: NonEmptyList[Expectation]) extends AquaError {

   override def showForConsole(script: String): String =
     Span(offset, offset + 1)
       .focus(script, 3)
       .map(_.toConsoleStr(s"Syntax error, expected: ${expectations.toList.mkString(", ")}", Console.RED))
       .getOrElse(
-        "(offset is beyond the script)"
-      ) + "\n"
+        "(offset is beyond the script, syntax errors) " + Console.RED + expectations.toList.mkString(", ")
+      ) + Console.RESET + "\n"
 }

-case class NamesError(span: Span, hint: String) extends Error {
+case class NamesError(span: Span, hint: String) extends AquaError {

   override def showForConsole(script: String): String =
     span
@@ -28,7 +28,7 @@ case class NamesError(span: Span, hint: String) extends Error {
       .getOrElse("(offset is beyond the script)") + "\n"
 }

-case class GetTypeError(span: Span, hint: String) extends Error {
+case class GetTypeError(span: Span, hint: String) extends AquaError {

   override def showForConsole(script: String): String =
     span

diff --git a/src/main/scala/aqua/Main.scala b/src/main/scala/aqua/Main.scala
index 1e151eec..d549b852 100644
--- a/src/main/scala/aqua/Main.scala
+++ b/src/main/scala/aqua/Main.scala
@@ -19,7 +19,7 @@ object Main extends IOApp.Simple {
         println(Console.RED + s"Aqua script errored, total ${errs.length} problems found" + Console.RESET)
     }

-  val experimental = Source.fromResource("typecheck.aqua").mkString
+  val experimental = Source.fromResource("experimental.aqua").mkString
   tryParse(experimental)

 }

diff --git a/src/main/scala/aqua/ir/Air.scala b/src/main/scala/aqua/ir/Air.scala
index 4f07af46..cf1b368d 100644
--- a/src/main/scala/aqua/ir/Air.scala
+++ b/src/main/scala/aqua/ir/Air.scala
@@ -44,7 +44,7 @@ object DataView {
   case class VarLens(name: String, lens: String) extends DataView

   implicit val show: Show[DataView] = Show.show {
-    case StringScalar(v) ⇒ "\""+v+"\""
+    case StringScalar(v) ⇒ v
     case InitPeerId ⇒ "%init_peer_id%"
     case LastError ⇒ "%last_error%"
     case Variable(name) ⇒ name
@@ -62,8 +62,8 @@ object Triplet {
   case class Full(peerId: DataView, serviceId: DataView, functionName: String) extends Triplet

   implicit val show: Show[Triplet] = Show.show {
-    case FromData(ps, fn) ⇒ s"${ps.show} "+"\""+fn+"\""
-    case Full(p, s, fn) ⇒ s"${p.show} (${s.show} "+"\""+fn+"\""
+    case FromData(ps, fn) ⇒ s"${ps.show} " + "\"" + fn + "\""
+    case Full(p, s, fn) ⇒ s"${p.show} (${s.show} " + "\"" + fn + "\""
   }
 }
@@ -89,7 +89,6 @@ object Air {

   case class Call(triplet: Triplet, args: List[DataView], result: Option[String]) extends Air(Keyword.Call)

-
   private def show(depth: Int, air: Air): String = {

     def showNext(a: Air) = show(depth + 1, a)
@@ -104,10 +103,10 @@ object Air {
       case Air.Par(l, r) ⇒ s"\n${showNext(l)}${showNext(r)}$space"
       case Air.Seq(l, r) ⇒ s"\n${showNext(l)}${showNext(r)}$space"
       case Air.Xor(l, r) ⇒ s"\n${showNext(l)}${showNext(r)}$space"
-      case Air.Call(triplet, args, res) ⇒ s" ${triplet.show} [${args.map(_.show).mkString(", ")}]${res.fold("")(" " + _)}"
+      case Air.Call(triplet, args, res) ⇒
+        s" ${triplet.show} [${args.map(_.show).mkString(", ")}]${res.fold("")(" " + _)}"
     }) + ")\n"
   }
-
   implicit val s: Show[Air] = Show.show(show(0, _))
-}
\ No newline at end of file
+}

diff --git a/src/main/scala/aqua/model/Acc.scala b/src/main/scala/aqua/model/Acc.scala
index a9211ecd..d0fb5612 100644
--- a/src/main/scala/aqua/model/Acc.scala
+++ b/src/main/scala/aqua/model/Acc.scala
@@ -1,12 +1,12 @@
 package aqua.model

-import aqua.parser.lexer.{ArrayType, ArrowType, BasicType, CustomType, Token, Type, Value, VarLambda}
-import cats.{Comonad, Functor}
+import aqua.parser.lexer.{ArrayType, ArrowType, BasicType, CustomType, Type, Value, VarLambda}
+import cats.Comonad
 import cats.data.NonEmptyList
 import cats.syntax.comonad._
 import cats.syntax.functor._

-case class Acc[F[_], T <: Token[F]](data: Map[String, NonEmptyList[T]]) {
+case class Acc[F[_], T](data: Map[String, NonEmptyList[T]]) {

   def add(other: Acc[F, T], subtract: Set[String] = Set.empty): Acc[F, T] =
     copy(data = (other.data -- subtract).foldLeft(data) {
@@ -18,22 +18,17 @@ case class Acc[F[_], T <: Token[F]](data: Map[String, NonEmptyList[T]]) {
   def sub(n: String): Acc[F, T] = copy(data = data - n)

-  def erase: Acc[F, T] = Acc.empty
+  def erase: Acc[F, T] = Acc.empty[F, T]

   def addOne(n: String, v: T): Acc[F, T] = add(Acc.one(n, v))

   def takeKeys(keys: Set[String]): Acc[F, T] = copy(data = data.view.filterKeys(keys).toMap)
-
-  def toErrors(toMsg: (String, T) => String)(implicit F: Functor[F]): List[F[String]] =
-    data.flatMap {
-      case (k, vs) => vs.toList.map(v => v.as(toMsg(k, v)))
-    }.toList
 }

 object Acc {
-  def empty[F[_], T <: Token[F]]: Acc[F, T] = Acc(Map.empty[String, NonEmptyList[T]])
+  def empty[F[_], T]: Acc[F, T] = Acc(Map.empty[String, NonEmptyList[T]])

-  def one[F[_], T <: Token[F]](n: String, v: T): Acc[F, T] = Acc(Map(n -> NonEmptyList.one(v)))
+  def one[F[_], T](n: String, v: T): Acc[F, T] = Acc(Map(n -> NonEmptyList.one(v)))

   def fromValues[F[_]: Comonad](args: List[Value[F]]): Acc[F, Value[F]] =
     args.collect {

diff --git a/src/main/scala/aqua/model/DataAcc.scala b/src/main/scala/aqua/model/DataAcc.scala
new file mode 100644
index 00000000..8b970c43
--- /dev/null
+++ b/src/main/scala/aqua/model/DataAcc.scala
@@ -0,0 +1,85 @@
+package aqua.model
+
+import aqua.AquaError
+import aqua.model.marker.{DataMarker, VarMarker}
+import aqua.parser.{AbilityFuncCall, AbilityId, Block, DefFunc, Extract, FuncCall, FuncOp, On, Par}
+import aqua.parser.lexer.{DataType, Value, Var}
+import cats.Comonad
+import cats.data.ValidatedNel
+import shapeless._
+import shapeless.ops.hlist.Selector
+import cats.syntax.comonad._
+
+case class DataAcc[F[_]](acc: InOutAcc[F, Value[F], DataMarker[F, HNil]])
+
+object DataAcc {
+  type Acc[F[_]] = InOutAcc[F, Value[F], DataMarker[F, HNil]]
+  def emptyAcc[F[_]]: Acc[F] = InOutAcc.empty[F, Value[F], DataMarker[F, HNil]]
+  def empty[F[_]]: DataAcc[F] = DataAcc[F](emptyAcc[F])
+
+  class Pass[F[_]: Comonad, I <: HList, O <: HList](extend: Passer[F, I, O])(implicit getScope: Selector[O, Scope[F]])
+      extends Passer[F, I, DataAcc[F] :: O] {
+    type Ctx = DataAcc[F] :: O
+
+    override def exitFuncOpGroup(group: FuncOp[F, I], last: Ctx): Ctx =
+      last.head :: extend.exitFuncOpGroup(group, last.tail)
+
+    override def funcOpCtx(op: FuncOp[F, I], prev: Ctx): Ctx = {
+      lazy val in = extend.funcOpCtx(op, prev.tail)
+      val data = prev.head
+      lazy val mode = getScope(in).mode.map(_.extract)
+      def combinedWith(other: Acc[F] => InOutAcc[F, Value[F], DataMarker[F, HNil]]): DataAcc[F] =
+        DataAcc[F](data.acc.combine(other(emptyAcc[F]), mode))
+
+      op match {
+        case FuncCall(_, args, _) =>
+          combinedWith(_ addIn Acc.fromValues(args)) :: in
+        case AbilityFuncCall(_, fc, _) =>
+          funcOpCtx(fc, prev)
+        case Extract(n, fc, _) =>
+          val f = funcOpCtx(fc, prev)
+          f.head.copy(f.head.acc.combine(empty[F].acc addOut Acc.one(n.name.extract, VarMarker(n)), mode)) :: f.tail
+        case AbilityId(_, id, _) =>
+          combinedWith(_ addIn Acc.fromValues(id :: Nil)) :: in
+        case On(p, _, _) =>
+          combinedWith(_ addIn Acc.fromValues(p :: Nil)) :: in
+        case Par(_, _, _) =>
+          data :: in
+      }
+    }
+
+    override def blockCtx(block: Block[F, I]): Ctx =
+      (block match {
+        case DefFunc(head, _, _) =>
+          head.args.foldLeft(empty[F]) {
+            case (acc, (k, v, _: DataType[F])) =>
+              // TODO we know data type there, should we care?
+              acc.copy(acc.acc.addOut(Acc.one(k, VarMarker(Var(v)))))
+            case (acc, _) => acc
+          }
+
+        case _ =>
+          empty[F]
+
+      }) :: extend.blockCtx(block)
+
+    override def duplicates(prev: Out, next: Out): List[F[String]] =
+      next.head.acc.out
+        .takeKeys(prev.head.acc.out.keys)
+        .data
+        .flatMap {
+          case (k, vs) => vs.toList.map(_.toError(s"Duplicated variable definition `$k`"))
+        }
+        .toList ::: extend.duplicates(prev.tail, next.tail)
+
+    override def emptyCtx: Out = empty[F] :: extend.emptyCtx
+
+    override def combineBlockCtx(prev: Out, block: Out): Out =
+      DataAcc(prev.head.acc.eraseOut combineSeq block.head.acc) :: extend.combineBlockCtx(prev.tail, block.tail)
+
+    override def unresolved(ctx: Out): List[F[String]] =
+      ctx.head.acc.in.data.flatMap {
+        case (k, vs) => vs.toList.map(v => v.as(s"Unresolved variable `$k`"))
+      }.toList ::: extend.unresolved(ctx.tail)
+  }
+}

diff --git a/src/main/scala/aqua/model/InOutAcc.scala b/src/main/scala/aqua/model/InOutAcc.scala
index 9ba31c21..c14e73f0 100644
--- a/src/main/scala/aqua/model/InOutAcc.scala
+++ b/src/main/scala/aqua/model/InOutAcc.scala
@@ -4,149 +4,76 @@ import aqua.parser.{
   AbilityFuncCall,
   AbilityId,
   AbilityResolve,
-  ArrowMarker,
   Block,
   DefAlias,
   DefFunc,
   DefService,
   DefType,
+  Expression,
   Extract,
-  FuncArrow,
   FuncCall,
   FuncOp,
-  LocalArrow,
   On,
-  Par,
-  TypeAlias,
-  TypeDef,
-  TypeMarker
+  Par
 }
 import aqua.parser.lexer.{Ability, ArrowName, ArrowType, CustomType, DataType, Token, Value, Var}
+import aqua.model.marker.{ArrowMarker, FuncArrow, LocalArrow, Marker, TypeAlias, TypeDef, TypeMarker}
 import cats.{Comonad, Functor}
 import cats.data.NonEmptyList
 import cats.syntax.comonad._
 import cats.syntax.functor._

-case class InOutAcc[F[_], In <: Token[F], Out <: Token[F]](
+case class InOutAcc[F[_], In <: Token[F], Out <: Marker[F, _]](
   in: Acc[F, In],
-  out: Acc[F, Out],
-  scope: Scope[F]
+  out: Acc[F, Out]
 ) {

-  def par(f: F[Unit])(implicit F: Comonad[F]): InOutAcc[F, In, Out] = copy(scope = scope.par(f))
-  def xor(f: F[Unit])(implicit F: Comonad[F]): InOutAcc[F, In, Out] = copy(scope = scope.xor(f))
-  def on(v: Value[F]): InOutAcc[F, In, Out] = copy(scope = scope.on(v))
-  def unsetMode: InOutAcc[F, In, Out] = copy(scope = scope.unsetMode)
+  type Self = InOutAcc[F, In, Out]

-  def unsetPeer: InOutAcc[F, In, Out] = copy(scope = scope.unsetPeer)
-  def unsetScope: InOutAcc[F, In, Out] = unsetMode.unsetPeer
-
-  def combine(other: InOutAcc[F, In, Out])(implicit F: Comonad[F]): InOutAcc[F, In, Out] =
-    scope.mode.map(_.extract) match {
+  def combine(other: Self, mode: Option[Mode])(implicit F: Comonad[F]): Self =
+    mode match {
       case None => combineSeq(other)
       case Some(XorMode) => combineXor(other)
       case Some(ParMode) => combinePar(other)
     }

-  def combineSeq(other: InOutAcc[F, In, Out]): InOutAcc[F, In, Out] =
+  def combineSeq(other: Self): Self =
     copy(in = in.add(other.in, out.keys), out = out add other.out)

-  def combinePar(other: InOutAcc[F, In, Out]): InOutAcc[F, In, Out] =
+  def combinePar(other: Self): Self =
     copy(in = in add other.in, out = out add other.out)

-  def combineXor(other: InOutAcc[F, In, Out]): InOutAcc[F, In, Out] =
+  def combineXor(other: Self): Self =
     copy(in = in add other.in)

-  def addIn(addition: Acc[F, In]): InOutAcc[F, In, Out] =
+  def addIn(addition: Acc[F, In]): Self =
     copy(in = in add addition)

-  def subIn(rem: String): InOutAcc[F, In, Out] =
+  def subIn(rem: String): Self =
     copy(in = in sub rem)

-  def addOut(addition: Acc[F, Out]): InOutAcc[F, In, Out] =
+  def addOut(addition: Acc[F, Out]): Self =
     copy(out = out add addition)

-  def collectOut(pf: PartialFunction[Out, Out]): InOutAcc[F, In, Out] =
+  def collectOut(pf: PartialFunction[Out, Out]): Self =
     copy(out = out.copy(data = out.data.map {
       case (k, v) => k -> v.toList.collect(pf)
     }.collect {
       case (k, h :: tail) => k -> NonEmptyList[Out](h, tail)
     }))

-  def subOut(rem: String): InOutAcc[F, In, Out] =
+  def subOut(rem: String): Self =
     copy(out = out sub rem)

-  def eraseOut: InOutAcc[F, In, Out] = copy(out = out.erase)
-  def eraseIn: InOutAcc[F, In, Out] = copy(in = in.erase)
-
-  def validateDuplicates(toMsg: (String, Out) => String, next: InOutAcc[F, In, Out])(implicit
-    F: Functor[F]
-  ): List[F[String]] =
-    next.out.takeKeys(out.keys).toErrors(toMsg)
-
-  def validateUnresolved(toMsg: (String, In) => String)(implicit F: Functor[F]): List[F[String]] =
-    in.toErrors(toMsg)
+  def eraseOut: Self = copy(out = out.erase)
+  def eraseIn: Self = copy(in = in.erase)
 }

 object InOutAcc {

-  def empty[F[_], In <: Token[F], Out <: Token[F]]: InOutAcc[F, In, Out] =
-    InOutAcc(Acc.empty[F, In], Acc.empty[F, Out], Scope())
-
-  trait Visitor[IOA[_[_]]] {
-    def funcOp[F[_]: Comonad](op: FuncOp[F]): IOA[F]
-
-    def func[F[_]: Comonad](func: DefFunc[F]): IOA[F]
-
-    def block[F[_]: Comonad](block: Block[F]): IOA[F]
-  }
-
-  type Data[F[_]] = InOutAcc[F, Value[F], Var[F]]
-
-  object Data extends Visitor[Data] {
-
-    def funcOp[F[_]: Comonad](op: FuncOp[F]): Data[F] =
-      op match {
-        case FuncCall(_, fargs) =>
-          (empty: Data[F]) addIn Acc.fromValues(fargs)
-        case AbilityFuncCall(_, fc) =>
-          funcOp(fc)
-        case Extract(n, fc) =>
-          funcOp(fc) addOut Acc.one(n.name.extract, n)
-        case AbilityId(_, id) =>
-          (empty: Data[F]) addIn Acc.fromValues(id :: Nil)
-
-        case On(p, ops) =>
-          ops
-            .widen[FuncOp[F]]
-            .map(funcOp[F](_).on(p))
-            .foldLeft(
-              (empty: Data[F]).on(p) addIn Acc.fromValues(p :: Nil)
-            )(_ combine _)
-        case Par(f, op) =>
-          funcOp(op).par(f)
-        case _ =>
-          empty: Data[F]
-      }
-
-    override def func[F[_]: Comonad](func: DefFunc[F]): Data[F] =
-      func.head.args.foldLeft(
-        func.body.map(funcOp[F]).reduceLeft(_ combine _).unsetScope
-      ) {
-        case (acc, (k, _, _: DataType[F])) =>
-          acc.subIn(k)
-        case (acc, _) => acc
-
-      }
-
-    override def block[F[_]: Comonad](block: Block[F]): Data[F] =
-      block match {
-        case fn: DefFunc[F] =>
-          func(fn)
-        case _ =>
-          empty: Data[F]
-      }
-  }
+  def empty[F[_], In <: Token[F], Out <: Marker[F, _]]: InOutAcc[F, In, Out] =
+    InOutAcc(Acc.empty[F, In], Acc.empty[F, Out])

+  /*
   type Abilities[F[_]] = InOutAcc[F, Ability[F], DefService[F]]

   object Abilities extends Visitor[Abilities] {
@@ -291,5 +218,5 @@ object InOutAcc {
       case _ =>
         empty: Arrows[F]
     }
-  }
+  }*/
 }

diff --git a/src/main/scala/aqua/model/Names.scala b/src/main/scala/aqua/model/Names.scala
index 475cfa8c..31e7a0db 100644
--- a/src/main/scala/aqua/model/Names.scala
+++ b/src/main/scala/aqua/model/Names.scala
@@ -2,6 +2,7 @@ package aqua.model

 import aqua.parser._
 import aqua.parser.lexer._
+import aqua.model.marker.{ArrowMarker, TypeMarker}
 import cats.Comonad
 import cats.data.Validated.{Invalid, Valid}
 import cats.data.{NonEmptyList, Validated, ValidatedNel}
@@ -9,10 +10,11 @@ import cats.data.{NonEmptyList, Validated, ValidatedNel}
 import scala.collection.immutable.Queue

 // Fully resolved Scope must have no expected abilities (all resolved)
-case class Names[F[_]](
+/*case class Names[F[_]](
   // Take vars, set vars
   // Data type is not yet known
-  data: InOutAcc.Data[F] = InOutAcc.empty[F, Value[F], Var[F]],
+  // TODO VarMarker
+  data: DataAcc.Pass[F] = InOutAcc.empty[F, Value[F], Var[F]],
   // Abilities can be imported or set
   abilitiesResolve: InOutAcc.AbilitiesResolve[F] = InOutAcc.empty[F, Ability[F], AbilityResolve[F]],
   // Abilities can be defined and expected
@@ -65,3 +67,4 @@ object Names {
   }

 }
+ */

diff --git a/src/main/scala/aqua/model/Passer.scala b/src/main/scala/aqua/model/Passer.scala
new file mode 100644
index 00000000..3f4acd13
--- /dev/null
+++ b/src/main/scala/aqua/model/Passer.scala
@@ -0,0 +1,106 @@
+package aqua.model
+
+import aqua.parser.{Block, DefAlias, DefFunc, DefService, DefType, ExecOp, FuncOp, InstrOp, On, Par}
+import cats.Functor
+import cats.data.Validated.{Invalid, Valid}
+import cats.data.{NonEmptyList, ValidatedNel}
+import shapeless._
+import cats.syntax.functor._
+
+import scala.collection.immutable.Queue
+
+trait Passer[F[_], I <: HList, O <: HList] {
+  type Out = O
+
+  def exitFuncOpGroup(group: FuncOp[F, I], last: O): O
+
+  def funcOpCtx(op: FuncOp[F, I], prev: O): O
+
+  def blockCtx(block: Block[F, I]): O
+
+  def mapFuncOp(op: FuncOp[F, I], prev: O): FuncOp[F, O] = {
+    val ctx = funcOpCtx(op, prev)
+    op match {
+      case p @ Par(_, inner, _) =>
+        val inOp = mapFuncOp(inner, ctx)
+        p.copy(op = inOp.asInstanceOf[InstrOp[F, O]], context = exitFuncOpGroup(p, inOp.context))
+      case o @ On(_, ops, _) =>
+        val (inOps, inCtx) = mapFuncOps(ops, ctx)
+        o.copy(
+          ops = inOps.asInstanceOf[NonEmptyList[ExecOp[F, O]]],
+          context = exitFuncOpGroup(o, exitFuncOpGroup(o, inCtx))
+        )
+      case _ =>
+        op.as(ctx)
+    }
+  }
+
+  def mapFuncOps(ops: NonEmptyList[FuncOp[F, I]], context: O): (NonEmptyList[FuncOp[F, O]], O) = {
+    val (queue, lastCtx) = ops.foldLeft[(Queue[FuncOp[F, O]], O)](Queue.empty -> context) {
+      case ((acc, o), op) =>
+        val mapped = mapFuncOp(op, o)
+        acc.appended(mapped) -> mapped.context
+    }
+    NonEmptyList.fromListUnsafe(queue.toList) -> lastCtx
+  }
+
+  def emptyCtx: Out
+
+  def combineBlockCtx(prev: Out, block: Out): Out
+
+  def mapBlock(block: Block[F, I], prevCtx: Out): (List[F[String]], Block[F, Out]) = {
+    val ctx = blockCtx(block)
+    val dupErr = duplicates(prevCtx, ctx)
+    val bCtx = combineBlockCtx(prevCtx, ctx)
+    val combinedBlock = block match {
+      case df @ DefFunc(_, body, _) =>
+        val (newBody, bodyCtx) = mapFuncOps(body, bCtx)
+        df.copy(body = newBody, context = bodyCtx)
+      case ds: DefService[F, I] =>
+        ds.copy(context = bCtx)
+      case al: DefAlias[F, I] =>
+        al.copy(context = bCtx)
+      case dt: DefType[F, I] =>
+        dt.copy(context = bCtx)
+    }
+
+    (dupErr ::: unresolved(combinedBlock.context)) -> combinedBlock
+  }
+
+  def andThen[O2 <: HList](f: Passer[F, I, O] => Passer[F, I, O2]): Passer[F, I, O2] = f(this)
+
+  def duplicates(prev: Out, next: Out): List[F[String]]
+  def unresolved(ctx: Out): List[F[String]]
+
+  def pass(blocks: List[Block[F, I]]): ValidatedNel[F[String], List[Block[F, Out]]] = {
+    val (errs, _, nblocks) =
+      blocks.foldLeft[(Queue[F[String]], Out, Queue[Block[F, Out]])]((Queue.empty, emptyCtx, Queue.empty)) {
+        case ((errs, prevCtx, blockAcc), next) =>
+          val (addErrs, mappedBlock) = mapBlock(next, prevCtx)
+          (errs.appendedAll(addErrs), mappedBlock.context, blockAcc.appended(mappedBlock))
+      }
+    NonEmptyList
+      .fromList(errs.toList)
+      .fold[ValidatedNel[F[String], List[Block[F, Out]]]](Valid(nblocks.toList))(Invalid(_))
+  }
+}
+
+object Passer {
+
+  def hnil[F[_]]: Passer[F, HNil, HNil] =
+    new Passer[F, HNil, HNil] {
+      override def exitFuncOpGroup(group: FuncOp[F, HNil], last: HNil): HNil = HNil
+
+      override def funcOpCtx(op: FuncOp[F, HNil], prev: HNil): HNil = HNil
+
+      override def blockCtx(block: Block[F, HNil]): HNil = HNil
+
+      override def duplicates(prev: Out, next: Out): List[F[String]] = Nil
+
+      override def emptyCtx: Out = HNil
+
+      override def combineBlockCtx(prev: Out, block: Out): Out = HNil
+
+      override def unresolved(ctx: Out): List[F[String]] = Nil
+    }
+}

diff --git a/src/main/scala/aqua/model/ScopePasser.scala b/src/main/scala/aqua/model/ScopePasser.scala
new file mode 100644
index 00000000..1fbaa780
--- /dev/null
+++ b/src/main/scala/aqua/model/ScopePasser.scala
@@ -0,0 +1,38 @@
+package aqua.model
+
+import aqua.parser.{Block, FuncOp, On, Par}
+import cats.Functor
+import shapeless._
+
+class ScopePasser[F[_]: Functor, I <: HList, O <: HList](extend: Passer[F, I, O]) extends Passer[F, I, Scope[F] :: O] {
+  type Ctx = Scope[F] :: O
+
+  override def exitFuncOpGroup(group: FuncOp[F, I], last: Ctx): Ctx =
+    (group match {
+      case _: Par[F, I] =>
+        last.head.unsetMode
+      case _: On[F, I] =>
+        last.head.unsetPeer
+      case _ =>
+        last.head
+    }) :: extend.exitFuncOpGroup(group, last.tail)
+
+  override def funcOpCtx(op: FuncOp[F, I], prev: Ctx): Ctx =
+    (op match {
+      case p: Par[F, I] => prev.head.par(p.f)
+      case o: On[F, I] => prev.head.on(o.peer)
+      case _ => prev.head
+    }) :: extend.funcOpCtx(op, prev.tail)
+
+  override def blockCtx(block: Block[F, I]): Ctx = Scope[F]() :: extend.blockCtx(block)
+
+  override def duplicates(prev: Out, next: Out): List[F[String]] =
+    extend.duplicates(prev.tail, next.tail)
+
+  override def emptyCtx: Out = Scope[F]() :: extend.emptyCtx
+
+  override def combineBlockCtx(prev: Out, block: Out): Out =
+    Scope[F]() :: extend.combineBlockCtx(prev.tail, block.tail)
+
+  override def unresolved(ctx: Out): List[F[String]] = Nil
+}

diff --git a/src/main/scala/aqua/model/marker/ArrowMarker.scala b/src/main/scala/aqua/model/marker/ArrowMarker.scala
new file mode 100644
index 00000000..fd7aaae3
--- /dev/null
+++ b/src/main/scala/aqua/model/marker/ArrowMarker.scala
@@ -0,0 +1,10 @@
+package aqua.model.marker
+
+import aqua.parser.DefFunc
+import aqua.parser.lexer.ArrowType
+
+sealed trait ArrowMarker[F[_], L] extends Marker[F, L]
+
+case class LocalArrow[F[_], L](arr: ArrowType[F]) extends ArrowMarker[F, L]
+
+case class FuncArrow[F[_], L](funcDef: DefFunc[F, L]) extends ArrowMarker[F, L]

diff --git a/src/main/scala/aqua/model/marker/DataMarker.scala b/src/main/scala/aqua/model/marker/DataMarker.scala
new file mode 100644
index 00000000..6f260cb5
--- /dev/null
+++ b/src/main/scala/aqua/model/marker/DataMarker.scala
@@ -0,0 +1,12 @@
+package aqua.model.marker
+
+import aqua.parser.lexer.Var
+import cats.Functor
+
+sealed trait DataMarker[F[_], L] extends Marker[F, L] {
+  def toError(str: String)(implicit F: Functor[F]): F[String]
+}
+
+case class VarMarker[F[_], L](v: Var[F]) extends DataMarker[F, L] {
+  override def toError(str: String)(implicit F: Functor[F]): F[String] = v.as(str)
+}

diff --git a/src/main/scala/aqua/model/marker/Marker.scala b/src/main/scala/aqua/model/marker/Marker.scala
new file mode 100644
index 00000000..ca5cb7a2
--- /dev/null
+++ b/src/main/scala/aqua/model/marker/Marker.scala
@@ -0,0 +1,3 @@
+package aqua.model.marker
+
+trait Marker[F[_], L]

diff --git a/src/main/scala/aqua/model/marker/TypeMarker.scala b/src/main/scala/aqua/model/marker/TypeMarker.scala
new file mode 100644
index 00000000..c328f7be
--- /dev/null
+++ b/src/main/scala/aqua/model/marker/TypeMarker.scala
@@ -0,0 +1,10 @@
+package aqua.model.marker
+
+import aqua.parser.DefType
+import aqua.parser.lexer.Type
+
+sealed trait TypeMarker[F[_], L] extends Marker[F, L]
+
+case class TypeAlias[F[_], L](forType: Type[F]) extends TypeMarker[F, L]
+
+case class TypeDef[F[_], L](forDef: DefType[F, L]) extends TypeMarker[F, L]

diff --git a/src/main/scala/aqua/parser/ArrowMarker.scala b/src/main/scala/aqua/parser/ArrowMarker.scala
deleted file mode 100644
index 4020a7f2..00000000
--- a/src/main/scala/aqua/parser/ArrowMarker.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package aqua.parser
-
-import aqua.parser.lexer.{ArrowType, Token}
-import cats.Functor
-
-sealed trait ArrowMarker[F[_]] extends Token[F]
-
-case class LocalArrow[F[_]](arr: ArrowType[F]) extends ArrowMarker[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = arr.as(v)
-}
-
-case class FuncArrow[F[_]](funcDef: DefFunc[F]) extends ArrowMarker[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = funcDef.as(v)
-}

diff --git a/src/main/scala/aqua/parser/Block.scala b/src/main/scala/aqua/parser/Block.scala
index 7903939d..cf739521 100644
--- a/src/main/scala/aqua/parser/Block.scala
+++ b/src/main/scala/aqua/parser/Block.scala
@@ -3,47 +3,43 @@ package aqua.parser
 import aqua.parser.lexer.DataType.`datatypedef`
 import aqua.parser.lexer.Token._
 import aqua.parser.lexer.Type.{`arrowdef`, `typedef`}
-import aqua.parser.lexer.{Ability, AquaArrowType, ArrowName, ArrowType, CustomType, DataType, Token, Type}
+import aqua.parser.lexer.{Ability, AquaArrowType, ArrowName, ArrowType, CustomType, DataType, Type, Var}
 import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser._
-import cats.{Comonad, Functor}
+import cats.Comonad
 import cats.data.{NonEmptyList, NonEmptyMap}
 import cats.parse.{Parser => P}
 import cats.syntax.comonad._
+import shapeless.HNil

-sealed trait Block[F[_]] extends Token[F]
+sealed trait Block[F[_], L] extends Expression[F, L]

-case class DefType[F[_]](name: CustomType[F], fields: NonEmptyMap[String, (F[String], DataType[F])]) extends Block[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = name.as(v)
-}
+case class DefType[F[_], L](name: CustomType[F], fields: NonEmptyMap[String, (Var[F], DataType[F])], context: L)
+    extends Block[F, L]

-case class DefService[F[_]](name: Ability[F], funcs: NonEmptyMap[String, ArrowType[F]]) extends Block[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = name.as(v)
-}
+case class DefService[F[_], L](name: Ability[F], funcs: NonEmptyMap[String, ArrowType[F]], context: L)
+    extends Block[F, L]

+// TODO arg is either Var, or ArrowName
 case class FuncHead[F[_]](name: ArrowName[F], args: List[(String, F[String], Type[F])], ret: Option[DataType[F]]) {

   def toArrowDef(implicit F: Comonad[F]): F[AquaArrowType[F]] =
     name.as(AquaArrowType(args.map(_._3), ret))
 }

-case class DefFunc[F[_]](head: FuncHead[F], body: NonEmptyList[FuncOp[F]]) extends Block[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = head.name.as(v)
-}
+case class DefFunc[F[_], L](head: FuncHead[F], body: NonEmptyList[FuncOp[F, L]], context: L) extends Block[F, L]

-case class DefAlias[F[_]](alias: CustomType[F], target: Type[F]) extends Block[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = alias.as(v)
-}
+case class DefAlias[F[_], L](alias: CustomType[F], target: Type[F], context: L) extends Block[F, L]

 object DefType {
-  def `dname`[F[_]: LiftParser]: P[CustomType[F]] = `data` *> ` ` *> CustomType.ct[F] <* ` `.? <* `:` <* ` \n*`
+  def `dname`[F[_]: LiftParser]: P[CustomType[F]] = `data` *> ` ` *> CustomType.ct[F] <* ` `.? <* `:` <* ` \n+`

-  def `dataname`[F[_]: LiftParser]: P[(F[String], DataType[F])] =
-    (`name`.lift <* ` : `) ~ `datatypedef`
+  def `dataname`[F[_]: LiftParser]: P[(Var[F], DataType[F])] =
+    (Var.v[F] <* ` : `) ~ `datatypedef`

-  def `deftype`[F[_]: LiftParser: Comonad]: P[DefType[F]] =
+  def `deftype`[F[_]: LiftParser: Comonad]: P[DefType[F, HNil]] =
     (`dname` ~ indented(`dataname`)).map {
-      case (n, t) ⇒ DefType(n, t.map(kv => kv._1.extract -> kv).toNem)
+      case (n, t) ⇒ DefType(n, t.map(kv => kv._1.name.extract -> kv).toNem, HNil)
     }
 }
@@ -63,9 +59,9 @@ object DefFunc {
   }

   // TODO: if funchead has return type, for the last op, add extract, add Return.reply(extracted)
-  def `deffunc`[F[_]: LiftParser: Comonad]: P[DefFunc[F]] =
-    ((`funchead` <* ` : ` <* ` \n*`) ~ FuncOp.body).map {
-      case (h, b) ⇒ DefFunc(h, b)
+  def `deffunc`[F[_]: LiftParser: Comonad]: P[DefFunc[F, HNil]] =
+    ((`funchead` <* ` : ` <* ` \n+`) ~ FuncOp.body).map {
+      case (h, b) ⇒ DefFunc(h, b, HNil)
     }

 }
@@ -73,27 +69,27 @@ object DefFunc {
 object DefService {
   import DefFunc.`funcdef`

-  def `servicename`[F[_]: LiftParser]: P[Ability[F]] = `service` *> ` ` *> Ability.ab[F] <* ` `.? <* `:` <* ` \n*`
+  def `servicename`[F[_]: LiftParser]: P[Ability[F]] = `service` *> ` ` *> Ability.ab[F] <* ` `.? <* `:` <* ` \n+`

   // TODO switch to funchead?
-  def `defservice`[F[_]: LiftParser]: P[DefService[F]] =
+  def `defservice`[F[_]: LiftParser]: P[DefService[F, HNil]] =
     (`servicename` ~ indented(`funcdef`).map(_.toNem)).map {
-      case (n, f) ⇒ DefService(n, f)
+      case (n, f) ⇒ DefService(n, f, HNil)
     }
 }

 object DefAlias {

-  def `defalias`[F[_]: LiftParser]: P[DefAlias[F]] =
+  def `defalias`[F[_]: LiftParser]: P[DefAlias[F, HNil]] =
     ((`alias` *> ` ` *> CustomType.ct[F] <* ` : `) ~ `typedef`).map {
-      case (ct, t) => DefAlias(ct, t)
+      case (ct, t) => DefAlias(ct, t, HNil)
     }
 }

 object Block {

-  def block[F[_]: LiftParser: Comonad]: P[Block[F]] =
-    ` \n*`.rep0.with1 *> P.oneOf(
+  def block[F[_]: LiftParser: Comonad]: P[Block[F, HNil]] =
+    ` \n+`.rep0.with1 *> P.oneOf(
       DefType.`deftype` ::
       DefService.`defservice` ::
       DefFunc.`deffunc` ::

diff --git a/src/main/scala/aqua/parser/Expression.scala b/src/main/scala/aqua/parser/Expression.scala
new file mode 100644
index 00000000..6f6418e1
--- /dev/null
+++ b/src/main/scala/aqua/parser/Expression.scala
@@ -0,0 +1,5 @@
+package aqua.parser
+
+trait Expression[F[_], L] {
+  def context: L
+}

diff --git a/src/main/scala/aqua/parser/FuncOp.scala b/src/main/scala/aqua/parser/FuncOp.scala
index 2aee8182..68458991 100644
--- a/src/main/scala/aqua/parser/FuncOp.scala
+++ b/src/main/scala/aqua/parser/FuncOp.scala
@@ -1,7 +1,7 @@
 package aqua.parser

 import aqua.parser.lexer.Token._
-import aqua.parser.lexer.{Ability, Token, Value, Var}
+import aqua.parser.lexer.{Ability, ArrowName, Value, Var}
 import cats.data.NonEmptyList
 import cats.parse.{Parser => P}
 import aqua.parser.lexer.Value.`value`
@@ -9,91 +9,99 @@ import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser._
 import cats.{Comonad, Functor}
 import cats.syntax.functor._
+import shapeless.HNil

-sealed trait FuncOp[F[_]] extends Token[F]
-sealed trait InstrOp[F[_]] extends FuncOp[F]
+sealed trait FuncOp[F[_], L] extends Expression[F, L]
+sealed trait InstrOp[F[_], L] extends FuncOp[F, L]

-sealed trait ExecOp[F[_]] extends InstrOp[F]
-sealed trait CallOp[F[_]] extends ExecOp[F]
+sealed trait ExecOp[F[_], L] extends InstrOp[F, L]
+sealed trait CallOp[F[_], L] extends ExecOp[F, L]

-case class FuncCall[F[_]](name: F[String], args: List[Value[F]]) extends CallOp[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = name.as(v)
-}
+case class FuncCall[F[_], L](arrow: ArrowName[F], args: List[Value[F]], context: L) extends CallOp[F, L]

-case class AbilityFuncCall[F[_]](ability: Ability[F], call: FuncCall[F]) extends CallOp[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = ability.as(v)
-}
+case class AbilityFuncCall[F[_], L](ability: Ability[F], call: FuncCall[F, L], context: L) extends CallOp[F, L]

-case class Extract[F[_]](vr: Var[F], from: CallOp[F]) extends ExecOp[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = vr.as(v)
-}
+case class Extract[F[_], L](vr: Var[F], from: CallOp[F, L], context: L) extends ExecOp[F, L]

-case class On[F[_]](peer: Value[F], ops: NonEmptyList[ExecOp[F]]) extends InstrOp[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = peer.as(v)
-}
+case class On[F[_], L](peer: Value[F], ops: NonEmptyList[ExecOp[F, L]], context: L) extends InstrOp[F, L]

-case class Par[F[_]](f: F[Unit], op: InstrOp[F]) extends FuncOp[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = f.as(v)
-}
+case class Par[F[_], L](f: F[Unit], op: InstrOp[F, L], context: L) extends FuncOp[F, L]

 // TODO: can't be in Par, can be in On
-sealed trait AbilityResolve[F[_]] extends ExecOp[F] {
+sealed trait AbilityResolve[F[_], L] extends ExecOp[F, L] {
   def ability: Ability[F]
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = ability.as(v)
 }
-case class AbilityId[F[_]](ability: Ability[F], id: Value[F]) extends AbilityResolve[F]
+case class AbilityId[F[_], L](ability: Ability[F], id: Value[F], context: L) extends AbilityResolve[F, L]

 object FuncOp {
-  def funcCall[F[_]: LiftParser: Comonad]: P[FuncCall[F]] =
-    (`name`.lift ~ P.repSep0(`value`, `,`).between(`(`, `)`)).map {
-      case (fnName, args) ⇒ FuncCall(fnName, args)
+  def funcCall[F[_]: LiftParser: Comonad]: P[FuncCall[F, HNil]] =
+    (ArrowName.an[F] ~ P.repSep0(`value`, `,`).between(`(`, `)`)).map {
+      case (fnName, args) ⇒ FuncCall(fnName, args, HNil)
     }

-  def abilityFuncCall[F[_]: LiftParser: Comonad]: P[AbilityFuncCall[F]] =
+  def abilityFuncCall[F[_]: LiftParser: Comonad]: P[AbilityFuncCall[F, HNil]] =
     ((Ability.ab[F] <* `.`) ~ funcCall).map {
-      case (abName, fc) ⇒ AbilityFuncCall(abName, fc)
+      case (abName, fc) ⇒ AbilityFuncCall(abName, fc, HNil)
     }

-  def callOp[F[_]: LiftParser: Comonad]: P[CallOp[F]] =
+  def callOp[F[_]: LiftParser: Comonad]: P[CallOp[F, HNil]] =
     P.oneOf(funcCall[F] :: abilityFuncCall[F] :: Nil)

-  def extract[F[_]: LiftParser: Comonad]: P[Extract[F]] =
+  def extract[F[_]: LiftParser: Comonad]: P[Extract[F, HNil]] =
     ((Var.v <* `<-`) ~ callOp[F]).map {
-      case (v, f) ⇒ Extract(v, f)
+      case (v, f) ⇒ Extract(v, f, HNil)
     }

-  def abilityResolve[F[_]: LiftParser: Comonad]: P[AbilityResolve[F]] =
+  def abilityResolve[F[_]: LiftParser: Comonad]: P[AbilityResolve[F, HNil]] =
     ((Ability.ab <* ` `) ~ `value`).map {
-      case (n, v) ⇒ AbilityId(n, v)
-    }.widen[AbilityResolve[F]]
+      case (n, v) ⇒ AbilityId[F, HNil](n, v, HNil)
+    }.widen[AbilityResolve[F, HNil]]

   // TODO can't be in Par, can be in On
-  def execOp[F[_]: LiftParser: Comonad]: P[ExecOp[F]] =
+  def execOp[F[_]: LiftParser: Comonad]: P[ExecOp[F, HNil]] =
     P.oneOf(
       callOp.backtrack ::
         abilityResolve.backtrack ::
         extract :: Nil
     )

-  def startOn[F[_]: LiftParser: Comonad]: P[Value[F]] = `on` *> ` ` *> `value` <* ` `.? <* `:` <* ` \n*`
+  def startOn[F[_]: LiftParser: Comonad]: P[Value[F]] = `on` *> ` ` *> `value` <* ` `.? <* `:` <* ` \n+`

-  def execOn[F[_]: LiftParser: Comonad]: P[On[F]] =
+  def execOn[F[_]: LiftParser: Comonad]: P[On[F, HNil]] =
     (startOn ~ indented(execOp[F])).map {
-      case (v, i) ⇒ On(v, i)
+      case (v, i) ⇒ On(v, i, HNil)
     }

-  def instrOp[F[_]: LiftParser: Comonad]: P[InstrOp[F]] =
+  def instrOp[F[_]: LiftParser: Comonad]: P[InstrOp[F, HNil]] =
     P.oneOf(
       execOn.backtrack ::
         execOp :: Nil
     )

-  def parOp[F[_]: LiftParser: Comonad]: P[Par[F]] =
-    ((`par`.lift <* ` `) ~ instrOp[F]).map(pi => Par(pi._1, pi._2))
+  def parOp[F[_]: LiftParser: Comonad]: P[Par[F, HNil]] =
+    ((`par`.lift <* ` `) ~ instrOp[F]).map(pi => Par(pi._1, pi._2, HNil))

-  def `funcop`[F[_]: LiftParser: Comonad]: P[FuncOp[F]] =
+  def `funcop`[F[_]: LiftParser: Comonad]: P[FuncOp[F, HNil]] =
     P.oneOf(parOp.backtrack :: instrOp :: Nil)

-  def body[F[_]: LiftParser: Comonad]: P[NonEmptyList[FuncOp[F]]] = indented(`funcop`)
+  def body[F[_]: LiftParser: Comonad]: P[NonEmptyList[FuncOp[F, HNil]]] = indented(`funcop`)
+
+  implicit def funcOpFunctor[F[_]]: Functor[FuncOp[F, *]] =
+    new Functor[FuncOp[F, *]] {
+
+      override def map[A, B](fa: FuncOp[F, A])(f: A => B): FuncOp[F, B] =
+        fa match {
+          case fc @ FuncCall(_, _, ctx) => fc.copy(context = f(ctx))
+          case afc @ AbilityFuncCall(_, fc, ctx) => afc.copy(call = fc.copy(context = f(fc.context)), context = f(ctx))
+          case e @ Extract(_, afc @ AbilityFuncCall(_, fc, actx), ctx) =>
+            e.copy(from = afc.copy(call = fc.copy(context = f(fc.context)), context = f(actx)), context = f(ctx))
+          case e @ Extract(_, fc @ FuncCall(_, _, fctx), ctx) =>
+            e.copy(from = fc.copy(context = f(fctx)), context = f(ctx))
+          case on @ On(_, ops, ctx) => on.copy(ops = ops.map(map(_)(f).asInstanceOf[ExecOp[F, B]]), context = f(ctx))
+          case p @ Par(_, op, ctx) => p.copy(op = map(op)(f).asInstanceOf[InstrOp[F, B]], context = f(ctx))
+          case aid @ AbilityId(_, _, ctx) => aid.copy(context = f(ctx))
+        }
+    }
 }

diff --git a/src/main/scala/aqua/parser/TypeMarker.scala b/src/main/scala/aqua/parser/TypeMarker.scala
deleted file mode 100644
index bbe8f66c..00000000
--- a/src/main/scala/aqua/parser/TypeMarker.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package aqua.parser
-
-import aqua.parser.lexer.{Token, Type}
-import cats.Functor
-
-sealed trait TypeMarker[F[_]] extends Token[F]
-
-case class TypeAlias[F[_]](forType: Type[F]) extends TypeMarker[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = forType.as(v)
-}
-
-case class TypeDef[F[_]](forDef: DefType[F]) extends TypeMarker[F] {
-  override def as[T](v: T)(implicit F: Functor[F]): F[T] = forDef.as(v)
-}

diff --git a/src/main/scala/aqua/parser/lexer/Token.scala b/src/main/scala/aqua/parser/lexer/Token.scala
index cda8cb4e..87f243b7 100644
--- a/src/main/scala/aqua/parser/lexer/Token.scala
+++ b/src/main/scala/aqua/parser/lexer/Token.scala
@@ -34,7 +34,7 @@ object Token {
   val `\n` : P[Unit] = P.char('\n')
   val `--` : P[Unit] = ` `.?.with1 *> P.string("--") <* ` `.?
   val ` \n` : P[Unit] = (` `.?.void *> (`--` *> P.charsWhile(_ != '\n')).?.void).with1 *> `\n`
-  val ` \n*` : P[Unit] = P.repAs[Unit, Unit](` \n`.backtrack, 1)(Accumulator0.unitAccumulator0)
+  val ` \n+` : P[Unit] = P.repAs[Unit, Unit](` \n`.backtrack, 1)(Accumulator0.unitAccumulator0)
   val `,` : P[Unit] = P.char(',') <* ` `.?
   val `.` : P[Unit] = P.char('.')
   val `"` : P[Unit] = P.char('"')
@@ -45,13 +45,13 @@ object Token {
   val `<-` : P[Unit] = (` `.?.with1 *> P.string("<-") <* ` `.?).backtrack

   def comma[T](p: P[T]): P[NonEmptyList[T]] =
-    P.repSep(p, `,` <* ` \n*`.rep0)
+    P.repSep(p, `,` <* ` \n+`.rep0)

   def comma0[T](p: P[T]): P0[List[T]] =
-    P.repSep0(p, `,` <* ` \n*`.rep0)
+    P.repSep0(p, `,` <* ` \n+`.rep0)

   def indented[T](p: P[T]): P[NonEmptyList[T]] =
-    ` `.flatMap(indent ⇒ p.map(NonEmptyList.one) ~ (` \n*` *> (P.string(indent) *> p).repSep0(` \n*`)).?).map {
+    ` `.flatMap(indent ⇒ p.map(NonEmptyList.one) ~ (` \n+` *> (P.string(indent) *> p).repSep0(` \n+`)).?).map {
       case (nel, l) ⇒ nel ++ l.getOrElse(Nil)
     }
 }

diff --git a/src/main/scala/aqua/parser/lexer/Type.scala b/src/main/scala/aqua/parser/lexer/Type.scala
index 1c56a732..440ae2c1 100644
--- a/src/main/scala/aqua/parser/lexer/Type.scala
+++ b/src/main/scala/aqua/parser/lexer/Type.scala
@@ -10,6 +10,7 @@ import cats.syntax.functor._
 sealed trait Type[F[_]] extends Token[F]
 sealed trait DataType[F[_]] extends Type[F]

+// TODO add F[Unit]
 case class ArrayType[F[_]](data: DataType[F]) extends DataType[F] {
   override def as[T](v: T)(implicit F: Functor[F]): F[T] = data.as(v)
 }

diff --git a/src/main/scala/aqua/parser/lift/Span.scala b/src/main/scala/aqua/parser/lift/Span.scala
index f35c1016..9d062612 100644
--- a/src/main/scala/aqua/parser/lift/Span.scala
+++ b/src/main/scala/aqua/parser/lift/Span.scala
@@ -67,6 +67,11 @@ object Span {
   }

   type F[T] = (Span, T)
+  //type F[T] = (Option[Ctx], Span, T)
+
+  //def mapK[K[_]](nat: F ~> K) => F[T] => K[T]
+  //def mapF(fn: F[Unit] => F[Unit]) token.setF(fn(token.unit))
+  //type K[T] = (Names, Span, T)

   implicit object spanComonad extends Comonad[F] {
     override def extract[A](x: F[A]): A = x._2

diff --git a/src/test/scala/aqua/parser/lexer/TokenSpec.scala b/src/test/scala/aqua/parser/lexer/TokenSpec.scala
index d116d805..0287a752 100644
--- a/src/test/scala/aqua/parser/lexer/TokenSpec.scala
+++ b/src/test/scala/aqua/parser/lexer/TokenSpec.scala
@@ -23,21 +23,21 @@ class TokenSpec extends AnyFlatSpec with Matchers with EitherValues {
   }

   "\\n* token" should "match the same strings" in {
-    ` \n*`.parseAll("\n") should be('right)
-    ` \n*`.parseAll(" \n") should be('right)
-    ` \n*`.parseAll(" \n") should be('right)
-    ` \n*`.parseAll(" \n") should be('right)
-    ` \n*`.parseAll("--comment\n") should be('right)
-    ` \n*`.parseAll(" --comment\n") should be('right)
-    ` \n*`.parseAll(" --comment\n") should be('right)
-    ` \n*`.parseAll(" --comment with many words\n") should be('right)
-    ` \n*`.parseAll(" --comment with many words \n") should be('right)
-    ` \n*`.parse(" --comment with many words \n").right.value should be(("", ()))
-    ` \n*`.parse(" --comment with many words \n ").right.value should be((" ", ()))
+    ` \n+`.parseAll("\n") should be('right)
+    ` \n+`.parseAll(" \n") should be('right)
+    ` \n+`.parseAll(" \n") should be('right)
+    ` \n+`.parseAll(" \n") should be('right)
+    ` \n+`.parseAll("--comment\n") should be('right)
+    ` \n+`.parseAll(" --comment\n") should be('right)
+    ` \n+`.parseAll(" --comment\n") should be('right)
+    ` \n+`.parseAll(" --comment with many words\n") should be('right)
+    ` \n+`.parseAll(" --comment with many words \n") should be('right)
+    ` \n+`.parse(" --comment with many words \n").right.value should be(("", ()))
+    ` \n+`.parse(" --comment with many words \n ").right.value should be((" ", ()))
   }

   "\\n* token" should "match multi-line comments" in {
-    ` \n*`.parseAll(""" -- comment line 1
+    ` \n+`.parseAll(""" -- comment line 1
 |-- line 2
 |
 | -- line 3