LNG-40 AIR validation on compilation (#546)

Dima 2022-08-22 16:37:02 +03:00 committed by GitHub
parent 2daf6ca422
commit db016288b0
25 changed files with 829 additions and 912 deletions

View File

@ -1,6 +1,6 @@
package aqua.backend.air
-import aqua.backend.{Backend, Generated, Version}
+import aqua.backend.{AirString, Backend, Generated, Version}
import aqua.res.AquaRes
import cats.syntax.show.*
@ -11,16 +11,15 @@ object AirBackend extends Backend {
override def generate(aqua: AquaRes): Seq[Generated] = {
val docs = s"""; This file is auto-generated. Do not edit manually: changes may be erased.
|; Generated by Aqua compiler: https://github.com/fluencelabs/aqua/.
|; If you find any bugs, please write an issue on GitHub: https://github.com/fluencelabs/aqua/issues
|; Aqua version: ${Version.version}
|
|""".stripMargin
-aqua.funcs.toList
-.map{ fr =>
-val airStr = docs + FuncAirGen(fr).generate.show
-Generated("." + fr.funcName + ext, airStr)
-}
+aqua.funcs.toList.map { fr =>
+val airStr = FuncAirGen(fr).generate.show
+Generated("." + fr.funcName + ext, airStr, AirString(fr.funcName, airStr) :: Nil)
+}
}
}

View File

@ -0,0 +1,3 @@
package aqua.backend
case class AirString(name: String, air: String)

View File

@ -6,4 +6,4 @@ package aqua.backend
* @param suffix extension or another info that will be added to a resulted file
* @param content compiled code
*/
-case class Generated(suffix: String, content: String)
+case class Generated(suffix: String, content: String, air: List[AirString])
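Note: the new `air` field carries the raw AIR of each exported function next to the rendered file content, which is what the validator consumes later in this commit. A minimal, hypothetical construction (the function name and AIR body below are made up):

import aqua.backend.{AirString, Generated}

// Illustration only: one generated file with one function's AIR attached.
val airBody = "(seq (null) (null))"
val generated = Generated(
  suffix = ".someFunc.air",
  content = airBody,
  air = AirString("someFunc", airBody) :: Nil
)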

View File

@ -6,22 +6,28 @@ import aqua.res.AquaRes
case class OutputFile(res: AquaRes) {
-def generate(types: Types, isJs: Boolean, isCommonJS: Boolean): String = {
+def generate(types: Types, isJs: Boolean, isCommonJS: Boolean): (List[AirString], String) = {
import types.*
val services = res.services
.map(s => OutputService(s, types))
.map(_.generate)
.toList
.mkString("\n\n")
-val functions =
-res.funcs.map(f => OutputFunc(f, types)).map(_.generate).toList.mkString("\n\n")
-s"""${Header.header(isJs, isCommonJS)}
-|
-|// Services
-|$services
-|// Functions
-|$functions
-|""".stripMargin
+val scripts =
+res.funcs.map(f => OutputFunc(f, types)).map(_.generate)
+val (airs, functions) = scripts.toList.unzip
+(
+airs,
+s"""${Header.header(isJs, isCommonJS)}
+|
+|// Services
+|$services
+|// Functions
+|${functions.mkString("\n\n")}
+|""".stripMargin
+)
}
}

View File

@ -22,25 +22,28 @@ case class OutputFunc(func: FuncRes, types: Types) {
import funcTypes.*
import TypeDefinition.*
-def generate: String = {
+def generate: (AirString, String) = {
val tsAir = FuncAirGen(func).generate
val codeLeftSpace = " " * 20
val script = tsAir.show.linesIterator.map(codeLeftSpace + _).mkString("\n")
val funcDef = FunctionDef(func)
+(
+AirString(func.funcName, script),
s"""${funcTypes.generate}
|export function ${func.funcName}(${typed("...args", "any")}) {
|
| let script = `
|$script
| `
| return callFunction(
| args,
| ${funcDef.asJson.deepDropNullValues.spaces4},
| script
| )
|}""".stripMargin
+)
}
}

View File

@ -19,20 +19,21 @@ case class JavaScriptBackend(isCommonJS: Boolean) extends Backend {
res.funcs.map(f => TypeScriptTypes.funcType(f)).map(_.generate).toList.mkString("\n")
val body = s"""${Header.header(true, false)}
|
|// Services
|$services
|
|// Functions
|$functions
|""".stripMargin
-Generated(tsExt, body)
+Generated(tsExt, body, Nil)
}
override def generate(res: AquaRes): Seq[Generated] =
if (res.isEmpty) Nil
else {
-Generated(ext, OutputFile(res).generate(EmptyTypes, true, isCommonJS)):: typesFile(res) :: Nil
+val (airs, script) = OutputFile(res).generate(EmptyTypes, true, isCommonJS)
+Generated(ext, script, airs) :: typesFile(res) :: Nil
}
}

View File

@ -9,5 +9,9 @@ object TypeScriptBackend extends Backend {
val ext = ".ts"
override def generate(res: AquaRes): Seq[Generated] =
-if (res.isEmpty) Nil else Generated(ext, OutputFile(res).generate(TypeScriptTypes, false, false)) :: Nil
+if (res.isEmpty) Nil
+else {
+val (airs, script) = OutputFile(res).generate(TypeScriptTypes, false, false)
+Generated(ext, script, airs) :: Nil
+}
}

View File

@ -0,0 +1,47 @@
package aqua.air
import aqua.backend.AirString
import aqua.js.Fluence
import cats.data.Validated.{invalid, validNec}
import cats.data.{Chain, NonEmptyChain, ValidatedNec}
import cats.effect.Async
import cats.syntax.traverse.*
import cats.syntax.functor.*
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import scala.concurrent.ExecutionContext
import scala.scalajs.js
object AirValidation {
// HACK: memoize doesn't work in scalajs, so, call this function once before `validate`
def init[F[_]: Async](): F[Unit] = {
Async[F].fromFuture(Fluence.start(js.undefined).toFuture.pure[F]).as(())
}
def validate[F[_]: Async](
airs: List[AirString]
): F[ValidatedNec[String, Unit]] =
Async[F].fromFuture {
Async[F].executionContext.map { implicit ec =>
for {
statuses <- airs
.map(a => Fluence.getPeer().internals.parseAst(a.air).toFuture.map(s => (a.name, s)))
.sequence
} yield {
val errors = NonEmptyChain.fromSeq(statuses.filterNot(_._2.success))
errors.map { errs =>
val errorsStrs = errs.map { case (fName, status) =>
s"Cannot compile AIR for '$fName' function: ${js.JSON.stringify(status.data)}\n\n" +
"This is unexpected error. Please, dump your Aqua code and make an issue here https://github.com/fluencelabs/aqua/issues."
}
invalid(errorsStrs)
}.getOrElse(validNec(()))
}
}
}
}
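Note: as the HACK comment above says, `init` must be called exactly once before any `validate` call, since the started peer is not memoized here. A hedged usage sketch with cats-effect IO (the AIR body is made up):

import aqua.air.AirValidation
import aqua.backend.AirString
import cats.effect.IO

// Illustration only: validate one made-up AIR script and print any errors.
val airs = List(AirString("someFunc", "(seq (null) (null))"))

val check: IO[Unit] =
  for {
    _ <- AirValidation.init[IO]()               // starts the Fluence peer once
    result <- AirValidation.validate[IO](airs)  // parses each script via parseAst
  } yield result.fold(
    errs => errs.toChain.toList.foreach(println), // accumulated error messages
    _ => ()
  )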

View File

@ -217,6 +217,15 @@ case class PeerConfig(
debug: js.UndefOr[Debug]
)
+trait AstStatus extends js.Object {
+def success: Boolean
+def data: js.Any
+}
+trait Internals extends js.Object {
+def parseAst(air: String): js.Promise[AstStatus]
+}
/**
* This class implements the Fluence protocol for javascript-based environments.
* It provides all the necessary features to communicate with Fluence network
@ -226,6 +235,7 @@ case class PeerConfig(
class FluencePeer extends js.Object {
def getStatus(): PeerStatus = js.native
def stop(): js.Promise[Unit] = js.native
+def internals: Internals = js.native
}
object V3 {
@ -258,7 +268,7 @@ object FluenceUtils {
@js.native
@JSImport("@fluencelabs/fluence", "Fluence")
object Fluence extends js.Object {
-def start(config: PeerConfig): js.Promise[js.Any] = js.native
+def start(config: js.UndefOrOps[PeerConfig]): js.Promise[js.Any] = js.native
def stop(): js.Promise[js.Any] = js.native
def getPeer(): FluencePeer = js.native
def getStatus(): PeerStatus = js.native

View File

@ -19,7 +19,8 @@ import cats.syntax.show.*
import scala.concurrent.duration.Duration
import scala.concurrent.{ExecutionContext, Future, Promise, TimeoutException}
import scala.scalajs.js
-import scala.scalajs.js.{JSON, JavaScriptException, timers}
+import scala.scalajs.js.JSConverters.*
+import scala.scalajs.js.{timers, JSON, JavaScriptException}
object FuncCaller {
@ -57,12 +58,14 @@ object FuncCaller {
)
_ <- Fluence
.start(
+Some(
PeerConfig(
config.common.multiaddr,
config.common.timeout.toMillis.toInt : js.UndefOr[Int],
keyPair,
Debug(printParticleId = config.common.flags.verbose, marineLogLevel = logLevel)
)
+).orUndefined
)
.toFuture
_ =
@ -90,6 +93,7 @@ object FuncCaller {
// use a timeout in finisher if we have an async function and it hangs on node's side
finisher = setTimeout(name, finisherFuture, config.common.timeout)
_ <- finisher
+_ <- Fluence.stop().toFuture
} yield validNec(()))
.recover(handleFuncCallErrors(name, config.common.timeout))
.pure[F]

View File

@ -53,10 +53,6 @@ object RunCommand extends Logging {
}.getOrElse(KeyPair.randomEd25519().toFuture)
}
-private def findFunction(contexts: Chain[AquaContext], funcName: String): Option[FuncArrow] =
-contexts
-.collectFirstSome(_.allFuncs.get(funcName))
/**
* Runs a function that is located in `input` file with FluenceJS SDK. Returns no output
* @param func

View File

@ -105,10 +105,6 @@ object ScriptOpts extends Logging {
Opts
.option[String]("script-id", "Script id to remove", "c")
-private def findFunction(contexts: Chain[AquaContext], funcName: String): Option[FuncArrow] =
-contexts
-.collectFirstSome(_.allFuncs.get(funcName))
def generateAir(callable: FuncArrow, transformConfig: TransformConfig): String = {
val funcRes = Transform.funcRes(callable, transformConfig).value
AirGen(funcRes.body).generate.show

View File

@ -26,7 +26,8 @@ object Test extends IOApp.Simple {
List(Path("./aqua")),
Option(Path("./target")),
TypeScriptBackend,
-TransformConfig(wrapWithXor = false)
+TransformConfig(wrapWithXor = false),
+false
)
.map {
case Validated.Invalid(errs) =>

View File

@ -0,0 +1,15 @@
package aqua.air
import aqua.backend.AirString
import cats.data.ValidatedNec
import cats.effect.Async
import cats.data.Validated.validNec
import scala.concurrent.ExecutionContext
object AirValidation {
def init[F[_]: Async](): F[Unit] = Async[F].pure(())
def validate[F[_]: Async](airs: List[AirString]): F[ValidatedNec[String, Unit]] = Async[F].pure(validNec(()))
}

View File

@ -148,11 +148,11 @@ class SourcesSpec extends AsyncFlatSpec with Matchers {
sourceGen = new AquaFileSources[IO](path, Nil)
content = "some random content"
compiled = AquaCompiled[FileModuleId](
FileModuleId(filePath),
-Seq(Generated("_hey.custom", content)),
+Seq(Generated("_hey.custom", content, Nil)),
1,
1
)
resolved <- sourceGen.write(targetPath)(compiled)
_ = {
resolved.size shouldBe 1

View File

@ -21,7 +21,7 @@ class WriteFileSpec extends AnyFlatSpec with Matchers {
val bc = TransformConfig()
AquaPathCompiler
-.compileFilesTo[IO](src, List.empty, Option(targetTs), TypeScriptBackend, bc)
+.compileFilesTo[IO](src, List.empty, Option(targetTs), TypeScriptBackend, bc, false)
.unsafeRunSync()
.leftMap { err =>
println(err)
@ -33,7 +33,7 @@ class WriteFileSpec extends AnyFlatSpec with Matchers {
Files[IO].deleteIfExists(targetTsFile).unsafeRunSync()
AquaPathCompiler
-.compileFilesTo[IO](src, List.empty, Option(targetJs), JavaScriptBackend(false), bc)
+.compileFilesTo[IO](src, List.empty, Option(targetJs), JavaScriptBackend(false), bc, false)
.unsafeRunSync()
.leftMap { err =>
println(err)
@ -45,7 +45,7 @@ class WriteFileSpec extends AnyFlatSpec with Matchers {
Files[IO].deleteIfExists(targetJsFile).unsafeRunSync()
AquaPathCompiler
-.compileFilesTo[IO](src, List.empty, Option(targetAir), AirBackend, bc)
+.compileFilesTo[IO](src, List.empty, Option(targetAir), AirBackend, bc, false)
.unsafeRunSync()
.leftMap { err =>
println(err)

View File

@ -122,6 +122,12 @@ object AppOpts {
}
.withDefault(List.empty)
+val noAirValidation: Opts[Boolean] =
+Opts
+.flag("no-air-validation", "Don't parse and validate AIR after compilation")
+.map(_ => false)
+.withDefault(true)
val compileToAir: Opts[Boolean] =
Opts
.flag("air", "Generate .air file instead of .ts", "a")
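Note: with decline's combinators, the new flag above evaluates to true when --no-air-validation is absent and to false when it is passed. A standalone sketch of that pattern (the command name is made up):

import com.monovore.decline.{Command, Opts}

// Illustration only: mirrors the .flag(...).map(_ => false).withDefault(true) pattern.
val noAirValidation: Opts[Boolean] =
  Opts
    .flag("no-air-validation", "Don't parse and validate AIR after compilation")
    .map(_ => false)     // flag present -> false
    .withDefault(true)   // flag absent  -> true

val cmd = Command("demo", "flag demo")(noAirValidation)
// cmd.parse(Seq())                      == Right(true)
// cmd.parse(Seq("--no-air-validation")) == Right(false)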

View File

@ -98,7 +98,8 @@ object AquaCli extends IOApp with Logging {
FluenceOpts.logLevelOpt,
constantOpts,
dryOpt,
-scriptOpt
+scriptOpt,
+noAirValidation
).mapN {
case (
inputF,
@ -113,7 +114,8 @@ object AquaCli extends IOApp with Logging {
logLevel,
constants,
isDryRun,
-isScheduled
+isScheduled,
+disableAirValidation
) =>
val toAir = toAirOp || isScheduled
val noXor = noXorOp || isScheduled
@ -156,7 +158,8 @@ object AquaCli extends IOApp with Logging {
imports,
resultOutput,
targetToBackend(target),
-bc
+bc,
+disableAirValidation
)
}
}

View File

@ -1,9 +1,18 @@
package aqua
import aqua.backend.{Backend, Generated}
-import aqua.compiler.{AquaCompiled, AquaCompiler, AquaCompilerConf, AquaError, CompilerAPI}
+import aqua.compiler.{
+AirValidator,
+AquaCompiled,
+AquaCompiler,
+AquaCompilerConf,
+AquaError,
+CompilerAPI
+}
import aqua.files.{AquaFileSources, FileModuleId}
import aqua.io.*
+import aqua.air.AirValidation
+import aqua.backend.AirString
import aqua.model.AquaContext
import aqua.model.transform.TransformConfig
import aqua.model.transform.Transform
@ -14,6 +23,7 @@ import aqua.parser.{Ast, LexerError, Parser}
import aqua.raw.ConstantRaw
import aqua.res.AquaRes
import cats.data.*
+import cats.effect.Async
import cats.parse.LocationMap
import cats.syntax.applicative.*
import cats.syntax.functor.*
@ -22,6 +32,7 @@ import cats.syntax.show.*
import cats.{~>, Applicative, Eval, Monad, Show}
import fs2.io.file.{Files, Path}
import scribe.Logging
+import cats.data.Validated.validNec
object AquaPathCompiler extends Logging {
@ -33,21 +44,38 @@ object AquaPathCompiler extends Logging {
* @param transformConfig transformation configuration for a model
* @return errors or result messages
*/
-def compileFilesTo[F[_]: AquaIO: Monad: Files](
+def compileFilesTo[F[_]: AquaIO: Monad: Files: Async](
srcPath: Path,
imports: List[Path],
targetPath: Option[Path],
backend: Backend,
-transformConfig: TransformConfig
+transformConfig: TransformConfig,
+disableAirValidation: Boolean
): F[ValidatedNec[String, Chain[String]]] = {
import ErrorRendering.showError
(for {
prelude <- Prelude.init()
sources = new AquaFileSources[F](srcPath, imports ++ prelude.importPaths)
+validator =
+if (disableAirValidation) {
+new AirValidator[F] {
+override def init(): F[Unit] = Applicative[F].pure(())
+override def validate(airs: List[AirString]): F[ValidatedNec[String, Unit]] =
+Applicative[F].pure(validNec(()))
+}
+} else {
+new AirValidator[F] {
+override def init(): F[Unit] = AirValidation.init[F]()
+override def validate(
+airs: List[AirString]
+): F[ValidatedNec[String, Unit]] = AirValidation.validate[F](airs)
+}
+}
compiler <- CompilerAPI
.compileTo[F, AquaFileError, FileModuleId, FileSpan.F, String](
sources,
SpanParser.parser,
+validator,
new Backend.Transform:
override def transform(ex: AquaContext): AquaRes =
Transform.contextRes(ex, transformConfig)
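Note: every caller of `compileFilesTo` now supplies the extra boolean, as in the updated WriteFileSpec above. A hedged invocation sketch (paths and import locations are assumptions, and an AquaIO[F] instance must be in scope, as in the CLI and tests):

import aqua.AquaPathCompiler
import aqua.backend.ts.TypeScriptBackend
import aqua.model.transform.TransformConfig
import cats.effect.IO
import fs2.io.file.Path

// Illustration only; the last argument is `disableAirValidation`.
val compiled =
  AquaPathCompiler.compileFilesTo[IO](
    Path("./aqua"),
    List.empty,
    Option(Path("./target/ts")),
    TypeScriptBackend,
    TransformConfig(),
    false
  )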

View File

@ -65,6 +65,8 @@ object ErrorRendering {
}
case SourcesErr(err) =>
Console.RED + err.showForConsole + Console.RESET
+case AirValidationError(errors) =>
+Console.RED + errors.toChain.toList.mkString("\n") + Console.RESET
case ResolveImportsErr(_, token, err) =>
val span = token.unit._1
showForConsole("Cannot resolve imports", span, err.showForConsole :: Nil)

View File

@ -0,0 +1,13 @@
package aqua.compiler
import aqua.backend.AirString
import cats.data.ValidatedNec
trait AirValidator[F[_]] {
def init(): F[Unit]
def validate(
airs: List[AirString]
): F[ValidatedNec[String, Unit]]
}
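Note: environments that cannot run the Fluence JS peer can satisfy this trait with a pass-through implementation, which is what the CLI wiring above does when validation is disabled. A minimal sketch:

import aqua.backend.AirString
import aqua.compiler.AirValidator
import cats.Applicative
import cats.data.Validated.validNec
import cats.data.ValidatedNec

// Accepts every AIR script without parsing anything.
def noopValidator[F[_]: Applicative]: AirValidator[F] = new AirValidator[F] {
  override def init(): F[Unit] = Applicative[F].pure(())
  override def validate(airs: List[AirString]): F[ValidatedNec[String, Unit]] =
    Applicative[F].pure(validNec(()))
}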

View File

@ -3,6 +3,7 @@ package aqua.compiler
import aqua.parser.ParserError
import aqua.parser.lexer.Token
import aqua.semantics.SemanticError
+import cats.data.NonEmptyChain
trait AquaError[I, E, S[_]]
case class SourcesErr[I, E, S[_]](err: E) extends AquaError[I, E, S]
@ -16,3 +17,4 @@ case class CycleError[I, E, S[_]](modules: List[I]) extends AquaError[I, E, S]
case class CompileError[I, E, S[_]](err: SemanticError[S]) extends AquaError[I, E, S]
case class OutputError[I, E, S[_]](compiled: AquaCompiled[I], err: E) extends AquaError[I, E, S]
+case class AirValidationError[I, E, S[_]](errors: NonEmptyChain[String]) extends AquaError[I, E, S]

View File

@ -1,6 +1,6 @@
package aqua.compiler
-import aqua.backend.Backend
+import aqua.backend.{AirString, Backend}
import aqua.linker.{AquaModule, Linker, Modules}
import aqua.model.AquaContext
import aqua.parser.lift.{LiftParser, Span}
@ -12,7 +12,7 @@ import aqua.semantics.{CompilerState, LspSemantics, RawSemantics, Semantics}
import aqua.semantics.header.{HeaderHandler, HeaderSem}
import aqua.semantics.lsp.LspContext
import cats.data.*
-import cats.data.Validated.{Invalid, Valid, validNec}
+import cats.data.Validated.{validNec, Invalid, Valid, invalid}
import cats.parse.Parser0
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
@ -134,34 +134,53 @@ object CompilerAPI extends Logging {
def compile[F[_]: Monad, E, I: Order, S[_]: Comonad](
sources: AquaSources[F, E, I],
parser: I => String => ValidatedNec[ParserError[S], Ast[S]],
+airValidator: AirValidator[F],
backend: Backend.Transform,
config: AquaCompilerConf
): F[ValidatedNec[AquaError[I, E, S], Chain[AquaCompiled[I]]]] = {
val compiler = getAquaCompiler[F, E, I, S](config)
-compiler
-.compileRaw(sources, parser)
-.map(_.andThen { filesWithContext =>
-toAquaProcessed(filesWithContext)
-})
-.map(_.map { compiled =>
-compiled.map { ap =>
-logger.trace("generating output...")
-val res = backend.transform(ap.context)
-val compiled = backend.generate(res)
-AquaCompiled(ap.id, compiled, res.funcs.length.toInt, res.services.length.toInt)
+for {
+compiledV <- compiler
+.compileRaw(sources, parser)
+.map(_.andThen { filesWithContext =>
+toAquaProcessed(filesWithContext)
+})
+_ <- airValidator.init()
+result <- compiledV.map { compiled =>
+compiled.map { ap =>
+logger.trace("generating output...")
+val res = backend.transform(ap.context)
+val compiled = backend.generate(res)
+airValidator
+.validate(
+compiled.toList.flatMap(_.air)
+)
+.map(
+_.bimap(
+errs => NonEmptyChain.one(AirValidationError(errs): AquaError[I, E, S]),
+_ =>
+AquaCompiled(ap.id, compiled, res.funcs.length.toInt, res.services.length.toInt)
+)
+)
+}.sequence.map(_.sequence)
+} match {
+case Valid(f) => f
+case Invalid(e) =>
+invalid[NonEmptyChain[AquaError[I, E, S]], Chain[AquaCompiled[I]]](e).pure[F]
}
-})
+} yield result
}
def compileTo[F[_]: Monad, E, I: Order, S[_]: Comonad, T](
sources: AquaSources[F, E, I],
parser: I => String => ValidatedNec[ParserError[S], Ast[S]],
+airValidator: AirValidator[F],
backend: Backend.Transform,
config: AquaCompilerConf,
write: AquaCompiled[I] => F[Seq[Validated[E, T]]]
): F[ValidatedNec[AquaError[I, E, S], Chain[T]]] =
-compile[F, E, I, S](sources, parser, backend, config).flatMap {
+compile[F, E, I, S](sources, parser, airValidator, backend, config).flatMap {
case Valid(compiled) =>
compiled.map { ac =>
write(ac).map(

npm/package-lock.json (generated, 1399 lines changed): diff suppressed because it is too large.

View File

@ -23,7 +23,7 @@
"dependencies": {
"@fluencelabs/aqua-ipfs": "0.5.2",
"@fluencelabs/aqua-lib": "0.5.1",
-"@fluencelabs/fluence": "0.23.1",
+"@fluencelabs/fluence": "0.23.3",
"@fluencelabs/fluence-network-environment": "1.0.13",
"ipfs-http-client": "50.1.2"
},