mirror of https://github.com/fluencelabs/aqua.git
synced 2025-04-24 22:42:13 +00:00

DXJ-21 Save token definitions for LSP (#514)

This commit is contained in:
parent 59419b4e1d
commit 6064659610
@@ -1,5 +1,6 @@
package aqua.backend

import aqua.model.AquaContext
import aqua.res.AquaRes

/**
@@ -15,3 +16,10 @@ trait Backend {
   */
  def generate(aqua: AquaRes): Seq[Generated]
}

object Backend {

  trait Transform extends Backend {
    def transform(ex: AquaContext): AquaRes
  }
}
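
The new Backend.Transform hook lets a generator backend receive the compiled AquaContext and turn it into AquaRes itself, instead of the compiler calling Transform.contextRes directly. A minimal sketch of adapting an existing Backend, mirroring what AquaPathCompiler does further down; the helper name withTransform is ours, not part of the commit:

// Sketch only: adapt a plain Backend into Backend.Transform via a TransformConfig.
import aqua.backend.{Backend, Generated}
import aqua.model.AquaContext
import aqua.model.transform.{Transform, TransformConfig}
import aqua.res.AquaRes

def withTransform(backend: Backend, config: TransformConfig): Backend.Transform =
  new Backend.Transform {
    // Produce the resolved model from the semantic context
    override def transform(ex: AquaContext): AquaRes =
      Transform.contextRes(ex, config)

    // Delegate code generation to the wrapped backend
    override def generate(aqua: AquaRes): Seq[Generated] =
      backend.generate(aqua)
  }
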
@@ -185,7 +185,7 @@ lazy val compiler = crossProject(JVMPlatform, JSPlatform)
  .crossType(CrossType.Pure)
  .in(file("compiler"))
  .settings(commons: _*)
  .dependsOn(semantics, linker, backend)
  .dependsOn(semantics, linker, backend, transform % Test)

lazy val backend = crossProject(JVMPlatform, JSPlatform)
  .withoutSuffixFor(JVMPlatform)
@@ -197,14 +197,14 @@ lazy val backend = crossProject(JVMPlatform, JSPlatform)
    buildInfoKeys := Seq[BuildInfoKey](version),
    buildInfoPackage := "aqua.backend"
  )
  .dependsOn(transform)
  .dependsOn(res)

lazy val `backend-air` = crossProject(JVMPlatform, JSPlatform)
  .withoutSuffixFor(JVMPlatform)
  .crossType(CrossType.Pure)
  .in(file("backend/air"))
  .settings(commons: _*)
  .dependsOn(backend)
  .dependsOn(backend, transform)

lazy val `backend-ts` = crossProject(JVMPlatform, JSPlatform)
  .withoutSuffixFor(JVMPlatform)
@@ -1,12 +1,13 @@
package aqua

import aqua.compiler.AquaCompiler
import aqua.compiler.{AquaCompiler, AquaCompilerConf}
import aqua.ErrorRendering.showError
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
import aqua.io.AquaFileError
import aqua.model.{AquaContext, FuncArrow}
import aqua.model.transform.TransformConfig
import aqua.parser.lift.FileSpan
import aqua.raw.ConstantRaw
import aqua.run.RunCommand.logger
import cats.data.{Chain, Validated, ValidatedNec}
import cats.data.Validated.{invalidNec, validNec}
@@ -53,14 +54,14 @@ class FuncCompiler[F[_]: Files: AquaIO: Async](
          .compileToContext[F, AquaFileError, FileModuleId, FileSpan.F](
            sources,
            SpanParser.parser,
            transformConfig
            AquaCompilerConf(transformConfig.constantsList)
          )
          .map(_.leftMap(_.map(_.show)))
      )
      (compileTime, contextV) = compileResult
    } yield {
      logger.debug(s"Compile time: ${compileTime.toMillis}ms")
      contextV.andThen(c => findFunction(c, func))
      contextV.andThen(c => findFunction(c._2, func))
    }
  }
}
@@ -1,14 +1,18 @@
package aqua

import aqua.backend.Backend
import aqua.compiler.{AquaCompiled, AquaCompiler, AquaError}
import aqua.backend.{Backend, Generated}
import aqua.compiler.{AquaCompiled, AquaCompiler, AquaCompilerConf, AquaError}
import aqua.files.{AquaFileSources, FileModuleId}
import aqua.io.*
import aqua.model.AquaContext
import aqua.model.transform.TransformConfig
import aqua.model.transform.Transform
import aqua.parser.lift.LiftParser.LiftErrorOps
import aqua.parser.lift.Span.spanLiftParser
import aqua.parser.lift.{FileSpan, LiftParser, Span}
import aqua.parser.{Ast, LexerError, Parser}
import aqua.raw.ConstantRaw
import aqua.res.AquaRes
import cats.data.*
import cats.parse.LocationMap
import cats.syntax.applicative.*
@@ -44,8 +48,13 @@ object AquaPathCompiler extends Logging {
        .compileTo[F, AquaFileError, FileModuleId, FileSpan.F, String](
          sources,
          SpanParser.parser,
          backend,
          transformConfig,
          new Backend.Transform:
            override def transform(ex: AquaContext): AquaRes =
              Transform.contextRes(ex, transformConfig)

            override def generate(aqua: AquaRes): Seq[Generated] = backend.generate(aqua)
          ,
          AquaCompilerConf(transformConfig.constantsList),
          targetPath.map(sources.write).getOrElse(dry[F])
        )
    } yield {
@@ -1,16 +1,14 @@
package aqua.compiler

import aqua.backend.Backend
import aqua.linker.Linker
import aqua.linker.{AquaModule, Linker, Modules}
import aqua.model.AquaContext
import aqua.model.transform.TransformConfig
import aqua.model.transform.Transform
import aqua.parser.lift.{LiftParser, Span}
import aqua.parser.{Ast, ParserError}
import aqua.raw.RawPart.Parts
import aqua.raw.{RawContext, RawPart}
import aqua.res.AquaRes
import aqua.semantics.Semantics
import aqua.semantics.{CompilerState, Semantics}
import aqua.semantics.header.HeaderSem
import cats.data.*
import cats.data.Validated.{validNec, Invalid, Valid}
@@ -20,31 +18,93 @@ import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.monoid.*
import cats.syntax.traverse.*
import cats.syntax.semigroup.*
import cats.{~>, Comonad, Monad, Monoid, Order}
import scribe.Logging

object AquaCompiler extends Logging {
trait AquaCompiler[C] extends Logging {

  type Err[I, E, S[_]] = AquaError[I, E, S]
  // TODO: find the way to replace RawContext with C; maybe move some functions to RawContext-specific subclasses, etc.
  type Ctx[I] = NonEmptyMap[I, RawContext]
  // TODO: remove CompilerState[S] from the right
  type ValidatedCtx[I, E, S[_]] = ValidatedNec[Err[I, E, S], (CompilerState[S], Ctx[I])]
  type ValidatedCtxT[I, E, S[_]] = ValidatedCtx[I, E, S] => ValidatedCtx[I, E, S]

  private def linkModules[E, I: Order, S[_]: Comonad](
    modules: Modules[
      I,
      Err[I, E, S],
      ValidatedCtxT[I, E, S]
    ],
    cycleError: List[AquaModule[I, Err[I, E, S], ValidatedCtxT[I, E, S]]] => Err[I, E, S]
  )(implicit
    rc: Monoid[RawContext]
  ): ValidatedNec[Err[I, E, S], (Chain[CompilerState[S]], Chain[AquaProcessed[I]])] = {
    logger.trace("linking modules...")

    Linker
      .link(
        modules,
        cycleError,
        // By default, provide an empty context for this module's id
        i => validNec((CompilerState[S](), NonEmptyMap.one(i, Monoid.empty[RawContext])))
      )
      .andThen { filesWithContext =>
        logger.trace("linking finished")
        filesWithContext
          .foldLeft[
            (
              ValidatedNec[Err[I, E, S], (Chain[CompilerState[S]], Chain[AquaProcessed[I]])],
              AquaContext.Cache
            )
          ](
            validNec((Chain.nil, Chain.nil)) -> AquaContext.Cache()
          ) {
            case ((acc, cache), (i, Valid(result))) =>
              val (processed, cacheProcessed) =
                result._2.toNel.toList.foldLeft[
                  ((Chain[CompilerState[S]], Chain[AquaProcessed[I]]), AquaContext.Cache)
                ](
                  (Chain.nil, Chain.nil) -> cache
                ) { case ((acc, accCache), (i, c)) =>
                  logger.trace(s"Going to prepare exports for ${i}...")
                  val (exp, expCache) = AquaContext.exportsFromRaw(c, accCache)
                  logger.trace(s"AquaProcessed prepared for ${i}")
                  (acc._1 :+ result._1, acc._2 :+ AquaProcessed(i, exp)) -> expCache
                }
              acc.combine(
                validNec(
                  processed
                )
              ) -> cacheProcessed
            case ((acc, cache), (_, Invalid(errs))) =>
              acc.combine(Invalid(errs)) -> cache
          }
          ._1

      }
  }

  private def compileRaw[F[_]: Monad, E, I: Order, S[_]: Comonad](
    sources: AquaSources[F, E, I],
    parser: I => String => ValidatedNec[ParserError[S], Ast[S]],
    config: TransformConfig
  ): F[ValidatedNec[AquaError[I, E, S], Chain[AquaProcessed[I]]]] = {
    config: AquaCompilerConf
  ): F[ValidatedNec[AquaError[I, E, S], (Chain[CompilerState[S]], Chain[AquaProcessed[I]])]] = {
    implicit val rc: Monoid[RawContext] = RawContext
      .implicits(
        RawContext.blank
          .copy(parts = Chain.fromSeq(config.constantsList).map(const => RawContext.blank -> const))
      )
      .rawContextMonoid
    type Err = AquaError[I, E, S]
    type Ctx = NonEmptyMap[I, RawContext]
    type ValidatedCtx = ValidatedNec[Err, Ctx]
    type CErr = Err[I, E, S]
    type VCtx = ValidatedCtx[I, E, S]
    logger.trace("starting resolving sources...")
    new AquaParser[F, E, I, S](sources, parser)
      .resolve[ValidatedCtx](mod =>
      .resolve[VCtx](mod =>
        context =>
          // Context with prepared imports
          context.andThen(ctx =>
          context.andThen { case (_, ctx) =>
            // To manage imports, exports run HeaderSem
            HeaderSem
              .sem(
@@ -62,52 +122,18 @@ object AquaCompiler extends Logging {
                mod.body,
                headerSem.initCtx
              )
              // Handle exports, declares – finalize the resulting context
              .andThen(headerSem.finCtx)
              .map(rc => NonEmptyMap.one(mod.id, rc))
              // Handle exports, declares - finalize the resulting context
              .andThen { case (state, ctx) =>
                headerSem.finCtx(ctx).map(r => (state, r))
              }
              .map { case (state, rc) => (state, NonEmptyMap.one(mod.id, rc)) }
          }
          // The whole chain returns a semantics error finally
          .leftMap(_.map[Err](CompileError(_)))
      )
          .leftMap(_.map[CErr](CompileError(_)))
        }
      )
      .map(
        _.andThen { modules =>
          logger.trace("linking modules...")
          Linker
            .link[I, AquaError[I, E, S], ValidatedCtx](
              modules,
              cycle => CycleError[I, E, S](cycle.map(_.id)),
              // By default, provide an empty context for this module's id
              i => validNec(NonEmptyMap.one(i, Monoid.empty[RawContext]))
            )
            .andThen { filesWithContext =>
              logger.trace("linking finished")
              filesWithContext
                .foldLeft[(ValidatedNec[Err, Chain[AquaProcessed[I]]], AquaContext.Cache)](
                  validNec(Chain.nil) -> AquaContext.Cache()
                ) {
                  case ((acc, cache), (i, Valid(context))) =>
                    val (processed, cacheProcessed) =
                      context.toNel.toList.foldLeft[(Chain[AquaProcessed[I]], AquaContext.Cache)](
                        Chain.nil -> cache
                      ) { case ((acc, accCache), (i, c)) =>
                        logger.trace(s"Going to prepare exports for ${i}...")
                        val (exp, expCache) = AquaContext.exportsFromRaw(c, accCache)
                        logger.trace(s"AquaProcessed prepared for ${i}")
                        (acc :+ AquaProcessed(i, exp)) -> expCache
                      }
                    acc.combine(
                      validNec(
                        processed
                      )
                    ) -> cacheProcessed
                  case ((acc, cache), (_, Invalid(errs))) =>
                    acc.combine(Invalid(errs)) -> cache
                }
                ._1

            }
        }
        _.andThen { modules => linkModules(modules, cycle => CycleError[I, E, S](cycle.map(_.id))) }
      )
  }

@@ -115,13 +141,16 @@ object AquaCompiler extends Logging {
  def compileToContext[F[_]: Monad, E, I: Order, S[_]: Comonad](
    sources: AquaSources[F, E, I],
    parser: I => String => ValidatedNec[ParserError[S], Ast[S]],
    config: TransformConfig
  ): F[ValidatedNec[AquaError[I, E, S], Chain[AquaContext]]] = {
    compileRaw(sources, parser, config).map(_.map {
      _.map { ap =>
        logger.trace("generating output...")
        ap.context
      }
    config: AquaCompilerConf
  ): F[ValidatedNec[AquaError[I, E, S], (Chain[CompilerState[S]], Chain[AquaContext])]] = {
    compileRaw(sources, parser, config).map(_.map { case (st, compiled) =>
      (
        st,
        compiled.map { ap =>
          logger.trace("generating output...")
          ap.context
        }
      )
    })
  }
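
compileToContext now returns the per-module CompilerStates alongside the produced contexts: CLI callers drop the states (FuncCompiler takes ._2 above), while the LSP mines them for token locations. A hedged sketch of splitting the pair; the helper name splitResult is ours:

// Sketch only: what the two kinds of callers take from the new result pair.
import aqua.model.AquaContext
import aqua.parser.lexer.Token
import aqua.semantics.CompilerState
import aqua.semantics.lsp.TokenInfo
import cats.data.Chain

def splitResult[S[_]](
  states: Chain[CompilerState[S]],
  contexts: Chain[AquaContext]
): (Chain[AquaContext], List[(Token[S], TokenInfo[S])]) = {
  // LSP-style: gather token-to-definition info accumulated during semantics
  val locations = states.toList.flatMap(s => s.names.locations ++ s.abilities.locations)
  // CLI-style: the contexts are all that's needed for codegen
  (contexts, locations)
}
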
@@ -129,13 +158,13 @@ object AquaCompiler extends Logging {
  def compile[F[_]: Monad, E, I: Order, S[_]: Comonad](
    sources: AquaSources[F, E, I],
    parser: I => String => ValidatedNec[ParserError[S], Ast[S]],
    backend: Backend,
    config: TransformConfig
    backend: Backend.Transform,
    config: AquaCompilerConf
  ): F[ValidatedNec[AquaError[I, E, S], Chain[AquaCompiled[I]]]] = {
    compileRaw(sources, parser, config).map(_.map {
      _.map { ap =>
    compileRaw(sources, parser, config).map(_.map { case (_, compiled) =>
      compiled.map { ap =>
        logger.trace("generating output...")
        val res = Transform.contextRes(ap.context, config)
        val res = backend.transform(ap.context)
        val compiled = backend.generate(res)
        AquaCompiled(ap.id, compiled, res.funcs.length.toInt, res.services.length.toInt)
      }
@@ -145,8 +174,8 @@ object AquaCompiler extends Logging {
  def compileTo[F[_]: Monad, E, I: Order, S[_]: Comonad, T](
    sources: AquaSources[F, E, I],
    parser: I => String => ValidatedNec[ParserError[S], Ast[S]],
    backend: Backend,
    config: TransformConfig,
    backend: Backend.Transform,
    config: AquaCompilerConf,
    write: AquaCompiled[I] => F[Seq[Validated[E, T]]]
  ): F[ValidatedNec[AquaError[I, E, S], Chain[T]]] =
    compile[F, E, I, S](sources, parser, backend, config).flatMap {
@@ -173,3 +202,5 @@ object AquaCompiler extends Logging {
        Validated.invalid[NonEmptyChain[AquaError[I, E, S]], Chain[T]](errs).pure[F]
      }
}

object AquaCompiler extends AquaCompiler[RawContext]
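
Taken together, a caller of the reshaped API now passes a Backend.Transform and an AquaCompilerConf where it used to pass a Backend and a TransformConfig. A hedged sketch; sources, parser, backend and write stand for values a real caller already has, and withTransform is the illustrative helper from the Backend sketch above:

// Sketch only: wiring the new compile entry point.
val transformConfig = TransformConfig()

AquaCompiler.compileTo(
  sources,                                  // AquaSources[F, E, I]
  parser,                                   // I => String => ValidatedNec[ParserError[S], Ast[S]]
  withTransform(backend, transformConfig),  // Backend.Transform
  AquaCompilerConf(transformConfig.constantsList),
  write                                     // AquaCompiled[I] => F[Seq[Validated[E, T]]]
)
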
10 compiler/src/main/scala/aqua/compiler/AquaCompilerConf.scala (new file)
@@ -0,0 +1,10 @@
package aqua.compiler

import aqua.raw.ConstantRaw

/**
 * What the compiler should care about during compilation – before a generator backend takes its role
 *
 * @param constantsList List of known constants
 */
case class AquaCompilerConf(constantsList: List[ConstantRaw] = ConstantRaw.defaultConstants(None))
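
AquaCompilerConf is the compiler-facing slice of what TransformConfig used to carry. A small sketch of the two ways it gets built in this commit, default constants versus constants taken from a TransformConfig:

import aqua.compiler.AquaCompilerConf
import aqua.model.transform.TransformConfig

// Default: the standard constant set, with no relay variable
val defaultConf = AquaCompilerConf()

// As the CLI does: reuse the constants a TransformConfig already resolved
val transformConfig = TransformConfig()
val cliConf = AquaCompilerConf(transformConfig.constantsList)
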
@@ -11,7 +11,7 @@ import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.traverse.*
import cats.{Comonad, Monad, ~>}
import cats.{~>, Comonad, Monad}
import scribe.Logging

// TODO: add tests
@@ -30,25 +30,27 @@ import cats.syntax.show.*
class AquaCompilerSpec extends AnyFlatSpec with Matchers {

  private def compileToContext(src: Map[String, String], imports: Map[String, String]) =
    AquaCompiler.compileToContext[Id, String, String, Span.S](
      new AquaSources[Id, String, String] {
    AquaCompiler
      .compileToContext[Id, String, String, Span.S](
        new AquaSources[Id, String, String] {

        override def sources: Id[ValidatedNec[String, Chain[(String, String)]]] =
          Validated.validNec(Chain.fromSeq(src.toSeq))
          override def sources: Id[ValidatedNec[String, Chain[(String, String)]]] =
            Validated.validNec(Chain.fromSeq(src.toSeq))

        override def resolveImport(from: String, imp: String): Id[ValidatedNec[String, String]] =
          Validated.validNec(imp)
          override def resolveImport(from: String, imp: String): Id[ValidatedNec[String, String]] =
            Validated.validNec(imp)

        override def load(file: String): Id[ValidatedNec[String, String]] =
          Validated.fromEither(
            (imports ++ src)
              .get(file)
              .toRight(NonEmptyChain.one(s"Cannot load imported file $file"))
          )
      },
      id => txt => Parser.parse(Parser.parserSchema)(txt),
      TransformConfig(wrapWithXor = false)
    )
          override def load(file: String): Id[ValidatedNec[String, String]] =
            Validated.fromEither(
              (imports ++ src)
                .get(file)
                .toRight(NonEmptyChain.one(s"Cannot load imported file $file"))
            )
        },
        id => txt => Parser.parse(Parser.parserSchema)(txt),
        AquaCompilerConf()
      )
      .map(_._2)

  "aqua compiler" should "compile a simple snipped to the right context" in {
@@ -3,7 +3,7 @@ package aqua.lsp
import aqua.compiler.*
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
import aqua.io.*
import aqua.model.transform.TransformConfig
import aqua.parser.lexer.Token
import aqua.parser.lift.{FileSpan, Span}
import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
import aqua.semantics.{HeaderError, RulesViolated, WrongAST}
@@ -17,10 +17,30 @@ import scribe.Logging

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.scalajs.js
import scala.scalajs.js.JSConverters.*
import scala.scalajs.js.annotation.*
import scala.scalajs.js.{undefined, UndefOr}

@JSExportAll
case class CompilationResult(
  errors: js.Array[ErrorInfo],
  locations: js.Array[TokenLink]
)

@JSExportAll
case class TokenLocation(name: String, start: Int, end: Int)

@JSExportAll
case class TokenLink(current: TokenLocation, definition: TokenLocation)

object TokenLocation {

  def apply(span: FileSpan): TokenLocation = {
    TokenLocation(span.name, span.span.startIndex, span.span.endIndex)
  }
}

@JSExportAll
case class ErrorInfo(start: Int, end: Int, message: String, location: UndefOr[String])

@@ -95,16 +115,17 @@ object AquaLSP extends App with Logging {
  def compile(
    pathStr: String,
    imports: scalajs.js.Array[String]
  ): scalajs.js.Promise[scalajs.js.Array[ErrorInfo]] = {
  ): scalajs.js.Promise[CompilationResult] = {

    logger.debug(s"Compiling '$pathStr' with imports: $imports")

    implicit val aio: AquaIO[IO] = new AquaFilesIO[IO]

    val sources = new AquaFileSources[IO](Path(pathStr), imports.toList.map(Path.apply))
    val config = TransformConfig()
    val config = AquaCompilerConf()

    val proc = for {
      // TODO: should run a custom AquaCompiler that collects RawContext + token definitions from the CompilerState to enable cross-file ctrl+click support
      res <- AquaCompiler
        .compileToContext[IO, AquaFileError, FileModuleId, FileSpan.F](
          sources,
@@ -114,13 +135,26 @@ object AquaLSP extends App with Logging {
    } yield {
      logger.debug("Compilation done.")
      val result = res match {
        case Valid(_) =>
        case Valid((state, _)) =>
          logger.debug("No errors on compilation.")
          List.empty.toJSArray
          CompilationResult(
            List.empty.toJSArray,
            state.toList
              .flatMap(s =>
                (s.names.locations ++ s.abilities.locations).flatMap { case (t, tInfo) =>
                  tInfo.definition match {
                    case None => Nil
                    case Some(d) =>
                      TokenLink(TokenLocation(t.unit._1), TokenLocation(d.unit._1)) :: Nil
                  }
                }
              )
              .toJSArray
          )
        case Invalid(e: NonEmptyChain[AquaError[FileModuleId, AquaFileError, FileSpan.F]]) =>
          val errors = e.toNonEmptyList.toList.flatMap(errorToInfo)
          logger.debug("Errors: " + errors.mkString("\n"))
          errors.toJSArray
          CompilationResult(errors.toJSArray, List.empty.toJSArray)
      }
      result
    }
18 language-server-npm/aqua-lsp-api.d.ts (vendored)
@@ -1,3 +1,14 @@
export interface TokenLocation {
  name: string,
  start: number,
  end: number
}

export interface TokenLink {
  current: TokenLocation,
  definition: TokenLocation
}

export interface ErrorInfo {
  start: number,
  end: number,
@@ -5,8 +16,13 @@ export interface ErrorInfo {
  location: string | null
}

export interface CompilationResult {
  errors: ErrorInfo[],
  locations: TokenLink[]
}

export class Compiler {
  compile(path: string, imports: string[]): Promise<ErrorInfo[]>;
  compile(path: string, imports: string[]): Promise<CompilationResult>;
}

export var AquaLSP: Compiler;
@@ -8,7 +8,7 @@
    "aqua-lsp-api.d.ts"
  ],
  "scripts": {
    "move:scalajs": "cp ../language-server-api/target/scala-3.1.0/language-server-opt/main.js ./aqua-lsp-api.js"
    "move:scalajs": "cp ../language-server-api/target/scala-3.1.0/language-server-api-opt/main.js ./aqua-lsp-api.js"
  },
  "repository": {
    "type": "git",
@@ -1,10 +1,61 @@
package aqua.raw

import aqua.raw.value.ValueRaw
import aqua.types.Type
import aqua.raw.value.{ValueRaw, VarRaw}
import aqua.types.{ScalarType, Type}

case class ConstantRaw(name: String, value: ValueRaw, allowOverrides: Boolean) extends RawPart {
  override def rename(s: String): RawPart = copy(name = s)

  override def rawPartType: Type = value.`type`
}

object ConstantRaw {

  val initPeerId: ConstantRaw =
    ConstantRaw(
      "INIT_PEER_ID",
      ValueRaw.InitPeerId,
      false
    )

  val particleTtl: ConstantRaw =
    ConstantRaw(
      "PARTICLE_TTL",
      ValueRaw.ParticleTtl,
      false
    )

  val particleTimestamp: ConstantRaw =
    ConstantRaw(
      "PARTICLE_TIMESTAMP",
      ValueRaw.ParticleTimestamp,
      false
    )

  val nil: ConstantRaw =
    ConstantRaw(
      "nil", // TODO: shouldn't it be NIL?
      ValueRaw.Nil,
      false
    )

  val lastError: ConstantRaw =
    ConstantRaw(
      "LAST_ERROR",
      ValueRaw.LastError,
      false
    )

  // Host peer id holds %init_peer_id% in case Aqua is not compiled to be executed behind a relay,
  // or relay's variable otherwise
  def hostPeerId(relayVarName: Option[String]): ConstantRaw =
    ConstantRaw(
      "HOST_PEER_ID",
      relayVarName.fold[ValueRaw](ValueRaw.InitPeerId)(r => VarRaw(r, ScalarType.string)),
      false
    )

  def defaultConstants(relayVarName: Option[String]): List[ConstantRaw] =
    hostPeerId(relayVarName) :: initPeerId :: particleTtl :: particleTimestamp :: nil :: lastError :: Nil
}
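
ConstantRaw now owns the default constant set that TransformConfig used to build inline (see the next hunk). A quick sketch of how the defaults are obtained and extended; the relay variable name and the extra constant here are purely illustrative:

import aqua.raw.ConstantRaw
import aqua.raw.value.ValueRaw

// Defaults without a relay: HOST_PEER_ID falls back to %init_peer_id%
val noRelay: List[ConstantRaw] = ConstantRaw.defaultConstants(None)

// Behind a relay: HOST_PEER_ID points at the relay variable instead
val behindRelay: List[ConstantRaw] = ConstantRaw.defaultConstants(Some("relayVar"))

// Extending the list, as TransformConfig does with its extra constants
// ("MY_CONST" is an illustrative name, not defined in the repo)
val custom = ConstantRaw("MY_CONST", ValueRaw.Nil, false)
val all: List[ConstantRaw] = noRelay :+ custom
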
@@ -26,50 +26,6 @@ case class TransformConfig(
  val callbackSrvId: ValueRaw = quote(callbackService)
  val dataSrvId: ValueRaw = quote(getDataService)

  // Host peer id holds %init_peer_id% in case Aqua is not compiled to be executed behind a relay,
  // or relay's variable otherwise
  val hostPeerId: ConstantRaw =
    ConstantRaw(
      "HOST_PEER_ID",
      relayVarName.fold[ValueRaw](ValueRaw.InitPeerId)(r => VarRaw(r, ScalarType.string)),
      false
    )

  val initPeerId: ConstantRaw =
    ConstantRaw(
      "INIT_PEER_ID",
      ValueRaw.InitPeerId,
      false
    )

  val particleTtl: ConstantRaw =
    ConstantRaw(
      "PARTICLE_TTL",
      ValueRaw.ParticleTtl,
      false
    )

  val particleTimestamp: ConstantRaw =
    ConstantRaw(
      "PARTICLE_TIMESTAMP",
      ValueRaw.ParticleTimestamp,
      false
    )

  val nil: ConstantRaw =
    ConstantRaw(
      "nil", // TODO: shouldn't it be NIL?
      ValueRaw.Nil,
      false
    )

  val lastError: ConstantRaw =
    ConstantRaw(
      "LAST_ERROR",
      ValueRaw.LastError,
      false
    )

  val constantsList: List[ConstantRaw] =
    hostPeerId :: initPeerId :: particleTtl :: particleTimestamp :: nil :: lastError :: constants
    ConstantRaw.defaultConstants(relayVarName) ::: constants
}
@@ -5,6 +5,7 @@ import aqua.raw.RawContext
import aqua.semantics.rules.abilities.AbilitiesState
import aqua.semantics.rules.names.NamesState
import aqua.semantics.rules.types.TypesState
import cats.Semigroup
import cats.data.{Chain, State}
import cats.kernel.Monoid
import cats.syntax.monoid.*
@@ -26,6 +27,17 @@ object CompilerState {
    types = TypesState.init[F](ctx)
  )

  // TODO remove it after switching from RawContext to LspContext[S]
  implicit def semigroupState[S[_]]: Semigroup[CompilerState[S]] =
    (x: CompilerState[S], y: CompilerState[S]) => {
      CompilerState[S](
        x.errors ++ y.errors,
        x.names |+| y.names,
        x.abilities |+| y.abilities,
        x.types |+| y.types
      )
    }

  implicit def compilerStateMonoid[S[_]]: Monoid[St[S]] = new Monoid[St[S]] {
    override def empty: St[S] = State.pure(Raw.Empty("compiler state monoid empty"))
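
The new Semigroup instance exists so per-module CompilerStates can be merged while linking: errors concatenate, while the names, abilities and types states combine via their own monoids. A tiny hedged sketch of what it enables:

import aqua.semantics.CompilerState
import cats.syntax.semigroup.*

// Sketch: merging the states of two compiled modules
def mergeStates[S[_]](a: CompilerState[S], b: CompilerState[S]): CompilerState[S] =
  a |+| b
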
@@ -24,7 +24,7 @@ import scribe.{log, Logging}

object Semantics extends Logging {

  def folder[S[_], G[_]: Monad](implicit
  private def folder[S[_], G[_]: Monad](implicit
    A: AbilitiesAlgebra[S, G],
    N: NamesAlgebra[S, G],
    T: TypesAlgebra[S, G]
@@ -77,46 +77,54 @@ object Semantics extends Logging {
  private def astToState[S[_]](ast: Ast[S]): Interpreter[S, Raw] =
    transpile[S](ast)

  def process[S[_]](ast: Ast[S], init: RawContext): ValidatedNec[SemanticError[S], RawContext] =
  // If there are any errors, they're inside CompilerState[S]
  // TODO: pass external token definitions for the RawContext somehow
  def interpret[S[_]](ast: Ast[S], init: RawContext): Eval[(CompilerState[S], RawContext)] =
    astToState[S](ast)
      .run(CompilerState.init[S](init))
      .map {
        case (state, _: Raw.Empty) =>
          // No `parts`, but has `init`
          NonEmptyChain
            .fromChain(state.errors)
            .fold[ValidatedNec[SemanticError[S], RawContext]](
              Valid(
                RawContext.blank.copy(
                  init = Some(init.copy(module = init.module.map(_ + "|init")))
                    .filter(_ != RawContext.blank)
                )
              )
            )(Invalid(_))
        case (state, part: (RawPart | RawPart.Parts)) =>
          val accCtx =
            RawPart
              .contextPart(part)
              .parts
              .foldLeft(
                RawContext.blank.copy(
                  init = Some(init.copy(module = init.module.map(_ + "|init")))
                    .filter(_ != RawContext.blank)
                )
              ) { case (ctx, p) =>
                ctx.copy(parts = ctx.parts :+ (ctx -> p))
              }
          (
            state,
            RawContext.blank.copy(
              init = Some(init.copy(module = init.module.map(_ + "|init")))
                .filter(_ != RawContext.blank)
            )
          )

          NonEmptyChain
            .fromChain(state.errors)
            .fold[ValidatedNec[SemanticError[S], RawContext]](Valid(accCtx))(Invalid(_))
        case (state, m) =>
        case (state, part: (RawPart | RawPart.Parts)) =>
          state -> RawPart
            .contextPart(part)
            .parts
            .foldLeft(
              RawContext.blank.copy(
                init = Some(init.copy(module = init.module.map(_ + "|init")))
                  .filter(_ != RawContext.blank)
              )
            ) { case (ctx, p) =>
              ctx.copy(parts = ctx.parts :+ (ctx -> p))
            }
        case (state: CompilerState[S], m) =>
          logger.error("Got unexpected " + m)
          NonEmptyChain
            .fromChain(state.errors)
            .map(Invalid(_))
            .getOrElse(Validated.invalidNec[SemanticError[S], RawContext](WrongAST(ast)))
          state.copy(errors = state.errors :+ WrongAST(ast)) -> RawContext.blank.copy(
            init = Some(init.copy(module = init.module.map(_ + "|init")))
              .filter(_ != RawContext.blank)
          )
      }

  // TODO: return just RawContext on the right side
  def process[S[_]](
    ast: Ast[S],
    init: RawContext
  ): ValidatedNec[SemanticError[S], (CompilerState[S], RawContext)] =
    interpret(ast, init).map { case (state, ctx) =>
      NonEmptyChain
        .fromChain(state.errors)
        .fold[ValidatedNec[SemanticError[S], (CompilerState[S], RawContext)]](
          Valid(state -> ctx)
        )(Invalid(_))
    }
      // TODO: return as Eval
      .value
}
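
Semantics.process now surfaces the full CompilerState next to the RawContext, which is what lets the LSP recover token locations after interpretation. A hedged sketch of a caller; ast and initCtx stand for a parsed Ast[S] and an import-derived RawContext the caller already has:

import aqua.semantics.Semantics
import cats.data.Validated.{Invalid, Valid}

// Sketch only: consuming the new pair-shaped result.
Semantics.process(ast, initCtx) match {
  case Valid((state, ctx)) =>
    // `ctx` feeds linking as before; `state` additionally carries
    // names/abilities locations for go-to-definition
    ???
  case Invalid(errors) =>
    // semantic errors, reported as before
    ???
}
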
@@ -26,7 +26,7 @@ class ServiceSem[S[_]](val expr: ServiceExpr[S]) extends AnyVal {
      (_: Unit, body: Raw) =>
        (A.purgeArrows(expr.name) <* A.endScope()).flatMap {
          case Some(nel) =>
            val arrows = nel.map(kv => kv._1.value -> kv._2).toNem
            val arrows = nel.map(kv => kv._1.value -> (kv._1, kv._2)).toNem
            for {
              defaultId <- expr.id
                .map(v => V.valueToRaw(v))
@@ -44,7 +44,7 @@ class ServiceSem[S[_]](val expr: ServiceExpr[S]) extends AnyVal {
            )
          } yield
            if (defineResult) {
              ServiceRaw(expr.name.value, arrows, defaultId)
              ServiceRaw(expr.name.value, arrows.map(_._2), defaultId)
            } else Raw.empty("Service not created due to validation errors")

        case None =>
21 semantics/src/main/scala/aqua/semantics/lsp/TokenInfo.scala (new file)
@@ -0,0 +1,21 @@
package aqua.semantics.lsp

import aqua.parser.lexer.Token
import aqua.types.{ArrowType, Type}

// Token description with its definition, type, etc.
sealed trait TokenInfo[F[_]] {
  def definition: Option[Token[F]]
}

case class TokenDef[F[_]](definition: Option[Token[F]]) extends TokenInfo[F]

sealed trait TokenType[F[_]] extends TokenInfo[F] {
  def definition: Option[Token[F]]
  def tokenType: Type
}

case class TokenTypeInfo[F[_]](definition: Option[Token[F]], tokenType: Type) extends TokenType[F]

case class TokenArrowInfo[F[_]](definition: Option[Token[F]], tokenType: ArrowType)
    extends TokenType[F]
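
This TokenInfo hierarchy is what the interpreters below store in their locations lists: the LSP only needs the definition token, while type-aware features can match on the subtypes. A hedged sketch of consuming it; the helper name describe is ours:

import aqua.parser.lexer.Token
import aqua.semantics.lsp.{TokenArrowInfo, TokenDef, TokenInfo, TokenTypeInfo}

// Sketch: resolve what an LSP feature may want from a stored TokenInfo.
def describe[F[_]](info: TokenInfo[F]): (Option[Token[F]], String) = info match {
  case TokenArrowInfo(definition, arrow) => (definition, s"arrow of type $arrow")
  case TokenTypeInfo(definition, t)      => (definition, s"value of type $t")
  case TokenDef(definition)              => (definition, "definition only")
}
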
@@ -14,7 +14,7 @@ trait AbilitiesAlgebra[S[_], Alg[_]] {

  def defineService(
    name: Ability[S],
    arrows: NonEmptyMap[String, ArrowType],
    arrows: NonEmptyMap[String, (Name[S], ArrowType)],
    defaultId: Option[ValueRaw]
  ): Alg[Boolean]
@@ -4,6 +4,7 @@ import aqua.parser.lexer.{Ability, Name, Token, ValueToken}
import aqua.raw.ServiceRaw
import aqua.raw.RawContext
import aqua.raw.value.ValueRaw
import aqua.semantics.lsp.{TokenArrowInfo, TokenDef, TokenTypeInfo}
import aqua.semantics.Levenshtein
import aqua.semantics.rules.{abilities, ReportError, StackInterpreter}
import aqua.types.ArrowType
@@ -51,7 +52,7 @@ class AbilitiesInterpreter[S[_], X](implicit

  override def defineService(
    name: Ability[S],
    arrows: NonEmptyMap[String, ArrowType],
    arrows: NonEmptyMap[String, (Name[S], ArrowType)],
    defaultId: Option[ValueRaw]
  ): SX[Boolean] =
    getService(name.value).flatMap {
@@ -65,12 +66,36 @@ class AbilitiesInterpreter[S[_], X](implicit
          modify(s =>
            s.copy(
              services = s.services
                .updated(name.value, ServiceRaw(name.value, arrows, defaultId)),
              definitions = s.definitions.updated(name.value, name)
                .updated(name.value, ServiceRaw(name.value, arrows.map(_._2), defaultId)),
              definitions =
                s.definitions.updated(name.value, (name, arrows.toSortedMap.values.toList))
            )
          ).as(true)
      }

  // adds a location from a token to its definition
  def addServiceArrowLocation(name: Ability[S], arrow: Name[S]): SX[Unit] = {
    getState.flatMap { st =>
      st.definitions.get(name.value) match {
        case Some((ab, arrows)) =>
          modify(st =>
            st.copy(locations =
              st.locations ++ (
                (name, TokenDef(Some(ab))) :: (
                  arrow,
                  TokenDef(
                    arrows.find(_._1.value == arrow.value).map(_._1)
                  )
                ) :: Nil
              )
            )
          )
        case None =>
          State.pure(())
      }
    }
  }

  override def getArrow(name: Ability[S], arrow: Name[S]): SX[Option[ArrowType]] =
    getService(name.value).map(_.map(_.arrows)).flatMap {
      case Some(arrows) =>
@@ -84,7 +109,7 @@ class AbilitiesInterpreter[S[_], X](implicit
            arrows.value.keys.toNonEmptyList.toList
          )
        ).as(Option.empty[ArrowType])
      )(a => State.pure(Some(a)))
      )(a => addServiceArrowLocation(name, arrow).as(Some(a)))
      case None =>
        getAbility(name.value).flatMap {
          case Some(abCtx) =>
@@ -99,7 +124,9 @@ class AbilitiesInterpreter[S[_], X](implicit
              abCtx.funcs.keys.toList
            )
          ).as(Option.empty[ArrowType])
        )(fn => State.pure(Some(fn.arrow.`type`)))
        ) { fn =>
          addServiceArrowLocation(name, arrow).as(Some(fn.arrow.`type`))
        }
      case None =>
        report(name, "Ability with this name is undefined").as(Option.empty[ArrowType])
    }
@@ -3,16 +3,20 @@ package aqua.semantics.rules.abilities
import aqua.raw.{RawContext, ServiceRaw}
import aqua.raw.value.ValueRaw
import aqua.parser.lexer.{Ability, Name, Token, ValueToken}
import aqua.semantics.lsp.TokenInfo
import aqua.types.ArrowType
import cats.Monoid
import cats.data.NonEmptyList

case class AbilitiesState[S[_]](
  stack: List[AbilitiesState.Frame[S]] = Nil,
  services: Map[String, ServiceRaw] = Map.empty,
  abilities: Map[String, RawContext] = Map.empty,
  rootServiceIds: Map[String, (ValueToken[S], ValueRaw)] = Map.empty[String, (ValueToken[S], ValueRaw)],
  definitions: Map[String, Ability[S]] = Map.empty[String, Ability[S]]
  stack: List[AbilitiesState.Frame[S]] = Nil,
  services: Map[String, ServiceRaw] = Map.empty,
  abilities: Map[String, RawContext] = Map.empty,
  rootServiceIds: Map[String, (ValueToken[S], ValueRaw)] =
    Map.empty[String, (ValueToken[S], ValueRaw)],
  definitions: Map[String, (Ability[S], List[(Name[S], ArrowType)])] =
    Map.empty[String, (Ability[S], List[(Name[S], ArrowType)])],
  locations: List[(Token[S], TokenInfo[S])] = Nil
) {

  def purgeArrows: Option[(NonEmptyList[(Name[S], ArrowType)], AbilitiesState[S])] =
@@ -30,7 +34,8 @@ object AbilitiesState {
  case class Frame[S[_]](
    token: Token[S],
    arrows: Map[String, (Name[S], ArrowType)] = Map.empty[String, (Name[S], ArrowType)],
    serviceIds: Map[String, (ValueToken[S], ValueRaw)] = Map.empty[String, (ValueToken[S], ValueRaw)]
    serviceIds: Map[String, (ValueToken[S], ValueRaw)] =
      Map.empty[String, (ValueToken[S], ValueRaw)]
  )

  implicit def abilitiesStateMonoid[S[_]]: Monoid[AbilitiesState[S]] =
@@ -43,7 +48,8 @@ object AbilitiesState {
      x.services ++ y.services,
      x.abilities ++ y.abilities,
      x.rootServiceIds ++ y.rootServiceIds,
      x.definitions ++ y.definitions
      x.definitions ++ y.definitions,
      x.locations ++ y.locations
    )
  }
@@ -1,6 +1,7 @@
package aqua.semantics.rules.names

import aqua.parser.lexer.{Name, Token}
import aqua.semantics.lsp.{TokenArrowInfo, TokenType, TokenTypeInfo}
import aqua.semantics.Levenshtein
import aqua.semantics.rules.{ReportError, StackInterpreter}
import aqua.types.{ArrowType, StreamType, Type}
@@ -22,7 +23,7 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re

  type SX[A] = State[X, A]

  def readName(name: String): SX[Option[Type]] =
  def readName(name: String): SX[Option[TokenType[S]]] =
    getState.map { st =>
      st.constants.get(name) orElse st.stack.collectFirst {
        case frame if frame.names.contains(name) => frame.names(name)
@@ -31,7 +32,7 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re
  }

  override def read(name: Name[S], mustBeDefined: Boolean = true): SX[Option[Type]] =
    OptionT(constantDefined(name))
    OptionT(constantInfo(name))
      .orElseF(readName(name.value))
      .value
      .flatTap {
@@ -47,15 +48,22 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re
            )
          )
        )
        case Some(tokenInfo) =>
          modify(st => st.copy(locations = st.locations :+ (name, tokenInfo)))
        case _ => State.pure(())
      }
      .map(_.map(_.tokenType))

  def constantInfo(name: Name[S]): SX[Option[TokenType[S]]] =
    getState.map(_.constants.get(name.value))

  override def constantDefined(name: Name[S]): SX[Option[Type]] =
    getState.map(_.constants.get(name.value))
    constantInfo(name).map(_.map(_.tokenType))

  def readArrow(name: Name[S]): SX[Option[ArrowType]] =
    readArrowHelper(name.value).flatMap {
      case Some(g) => State.pure(Option(g))
      case Some(g) =>
        modify(st => st.copy(locations = st.locations :+ (name, g))).map(_ => Option(g.tokenType))
      case None =>
        getState.flatMap(st =>
          report(
@@ -70,9 +78,11 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re
    )
  }

  def readArrowHelper(name: String): SX[Option[ArrowType]] =
  def readArrowHelper(name: String): SX[Option[TokenArrowInfo[S]]] =
    getState.map { st =>
      st.stack.flatMap(_.arrows.get(name)).headOption orElse st.rootArrows.get(name)
      st.stack
        .flatMap(_.arrows.get(name))
        .headOption orElse st.rootArrows.get(name)
    }

  override def define(name: Name[S], `type`: Type): SX[Boolean] =
@@ -86,7 +96,7 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re
      mapStackHead(
        report(name, "Cannot define a variable in the root scope")
          .as(false)
      )(fr => fr.addName(name.value, `type`) -> true)
      )(fr => fr.addName(name, `type`) -> true)
  }

  override def defineConstant(name: Name[S], `type`: Type): SX[Boolean] =
@@ -96,7 +106,7 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re
      case None =>
        modify(st =>
          st.copy(
            constants = st.constants.updated(name.value, `type`)
            constants = st.constants.updated(name.value, TokenTypeInfo(Some(name), `type`))
          )
        ).as(true)
    }
@@ -114,7 +124,7 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re
    if (isRoot)
      modify(st =>
        st.copy(
          rootArrows = st.rootArrows.updated(name.value, gen),
          rootArrows = st.rootArrows.updated(name.value, TokenArrowInfo(Some(name), gen)),
          definitions = st.definitions.updated(name.value, name)
        )
      )
@@ -122,7 +132,7 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re
    else
      report(name, "Cannot define a variable in the root scope")
        .as(false)
    )(fr => fr.addArrow(name.value, gen) -> true)
    )(fr => fr.addArrow(name, gen) -> true)
  }

  override def beginScope(token: Token[S]): SX[Unit] =
@@ -130,7 +140,7 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re

  override def streamsDefinedWithinScope(): SX[Set[String]] =
    stackInt.mapStackHead(State.pure(Set.empty[String])) { frame =>
      frame -> frame.names.collect { case (n, StreamType(_)) =>
      frame -> frame.names.collect { case (n, TokenTypeInfo(_, StreamType(_))) =>
        n
      }.toSet
    }
@@ -2,16 +2,17 @@ package aqua.semantics.rules.names

import aqua.parser.lexer.{Name, Token}
import aqua.raw.RawContext
import aqua.semantics.lsp.{TokenArrowInfo, TokenType, TokenTypeInfo}
import aqua.types.{ArrowType, Type}
import cats.kernel.Monoid
import cats.syntax.functor.*

case class NamesState[S[_]](
  stack: List[NamesState.Frame[S]] = Nil,
  rootArrows: Map[String, ArrowType] = Map.empty,
  constants: Map[String, Type] = Map.empty[String, Type],
  opaque: Map[String, Type] = Map.empty[String, Type],
  definitions: Map[String, Name[S]] = Map.empty[String, Name[S]]
  rootArrows: Map[String, TokenArrowInfo[S]] = Map.empty[String, TokenArrowInfo[S]],
  constants: Map[String, TokenType[S]] = Map.empty[String, TokenType[S]],
  definitions: Map[String, Name[S]] = Map.empty[String, Name[S]],
  locations: List[(Token[S], TokenType[S])] = Nil
) {

  def allNames: LazyList[String] =
@@ -29,13 +30,15 @@ object NamesState {

  case class Frame[S[_]](
    token: Token[S],
    names: Map[String, Type] = Map.empty,
    arrows: Map[String, ArrowType] = Map.empty
    names: Map[String, TokenType[S]] = Map.empty[String, TokenType[S]],
    arrows: Map[String, TokenArrowInfo[S]] = Map.empty[String, TokenArrowInfo[S]]
  ) {
    def addName(n: String, t: Type): NamesState.Frame[S] = copy[S](names = names.updated(n, t))

    def addArrow(n: String, g: ArrowType): NamesState.Frame[S] =
      copy[S](arrows = arrows.updated(n, g))
    def addName(n: Name[S], t: Type): NamesState.Frame[S] =
      copy[S](names = names.updated(n.value, TokenTypeInfo(Some(n), t)))

    def addArrow(n: Name[S], g: ArrowType): NamesState.Frame[S] =
      copy[S](arrows = arrows.updated(n.value, TokenArrowInfo(Some(n), g)))
  }

  implicit def namesStateMonoid[S[_]]: Monoid[NamesState[S]] = new Monoid[NamesState[S]] {
@@ -52,7 +55,9 @@ object NamesState {

  def init[S[_]](context: RawContext): NamesState[S] =
    NamesState(
      rootArrows = context.allFuncs.map { case (s, fc) => (s, fc.arrow.`type`) },
      constants = context.allValues.map { case (s, vm) => (s, vm.`type`) }
      rootArrows = context.allFuncs.map { case (s, fc) =>
        (s, TokenArrowInfo[S](None, fc.arrow.`type`))
      },
      constants = context.allValues.map { case (s, vm) => (s, TokenTypeInfo[S](None, vm.`type`)) }
    )
}
@@ -37,7 +37,7 @@ class SemanticsSpec extends AnyFlatSpec with Matchers {

  val p = Semantics.process(ast, ctx)

  val func = p.toList.head.funcs("parFunc")
  val func = p.toList.head._2.funcs("parFunc")

  val proc = func.arrow.body