More compiler work including stack reworking to inject items

Chad Retz 2017-03-23 15:58:56 -05:00
parent 40bd73c9a1
commit 5021c554be
8 changed files with 399 additions and 173 deletions

View File

@@ -1,59 +0,0 @@
package asmble.temp;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.function.Supplier;
class Temp {
public void temp() throws Throwable {
MethodHandle mh = MethodHandles.lookup().findStatic(Temp.class, "foo",
MethodType.methodType(int.class));
int ret = (int) mh.invokeExact();
throw new UnsupportedOperationException("Unreachable: " + ret);
}
public static int foo() {
return 45;
}
static class Module1 {
private final ByteBuffer memory;
private final MethodHandle spectestPrint;
// private final MethodHandle localFunc0;
public Module1(int amount, MethodHandle spectestPrint) {
this(ByteBuffer.allocateDirect(amount), spectestPrint);
}
public Module1(ByteBuffer memory, MethodHandle spectestPrint) {
// TODO: could check memory capacity here
// We trust this is zeroed
this.memory = memory;
this.memory.limit(65536 /* 1 page */);
this.memory.put(new byte[] { 1, 2, 3 /*...*/ }, 0, 3 /* full length */);
this.spectestPrint = spectestPrint;
}
public void good(int param0) throws Throwable {
$func1(param0);
}
private void $func1(int param0) throws Throwable {
// Compiler option to determine number of accesses before it's made a local var...default 1
ByteBuffer memory = this.memory;
MethodHandle spectestPrint = this.spectestPrint;
// (call $print (i32.load8_u offset=0 (get_local $i))) ;; 97 'a'
// iload_1
int iload_1 = param0;
int param_var = memory.get(iload_1);
// TODO: compiler option to not put Throwable on functions
spectestPrint.invokeExact(param_var);
}
}
}
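The scratch class above illustrates the compilation target: imports arrive as bound MethodHandles held in private final fields, and memory is a direct ByteBuffer. A minimal host-side sketch of producing such an import handle (Host and its print function are hypothetical stand-ins, not part of this commit):

import java.lang.invoke.MethodHandles
import java.lang.invoke.MethodType

// Hypothetical host object standing in for the spectest "print" import.
object Host {
    @JvmStatic
    fun print(value: Int) = println("print: $value")
}

fun main() {
    // Build a MethodHandle of type (int)void, the shape $func1 above
    // expects when it calls spectestPrint.invokeExact(param_var).
    val spectestPrint = MethodHandles.lookup().findStatic(
        Host::class.java, "print",
        MethodType.methodType(Void.TYPE, Integer.TYPE)
    )
    // invoke (not invokeExact) lets this loosely typed call site adapt;
    // the compiled code uses invokeExact against exactly typed fields.
    spectestPrint.invoke(97) // prints "print: 97" (97 is 'a')
}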

View File

@@ -78,6 +78,3 @@ val AbstractInsnNode.isTerminating: Boolean get() = when (this.opcode) {
val Node.Type.Func.asmDesc: String get() =
(this.ret?.typeRef ?: Void::class.ref).asMethodRetDesc(*this.params.map { it.typeRef }.toTypedArray())
fun Node.Func.actualLocalIndex(givenIndex: Int) =
this.locals.take(givenIndex).sumBy { if (it == Node.Type.Value.I64 || it == Node.Type.Value.F64) 2 else 1 }

View File

@@ -1,14 +1,14 @@
package asmble.compile.jvm
import asmble.ast.Node
import asmble.util.Either
import org.objectweb.asm.Opcodes
import org.objectweb.asm.Type
import org.objectweb.asm.tree.*
import java.lang.invoke.MethodHandle
open class AstToAsm {
// Note, the class is not given a name here (yet)
fun fromModule(ctx: Context) {
fun fromModule(ctx: ClsContext) {
// Invoke dynamic among other things
ctx.cls.version = Opcodes.V1_7
ctx.cls.access += Opcodes.ACC_PUBLIC
@@ -19,14 +19,14 @@ open class AstToAsm {
// TODO: addImportForwarders
}
fun addFields(ctx: Context) {
fun addFields(ctx: ClsContext) {
// First field is always a private final memory field
// Ug, ambiguity on List<?> +=
ctx.cls.fields.plusAssign(FieldNode(Opcodes.ACC_PRIVATE + Opcodes.ACC_FINAL, "memory",
ctx.mem.memType.asmDesc, null, null))
// Now all method imports as method handles
ctx.cls.fields += ctx.importFuncs.indices.map {
FieldNode(Opcodes.ACC_PRIVATE + Opcodes.ACC_FINAL, importFuncFieldName(it),
FieldNode(Opcodes.ACC_PRIVATE + Opcodes.ACC_FINAL, funcName(it),
MethodHandle::class.ref.asmDesc, null, null)
}
// Now all import globals as getter (and maybe setter) method handles
@@ -60,7 +60,7 @@
}
}
fun addConstructors(ctx: Context) {
fun addConstructors(ctx: ClsContext) {
// We have at least two constructors:
// <init>(int maxMemory, imports...)
// <init>(MemClass maxMemory, imports...)
@@ -92,7 +92,7 @@
VarInsnNode(Opcodes.ALOAD, 0),
VarInsnNode(Opcodes.ALOAD, importIndex + 1),
FieldInsnNode(Opcodes.PUTFIELD, ctx.thisRef.asmName,
importFuncFieldName(importIndex), MethodHandle::class.ref.asmDesc)
funcName(importIndex), MethodHandle::class.ref.asmDesc)
)
}.addInsns(InsnNode(Opcodes.RETURN))
@@ -104,7 +104,7 @@
).push(ctx.thisRef, Int::class.ref)
amountCon = ctx.mem.create(amountCon).popExpectingMulti(ctx.thisRef, ctx.mem.memType)
// In addition to this and mem on the stack, add all imports
amountCon = amountCon.params.drop(1).foldIndexed(amountCon) { index, amountCon, param ->
amountCon = amountCon.params.drop(1).indices.fold(amountCon) { amountCon, index ->
amountCon.addInsns(VarInsnNode(Opcodes.ALOAD, 2 + index))
}
// Make call
@@ -127,7 +127,7 @@
ctx.cls.methods += constructors.map(Func::toMethodNode)
}
fun addFuncs(ctx: Context) {
fun addFuncs(ctx: ClsContext) {
ctx.cls.methods += ctx.mod.funcs.mapIndexed { index, func ->
fromFunc(ctx, func, ctx.importFuncs.size + index)
}
@@ -136,39 +136,61 @@
fun importGlobalGetterFieldName(index: Int) = "import\$get" + globalName(index)
fun importGlobalSetterFieldName(index: Int) = "import\$set" + globalName(index)
fun globalName(index: Int) = "\$global$index"
fun importFuncFieldName(index: Int) = "import" + funcName(index)
fun funcName(index: Int) = "\$func$index"
fun fromFunc(ctx: Context, f: Node.Func, index: Int): Func {
// Technically all local funcs are static with "this" as the last param.
// This is important because it allows us to call other functions without
// reworking the stack. They are private, and if they are exported then
// the parameters get turned around as expected.
fun fromFunc(ctx: ClsContext, f: Node.Func, index: Int): Func {
// TODO: validate local size?
// TODO: initialize non-param locals?
var func = Func(
access = Opcodes.ACC_STATIC + Opcodes.ACC_PRIVATE,
access = Opcodes.ACC_PRIVATE,
name = funcName(index),
params = f.type.params.map(Node.Type.Value::typeRef) + ctx.thisRef,
params = f.type.params.map(Node.Type.Value::typeRef),
ret = f.type.ret?.let(Node.Type.Value::typeRef) ?: Void::class.ref
)
// Rework the instructions
val funcCtx = FuncContext(ctx, f, ctx.reworker.rework(ctx, f.instructions))
// Add all instructions
func = f.instructions.fold(func) { func, insn -> applyInsn(ctx, f, func, insn) }
func = funcCtx.insns.foldIndexed(func) { index, func, insn -> applyInsn(funcCtx, func, insn, index) }
return func
}
fun applyInsn(ctx: Context, f: Node.Func, fn: Func, i: Node.Instr) = when (i) {
fun applyInsn(ctx: FuncContext, fn: Func, i: Insn, index: Int) = when (i) {
is Insn.Node ->
applyNodeInsn(ctx, fn, i.insn, index)
is Insn.ImportFuncRefNeededOnStack ->
// Func refs are method handle fields
fn.addInsns(
VarInsnNode(Opcodes.ALOAD, 0),
FieldInsnNode(Opcodes.GETFIELD, ctx.cls.thisRef.asmName,
funcName(i.index), MethodHandle::class.ref.asmDesc)
).push(MethodHandle::class.ref)
is Insn.ImportGlobalSetRefNeededOnStack ->
// Import setters are method handle fields
fn.addInsns(
VarInsnNode(Opcodes.ALOAD, 0),
FieldInsnNode(Opcodes.GETFIELD, ctx.cls.thisRef.asmName,
importGlobalSetterFieldName(i.index), MethodHandle::class.ref.asmDesc)
).push(MethodHandle::class.ref)
is Insn.ThisNeededOnStack ->
fn.addInsns(VarInsnNode(Opcodes.ALOAD, 0)).push(ctx.cls.thisRef)
is Insn.MemNeededOnStack ->
putMemoryOnStackIfNecessary(ctx, fn)
}
fun applyNodeInsn(ctx: FuncContext, fn: Func, i: Node.Instr, index: Int) = when (i) {
is Node.Instr.Unreachable ->
fn.addInsns(UnsupportedOperationException::class.athrow("Unreachable"))
is Node.Instr.Nop ->
fn.addInsns(InsnNode(Opcodes.NOP))
// TODO: other control flow...
is Node.Instr.Return ->
applyReturnInsn(ctx, f, fn)
applyReturnInsn(ctx, fn)
is Node.Instr.Call ->
applyCallInsn(ctx, f, fn, i.index, false)
applyCallInsn(ctx, fn, i.index)
is Node.Instr.CallIndirect ->
applyCallInsn(ctx, f, fn, i.index, true)
TODO("To be determined w/ invokedynamic")
is Node.Instr.Drop ->
fn.pop().let { (fn, popped) ->
fn.addInsns(InsnNode(if (popped.stackSize == 2) Opcodes.POP2 else Opcodes.POP))
@@ -176,94 +198,156 @@ open class AstToAsm {
is Node.Instr.Select ->
applySelectInsn(ctx, fn)
is Node.Instr.GetLocal ->
applyGetLocal(ctx, f, fn, i.index)
applyGetLocal(ctx, fn, i.index)
is Node.Instr.SetLocal ->
applySetLocal(ctx, f, fn, i.index)
applySetLocal(ctx, fn, i.index)
is Node.Instr.TeeLocal ->
applyTeeLocal(ctx, f, fn, i.index)
applyTeeLocal(ctx, fn, i.index)
is Node.Instr.GetGlobal ->
applyGetGlobal(ctx, fn, i.index)
is Node.Instr.SetGlobal ->
applySetGlobal(ctx, fn, i.index)
is Node.Instr.I32Load, is Node.Instr.I64Load, is Node.Instr.F32Load, is Node.Instr.F64Load,
is Node.Instr.I32Load8S, is Node.Instr.I32Load8U, is Node.Instr.I32Load16U, is Node.Instr.I32Load16S,
is Node.Instr.I64Load8S, is Node.Instr.I64Load8U, is Node.Instr.I64Load16U, is Node.Instr.I64Load16S,
is Node.Instr.I64Load32S, is Node.Instr.I64Load32U ->
// TODO: why do I have to cast?
applyLoadOp(ctx, fn, i as Node.Instr.Args.AlignOffset)
is Node.Instr.I32Store, is Node.Instr.I64Store, is Node.Instr.F32Store, is Node.Instr.F64Store,
is Node.Instr.I32Store8, is Node.Instr.I32Store16, is Node.Instr.I64Store8, is Node.Instr.I64Store16,
is Node.Instr.I64Store32 ->
applyStoreOp(ctx, fn, i as Node.Instr.Args.AlignOffset, index)
is Node.Instr.CurrentMemory ->
applyCurrentMemory(ctx, fn)
is Node.Instr.GrowMemory ->
applyGrowMemory(ctx, fn, index)
else -> TODO()
}
fun applySetGlobal(ctx: Context, fn: Func, index: Int) =
// Import is handled completely differently than self
// TODO: check mutability?
if (index < ctx.importGlobals.size)
applyImportSetGlobal(ctx, fn, index, ctx.importGlobals[index].kind as Node.Import.Kind.Global)
else
applySelfSetGlobal(ctx, fn, index, ctx.mod.globals[ctx.importGlobals.size - index])
fun applyGrowMemory(ctx: FuncContext, fn: Func, insnIndex: Int) =
// Grow mem is a special case where the memory ref is already pre-injected on
// the stack before this call. But it can have a memory leftover on the stack
// so we pop it if we need to
ctx.cls.mem.growMemory(fn).let { fn ->
popMemoryIfNecessary(ctx, fn, ctx.insns.getOrNull(insnIndex + 1))
}
fun applySelfSetGlobal(ctx: Context, fn: Func, index: Int, global: Node.Global) =
// We have to swap "this" with the value on the stack
fn.addInsns(VarInsnNode(Opcodes.ALOAD, fn.lastParamLocalVarIndex)).
push(ctx.thisRef).
stackSwap().
popExpecting(global.type.contentType.typeRef).
fun applyCurrentMemory(ctx: FuncContext, fn: Func) =
// Curr mem is not specially injected, so we have to put the memory on the
// stack since we need it
putMemoryOnStackIfNecessary(ctx, fn).let { fn -> ctx.cls.mem.currentMemory(fn) }
fun applyStoreOp(ctx: FuncContext, fn: Func, insn: Node.Instr.Args.AlignOffset, insnIndex: Int) =
// Store is a special case where the memory ref is already pre-injected on
// the stack before this call. But it can have a memory leftover on the stack
// so we pop it if we need to
ctx.cls.mem.storeOp(fn, insn).let { fn ->
popMemoryIfNecessary(ctx, fn, ctx.insns.getOrNull(insnIndex + 1))
}
fun applyLoadOp(ctx: FuncContext, fn: Func, insn: Node.Instr.Args.AlignOffset) =
// Loads are not specially injected, so we have to put the memory on the
// stack since we need it
putMemoryOnStackIfNecessary(ctx, fn).let { fn -> ctx.cls.mem.loadOp(fn, insn) }
fun putMemoryOnStackIfNecessary(ctx: FuncContext, fn: Func) =
if (fn.stack.lastOrNull() == ctx.cls.mem.memType) fn
else if (fn.memIsLocalVar)
// Assume it's just past the locals
fn.addInsns(VarInsnNode(Opcodes.ALOAD, ctx.actualLocalIndex(ctx.node.locals.size))).
push(ctx.cls.mem.memType)
else fn.addInsns(
VarInsnNode(Opcodes.ALOAD, 0),
FieldInsnNode(Opcodes.GETFIELD, ctx.cls.thisRef.asmName, "memory", ctx.cls.mem.memType.asmDesc)
).push(ctx.cls.mem.memType)
fun popMemoryIfNecessary(ctx: FuncContext, fn: Func, nextInsn: Insn?) =
// We pop the mem if it's there and not a mem op next
if (fn.stack.lastOrNull() != ctx.cls.mem.memType) fn else {
val nextInstrRequiresMemOnStack = when (nextInsn) {
is Insn.Node -> nextInsn.insn is Node.Instr.Args.AlignOffset ||
nextInsn.insn is Node.Instr.CurrentMemory || nextInsn.insn is Node.Instr.GrowMemory
is Insn.MemNeededOnStack -> true
else -> false
}
if (nextInstrRequiresMemOnStack) fn
else fn.popExpecting(ctx.cls.mem.memType).addInsns(InsnNode(Opcodes.POP))
}
fun applySetGlobal(ctx: FuncContext, fn: Func, index: Int) = ctx.cls.globalAtIndex(index).let {
when (it) {
is Either.Left -> applyImportSetGlobal(ctx, fn, index, it.v.kind as Node.Import.Kind.Global)
is Either.Right -> applySelfSetGlobal(ctx, fn, index, it.v)
}
}
fun applySelfSetGlobal(ctx: FuncContext, fn: Func, index: Int, global: Node.Global) =
// Just call putfield
// Note, this is special and "this" has already been injected on the stack for us
fn.popExpecting(global.type.contentType.typeRef).
popExpecting(MethodHandle::class.ref).
addInsns(
FieldInsnNode(Opcodes.PUTFIELD, ctx.thisRef.asmName, globalName(index),
FieldInsnNode(Opcodes.PUTFIELD, ctx.cls.thisRef.asmName, globalName(index),
global.type.contentType.typeRef.asmDesc)
)
fun applyImportSetGlobal(ctx: Context, fn: Func, index: Int, import: Node.Import.Kind.Global) =
fun applyImportSetGlobal(ctx: FuncContext, fn: Func, index: Int, import: Node.Import.Kind.Global) =
// Load the setter method handle field, then invoke it with stack val
fn.popExpecting(import.type.contentType.typeRef).addInsns(
VarInsnNode(Opcodes.ALOAD, fn.lastParamLocalVarIndex),
FieldInsnNode(Opcodes.GETFIELD, ctx.thisRef.asmName,
importGlobalSetterFieldName(index), MethodHandle::class.ref.asmDesc),
MethodInsnNode(Opcodes.INVOKEVIRTUAL, MethodHandle::class.ref.asmName, "invokeExact",
"(${import.type.contentType.typeRef.asmDesc})V", false)
)
// Note, this is special and the method handle has already been injected on the stack for us
fn.popExpecting(import.type.contentType.typeRef).
popExpecting(MethodHandle::class.ref).
addInsns(
MethodInsnNode(Opcodes.INVOKEVIRTUAL, MethodHandle::class.ref.asmName, "invokeExact",
"(${import.type.contentType.typeRef.asmDesc})V", false)
)
fun applyGetGlobal(ctx: Context, fn: Func, index: Int) =
// Import is handled completely differently than self
if (index < ctx.importGlobals.size)
applyImportGetGlobal(ctx, fn, index, ctx.importGlobals[index].kind as Node.Import.Kind.Global)
else
applySelfGetGlobal(ctx, fn, index, ctx.mod.globals[ctx.importGlobals.size - index])
fun applyGetGlobal(ctx: FuncContext, fn: Func, index: Int) = ctx.cls.globalAtIndex(index).let {
when (it) {
is Either.Left -> applyImportGetGlobal(ctx, fn, index, it.v.kind as Node.Import.Kind.Global)
is Either.Right -> applySelfGetGlobal(ctx, fn, index, it.v)
}
}
fun applySelfGetGlobal(ctx: Context, fn: Func, index: Int, global: Node.Global) =
fun applySelfGetGlobal(ctx: FuncContext, fn: Func, index: Int, global: Node.Global) =
fn.addInsns(
VarInsnNode(Opcodes.ALOAD, fn.lastParamLocalVarIndex),
FieldInsnNode(Opcodes.GETFIELD, ctx.thisRef.asmName, globalName(index),
VarInsnNode(Opcodes.ALOAD, 0),
FieldInsnNode(Opcodes.GETFIELD, ctx.cls.thisRef.asmName, globalName(index),
global.type.contentType.typeRef.asmDesc)
).push(global.type.contentType.typeRef)
fun applyImportGetGlobal(ctx: Context, fn: Func, index: Int, import: Node.Import.Kind.Global) =
fun applyImportGetGlobal(ctx: FuncContext, fn: Func, index: Int, import: Node.Import.Kind.Global) =
// Load the getter method handle field, then invoke it with nothing
fn.addInsns(
VarInsnNode(Opcodes.ALOAD, fn.lastParamLocalVarIndex),
FieldInsnNode(Opcodes.GETFIELD, ctx.thisRef.asmName,
VarInsnNode(Opcodes.ALOAD, 0),
FieldInsnNode(Opcodes.GETFIELD, ctx.cls.thisRef.asmName,
importGlobalGetterFieldName(index), MethodHandle::class.ref.asmDesc),
MethodInsnNode(Opcodes.INVOKEVIRTUAL, MethodHandle::class.ref.asmName, "invokeExact",
"()" + import.type.contentType.typeRef.asmDesc, false)
).push(import.type.contentType.typeRef)
fun applyTeeLocal(ctx: Context, f: Node.Func, fn: Func, index: Int) = f.locals[index].typeRef.let { typeRef ->
fun applyTeeLocal(ctx: FuncContext, fn: Func, index: Int) = ctx.node.locals[index].typeRef.let { typeRef ->
fn.addInsns(InsnNode(if (typeRef.stackSize == 2) Opcodes.DUP2 else Opcodes.DUP)).
push(typeRef).let { applySetLocal(ctx, f, it, index) }
push(typeRef).let { applySetLocal(ctx, it, index) }
}
fun applySetLocal(ctx: Context, f: Node.Func, fn: Func, index: Int) =
fn.popExpecting(f.locals[index].typeRef).let { fn ->
when (f.locals[index]) {
Node.Type.Value.I32 -> fn.addInsns(VarInsnNode(Opcodes.ISTORE, f.actualLocalIndex(index)))
Node.Type.Value.I64 -> fn.addInsns(VarInsnNode(Opcodes.LSTORE, f.actualLocalIndex(index)))
Node.Type.Value.F32 -> fn.addInsns(VarInsnNode(Opcodes.FSTORE, f.actualLocalIndex(index)))
Node.Type.Value.F64 -> fn.addInsns(VarInsnNode(Opcodes.DSTORE, f.actualLocalIndex(index)))
fun applySetLocal(ctx: FuncContext, fn: Func, index: Int) =
fn.popExpecting(ctx.node.locals[index].typeRef).let { fn ->
when (ctx.node.locals[index]) {
Node.Type.Value.I32 -> fn.addInsns(VarInsnNode(Opcodes.ISTORE, ctx.actualLocalIndex(index)))
Node.Type.Value.I64 -> fn.addInsns(VarInsnNode(Opcodes.LSTORE, ctx.actualLocalIndex(index)))
Node.Type.Value.F32 -> fn.addInsns(VarInsnNode(Opcodes.FSTORE, ctx.actualLocalIndex(index)))
Node.Type.Value.F64 -> fn.addInsns(VarInsnNode(Opcodes.DSTORE, ctx.actualLocalIndex(index)))
}
}
fun applyGetLocal(ctx: Context, f: Node.Func, fn: Func, index: Int) = when (f.locals[index]) {
Node.Type.Value.I32 -> fn.addInsns(VarInsnNode(Opcodes.ILOAD, f.actualLocalIndex(index)))
Node.Type.Value.I64 -> fn.addInsns(VarInsnNode(Opcodes.LLOAD, f.actualLocalIndex(index)))
Node.Type.Value.F32 -> fn.addInsns(VarInsnNode(Opcodes.FLOAD, f.actualLocalIndex(index)))
Node.Type.Value.F64 -> fn.addInsns(VarInsnNode(Opcodes.DLOAD, f.actualLocalIndex(index)))
}.push(f.locals[index].typeRef)
fun applyGetLocal(ctx: FuncContext, fn: Func, index: Int) = when (ctx.node.locals[index]) {
Node.Type.Value.I32 -> fn.addInsns(VarInsnNode(Opcodes.ILOAD, ctx.actualLocalIndex(index)))
Node.Type.Value.I64 -> fn.addInsns(VarInsnNode(Opcodes.LLOAD, ctx.actualLocalIndex(index)))
Node.Type.Value.F32 -> fn.addInsns(VarInsnNode(Opcodes.FLOAD, ctx.actualLocalIndex(index)))
Node.Type.Value.F64 -> fn.addInsns(VarInsnNode(Opcodes.DLOAD, ctx.actualLocalIndex(index)))
}.push(ctx.node.locals[index].typeRef)
fun applySelectInsn(ctx: Context, origFn: Func): Func {
fun applySelectInsn(ctx: FuncContext, origFn: Func): Func {
var fn = origFn
// 3 things, first two must have same type, third is 0 check (0 means use second, otherwise use first)
// What we'll do is:
@@ -289,28 +373,24 @@
)
}
fun applyCallInsn(ctx: Context, f: Node.Func, origFn: Func, index: Int, indirect: Boolean): Func {
// Check whether it's an import or local to get type
val funcType = ctx.importFuncs.getOrNull(index).let {
when (it) {
null -> ctx.mod.funcs.getOrNull(ctx.importFuncs.size - index)?.type
else -> (it.kind as? Node.Import.Kind.Func)?.typeIndex?.let(ctx.mod.types::getOrNull)
fun applyCallInsn(ctx: FuncContext, fn: Func, index: Int) =
// Imports use a MethodHandle field, others call directly
ctx.cls.funcTypeAtIndex(index).let { funcType ->
fn.popExpectingMulti(funcType.params.map(Node.Type.Value::typeRef)).let { fn ->
when (ctx.cls.funcAtIndex(index)) {
is Either.Left -> fn.popExpecting(MethodHandle::class.ref).addInsns(
MethodInsnNode(Opcodes.INVOKEVIRTUAL, MethodHandle::class.ref.asmName,
"invokeExact", funcType.asmDesc, false)
)
is Either.Right -> fn.popExpecting(ctx.cls.thisRef).addInsns(
MethodInsnNode(Opcodes.INVOKESTATIC, ctx.cls.thisRef.asmName,
funcName(index), funcType.asmDesc, false)
)
}.let { fn -> funcType.ret?.let { fn.push(it.typeRef) } ?: fn }
}
} ?: throw RuntimeException("Cannot find func at index $index")
// Check stack expectations
var fn = origFn.popExpectingMulti(funcType.params.map(Node.Type.Value::typeRef))
// Add "this" at the end and call statically
fn = fn.addInsns(
VarInsnNode(Opcodes.ALOAD, fn.lastParamLocalVarIndex),
MethodInsnNode(Opcodes.INVOKESTATIC, ctx.thisRef.asmName,
funcName(index), funcType.asmDesc, false)
)
// Return push on stack?
funcType.ret?.also { fn = fn.push(it.typeRef) }
return fn
}
}
fun applyReturnInsn(ctx: Context, f: Node.Func, fn: Func) = when (f.type.ret) {
fun applyReturnInsn(ctx: FuncContext, fn: Func) = when (ctx.node.type.ret) {
null ->
fn.addInsns(InsnNode(Opcodes.RETURN))
Node.Type.Value.I32 ->
@@ -322,21 +402,9 @@ open class AstToAsm {
Node.Type.Value.F64 ->
fn.popExpecting(Double::class.ref).addInsns(InsnNode(Opcodes.DRETURN))
}.let {
require(it.stack.isEmpty()) { "Stack not empty on void return" }
require(it.stack.isEmpty()) { "Stack not empty on return" }
it
}
data class Context(
val packageName: String,
val className: String,
val mod: Node.Module,
val cls: ClassNode,
val mem: Mem = ByteBufferMem()
) {
val importFuncs: List<Node.Import> by lazy { mod.imports.filter { it.kind is Node.Import.Kind.Func } }
val importGlobals: List<Node.Import> by lazy { mod.imports.filter { it.kind is Node.Import.Kind.Global } }
val thisRef = TypeRef(Type.getObjectType(packageName.replace('.', '/') + className))
}
companion object : AstToAsm()
}
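Throughout this file, Func threads an immutable picture of the JVM operand stack through every instruction: push records a type, popExpecting asserts and removes one. A standalone sketch of that discipline (TrackedStack is a hypothetical simplification, not asmble's actual Func):

// Hypothetical reduction of Func's stack bookkeeping to its core idea.
data class TrackedStack(val stack: List<String> = emptyList()) {
    fun push(type: String) = copy(stack = stack + type)
    fun popExpecting(type: String): TrackedStack {
        require(stack.lastOrNull() == type) { "Expected $type, stack was $stack" }
        return copy(stack = stack.dropLast(1))
    }
}

fun main() {
    // Mirrors applyCallInsn: pop the params, pop the receiver beneath them,
    // then push the call's result.
    val s = TrackedStack()
        .push("this").push("int").push("int") // receiver injected, then args
        .popExpecting("int").popExpecting("int").popExpecting("this")
        .push("int")                           // the return value
    println(s.stack) // [int]
}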

View File

@@ -0,0 +1,41 @@
package asmble.compile.jvm
import asmble.ast.Node
import asmble.util.Either
import org.objectweb.asm.Type
import org.objectweb.asm.tree.ClassNode
data class ClsContext(
val packageName: String,
val className: String,
val mod: Node.Module,
val cls: ClassNode,
val mem: Mem = ByteBufferMem,
val reworker: InsnReworker = InsnReworker,
val nonAdjacentMemAccessesRequiringLocalVar: Int = 3
) {
val importFuncs: List<Node.Import> by lazy { mod.imports.filter { it.kind is Node.Import.Kind.Func } }
val importGlobals: List<Node.Import> by lazy { mod.imports.filter { it.kind is Node.Import.Kind.Global } }
val thisRef = TypeRef(Type.getObjectType(packageName.replace('.', '/') + className))
fun funcAtIndex(index: Int) = importFuncs.getOrNull(index).let {
when (it) {
null -> Either.Right(mod.funcs.getOrNull(index - importFuncs.size) ?: error("No func at $index"))
else -> Either.Left(it)
}
}
fun funcTypeAtIndex(index: Int) = funcAtIndex(index).let {
when (it) {
is Either.Left -> mod.types[(it.v.kind as Node.Import.Kind.Func).typeIndex]
is Either.Right -> it.v.type
}
}
fun globalAtIndex(index: Int) = importGlobals.getOrNull(index).let {
when (it) {
null -> Either.Right(mod.globals.getOrNull(index - importGlobals.size) ?: error("No global at $index"))
else -> Either.Left(it)
}
}
}
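funcAtIndex and globalAtIndex encode WebAssembly's single index space: imports occupy the low indices and module-local definitions follow, so a local item lives at index - imports.size. A self-contained sketch of that rule (resolve and IndexedItem are illustrative names; the real code returns asmble.util.Either):

// Illustrative model of the import-first index space used above.
sealed class IndexedItem {
    data class Imported(val name: String) : IndexedItem()
    data class Local(val name: String) : IndexedItem()
}

fun resolve(imports: List<String>, locals: List<String>, index: Int): IndexedItem =
    if (index < imports.size) IndexedItem.Imported(imports[index])
    else IndexedItem.Local(locals[index - imports.size])

fun main() {
    val imports = listOf("spectest.print")
    val locals = listOf("\$func1")
    println(resolve(imports, locals, 0)) // Imported(name=spectest.print)
    println(resolve(imports, locals, 1)) // Local(name=$func1)
}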

View File

@@ -1,5 +1,6 @@
package asmble.compile.jvm
import asmble.ast.Node
import org.objectweb.asm.Opcodes
import org.objectweb.asm.Type
import org.objectweb.asm.tree.AbstractInsnNode
@@ -12,14 +13,12 @@ data class Func(
val ret: TypeRef = Void::class.ref,
val access: Int = Opcodes.ACC_PUBLIC,
val insns: List<AbstractInsnNode> = emptyList(),
val stack: List<TypeRef> = emptyList()
val stack: List<TypeRef> = emptyList(),
val memIsLocalVar: Boolean = false
) {
val desc: String get() = ret.asMethodRetDesc(*params.toTypedArray())
val lastParamLocalVarIndex: Int get() =
params.dropLast(1).fold(if (access.isAccessStatic) 1 else 2) { total, param -> total + param.stackSize }
fun addInsns(insns: List<AbstractInsnNode>) = copy(insns = this.insns + insns)
fun addInsns(vararg insns: AbstractInsnNode) = copy(insns = this.insns + insns)

View File

@@ -0,0 +1,13 @@
package asmble.compile.jvm
import asmble.ast.Node
data class FuncContext(
val cls: ClsContext,
val node: Node.Func,
val insns: List<Insn>
) {
fun actualLocalIndex(givenIndex: Int) =
// Add 1 for "this"
node.locals.take(givenIndex).sumBy { it.typeRef.stackSize } + 1
}
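actualLocalIndex maps a WebAssembly local index to a JVM local-variable slot: slot 0 holds "this" (these compiled funcs are now instance methods), and each I64/F64 local occupies two slots. A standalone worked example of the same arithmetic (Val is a stand-in for Node.Type.Value):

// Stand-in enum carrying each value type's JVM slot width.
enum class Val(val slots: Int) { I32(1), I64(2), F32(1), F64(2) }

fun slotFor(locals: List<Val>, givenIndex: Int) =
    locals.take(givenIndex).sumBy { it.slots } + 1 // +1 skips slot 0, "this"

fun main() {
    val locals = listOf(Val.I32, Val.F64, Val.I32)
    println(slotFor(locals, 0)) // 1: first local sits just past "this"
    println(slotFor(locals, 2)) // 4: the F64 before it consumed slots 2 and 3
}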

View File

@@ -0,0 +1,11 @@
package asmble.compile.jvm
import asmble.ast.Node
sealed class Insn {
data class Node(val insn: asmble.ast.Node.Instr) : Insn()
data class ImportFuncRefNeededOnStack(val index: Int) : Insn()
data class ImportGlobalSetRefNeededOnStack(val index: Int) : Insn()
object ThisNeededOnStack : Insn()
object MemNeededOnStack : Insn()
}
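These pseudo-instructions are what InsnReworker splices in between the wrapped originals, so that a receiver or memory reference is already sitting beneath an instruction's operands by the time it executes. A hypothetical reworked sequence for setting a module-local global to a constant (assuming the obvious data-class shapes for I32Const and SetGlobal):

// "this" is injected before the operand so the eventual PUTFIELD finds its
// receiver beneath the value.
val reworked: List<Insn> = listOf(
    Insn.ThisNeededOnStack,            // injected receiver
    Insn.Node(Node.Instr.I32Const(5)), // original operand push
    Insn.Node(Node.Instr.SetGlobal(0)) // original instruction
)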

View File

@@ -0,0 +1,156 @@
package asmble.compile.jvm
import asmble.ast.Node
open class InsnReworker {
fun rework(ctx: ClsContext, insns: List<Node.Instr>): List<Insn> {
// How we do this:
// We run over each insn, and keep a running list of stack
// manips. If there is an insn that needs something so far back,
// we calc where it needs to be added and keep a running list of
// insn inserts. Then at the end we settle up.
//
// Note, we don't do any injections for things like "this" if
// they aren't needed up the stack (e.g. a simple getfield can
// just aload 0 itself)
// Each pair is first the amount of stack that is changed (0 is
// ignored, push is positive, pop is negative), then the index
// of the insn that caused it. As a special case, the stack
// change can be dynamic (i.e. for call_indirect).
var stackManips = emptyList<Pair<Int, Int>>()
// Keyed by the index to inject. With how the algorithm works, we
// guarantee the value will be in the right order if there are
// multiple for the same index
var insnsToInject = emptyMap<Int, List<Insn>>()
fun injectBeforeLastStackCount(insn: Insn, count: Int) {
var countSoFar = 0
for ((amountChanged, insnIndex) in stackManips.asReversed()) {
countSoFar += amountChanged
if (countSoFar == count) {
insnsToInject += insnIndex to (insnsToInject[insnIndex]?.let { it + insn } ?: listOf(insn))
return
}
}
error("Unable to find place to inject $insn")
}
// Go over each insn, determining where to inject
insns.forEachIndexed { index, insn ->
// Handle special injection cases
when (insn) {
// Calls require "this" or fn ref before the params
is Node.Instr.Call -> {
val inject =
if (insn.index < ctx.importFuncs.size) Insn.ImportFuncRefNeededOnStack(insn.index)
else Insn.ThisNeededOnStack
injectBeforeLastStackCount(inject, ctx.funcTypeAtIndex(insn.index).params.size)
}
is Node.Instr.CallIndirect -> TODO("Not sure what I need yet")
// Global set requires "this" before the single param
is Node.Instr.SetGlobal -> {
val inject =
if (insn.index < ctx.importGlobals.size) Insn.ImportGlobalSetRefNeededOnStack(insn.index)
else Insn.ThisNeededOnStack
injectBeforeLastStackCount(inject, 1)
}
// Storage requires "mem" before the single param
is Node.Instr.I32Store, is Node.Instr.I64Store, is Node.Instr.F32Store, is Node.Instr.F64Store,
is Node.Instr.I32Store8, is Node.Instr.I32Store16, is Node.Instr.I64Store8, is Node.Instr.I64Store16,
is Node.Instr.I64Store32 ->
injectBeforeLastStackCount(Insn.MemNeededOnStack, 1)
// Grow memory requires "mem" before the single param
is Node.Instr.GrowMemory ->
injectBeforeLastStackCount(Insn.MemNeededOnStack, 1)
else -> { }
}
// Add the current diff
stackManips += insnStackDiff(ctx, insn) to index
}
// Build resulting list
return insns.foldIndexed(emptyList<Insn>()) { index, ret, insn ->
val injections = insnsToInject[index] ?: emptyList()
ret + injections + Insn.Node(insn)
}
}
fun insnStackDiff(ctx: ClsContext, insn: Node.Instr) = when (insn) {
is Node.Instr.Call -> ctx.funcTypeAtIndex(insn.index).let {
// All calls pop "this" + params, and any return is a push
POP_THIS + (POP_PARAM * it.params.size) + (if (it.ret == null) NOP else PUSH_RESULT)
}
is Node.Instr.CallIndirect -> ctx.mod.types[insn.index].let {
POP_THIS + (POP_PARAM * it.params.size) + (if (it.ret == null) NOP else PUSH_RESULT)
}
is Node.Instr.Drop -> POP_PARAM
is Node.Instr.Select -> (POP_PARAM * 3) + PUSH_RESULT
is Node.Instr.GetLocal -> PUSH_RESULT
is Node.Instr.SetLocal -> POP_PARAM
is Node.Instr.TeeLocal -> POP_PARAM + PUSH_RESULT
is Node.Instr.GetGlobal -> PUSH_RESULT
is Node.Instr.SetGlobal -> POP_THIS + POP_PARAM
is Node.Instr.I32Load, is Node.Instr.I64Load, is Node.Instr.F32Load, is Node.Instr.F64Load,
is Node.Instr.I32Load8S, is Node.Instr.I32Load8U, is Node.Instr.I32Load16U, is Node.Instr.I32Load16S,
is Node.Instr.I64Load8S, is Node.Instr.I64Load8U, is Node.Instr.I64Load16U, is Node.Instr.I64Load16S,
is Node.Instr.I64Load32S, is Node.Instr.I64Load32U -> POP_MEM + PUSH_RESULT
is Node.Instr.I32Store, is Node.Instr.I64Store, is Node.Instr.F32Store, is Node.Instr.F64Store,
is Node.Instr.I32Store8, is Node.Instr.I32Store16, is Node.Instr.I64Store8, is Node.Instr.I64Store16,
is Node.Instr.I64Store32 -> POP_MEM + POP_PARAM
is Node.Instr.CurrentMemory -> POP_MEM + PUSH_RESULT
is Node.Instr.GrowMemory -> POP_MEM + POP_PARAM
is Node.Instr.I32Const, is Node.Instr.I64Const,
is Node.Instr.F32Const, is Node.Instr.F64Const -> PUSH_RESULT
is Node.Instr.I32Add, is Node.Instr.I32Sub, is Node.Instr.I32Mul, is Node.Instr.I32DivS,
is Node.Instr.I32DivU, is Node.Instr.I32RemS, is Node.Instr.I32RemU, is Node.Instr.I32And,
is Node.Instr.I32Or, is Node.Instr.I32Xor, is Node.Instr.I32Shl, is Node.Instr.I32ShrS,
is Node.Instr.I32ShrU, is Node.Instr.I32Rotl, is Node.Instr.I32Rotr, is Node.Instr.I32Eq,
is Node.Instr.I32Ne, is Node.Instr.I32LtS, is Node.Instr.I32LeS, is Node.Instr.I32LtU,
is Node.Instr.I32LeU, is Node.Instr.I32GtS, is Node.Instr.I32GeS, is Node.Instr.I32GtU,
is Node.Instr.I32GeU -> POP_PARAM + POP_PARAM + PUSH_RESULT
is Node.Instr.I32Clz, is Node.Instr.I32Ctz, is Node.Instr.I32Popcnt,
is Node.Instr.I32Eqz -> POP_PARAM + PUSH_RESULT
is Node.Instr.I64Add, is Node.Instr.I64Sub, is Node.Instr.I64Mul, is Node.Instr.I64DivS,
is Node.Instr.I64DivU, is Node.Instr.I64RemS, is Node.Instr.I64RemU, is Node.Instr.I64And,
is Node.Instr.I64Or, is Node.Instr.I64Xor, is Node.Instr.I64Shl, is Node.Instr.I64ShrS,
is Node.Instr.I64ShrU, is Node.Instr.I64Rotl, is Node.Instr.I64Rotr, is Node.Instr.I64Eq,
is Node.Instr.I64Ne, is Node.Instr.I64LtS, is Node.Instr.I64LeS, is Node.Instr.I64LtU,
is Node.Instr.I64LeU, is Node.Instr.I64GtS, is Node.Instr.I64GeS, is Node.Instr.I64GtU,
is Node.Instr.I64GeU -> POP_PARAM + POP_PARAM + PUSH_RESULT
is Node.Instr.I64Clz, is Node.Instr.I64Ctz, is Node.Instr.I64Popcnt,
is Node.Instr.I64Eqz -> POP_PARAM + PUSH_RESULT
is Node.Instr.F32Add, is Node.Instr.F32Sub, is Node.Instr.F32Mul, is Node.Instr.F32Div,
is Node.Instr.F32Eq, is Node.Instr.F32Ne, is Node.Instr.F32Lt, is Node.Instr.F32Le,
is Node.Instr.F32Gt, is Node.Instr.F32Ge, is Node.Instr.F32Sqrt, is Node.Instr.F32Min,
is Node.Instr.F32Max -> POP_PARAM + POP_PARAM + PUSH_RESULT
is Node.Instr.F32Abs, is Node.Instr.F32Neg, is Node.Instr.F32CopySign, is Node.Instr.F32Ceil,
is Node.Instr.F32Floor, is Node.Instr.F32Trunc, is Node.Instr.F32Nearest -> POP_PARAM + PUSH_RESULT
is Node.Instr.F64Add, is Node.Instr.F64Sub, is Node.Instr.F64Mul, is Node.Instr.F64Div,
is Node.Instr.F64Eq, is Node.Instr.F64Ne, is Node.Instr.F64Lt, is Node.Instr.F64Le,
is Node.Instr.F64Gt, is Node.Instr.F64Ge, is Node.Instr.F64Sqrt, is Node.Instr.F64Min,
is Node.Instr.F64Max -> POP_PARAM + POP_PARAM + PUSH_RESULT
is Node.Instr.F64Abs, is Node.Instr.F64Neg, is Node.Instr.F64CopySign, is Node.Instr.F64Ceil,
is Node.Instr.F64Floor, is Node.Instr.F64Trunc, is Node.Instr.F64Nearest -> POP_PARAM + PUSH_RESULT
is Node.Instr.I32WrapI64, is Node.Instr.I32TruncSF32, is Node.Instr.I32TruncUF32,
is Node.Instr.I32TruncSF64, is Node.Instr.I32TruncUF64, is Node.Instr.I64ExtendSI32,
is Node.Instr.I64ExtendUI32, is Node.Instr.I64TruncSF32, is Node.Instr.I64TruncUF32,
is Node.Instr.I64TruncSF64, is Node.Instr.I64TruncUF64, is Node.Instr.F32ConvertSI32,
is Node.Instr.F32ConvertUI32, is Node.Instr.F32ConvertSI64, is Node.Instr.F32ConvertUI64,
is Node.Instr.F32DemoteF64, is Node.Instr.F64ConvertSI32, is Node.Instr.F64ConvertUI32,
is Node.Instr.F64ConvertSI64, is Node.Instr.F64ConvertUI64, is Node.Instr.F64PromoteF32,
is Node.Instr.I32ReinterpretF32, is Node.Instr.I64ReinterpretF64, is Node.Instr.F32ReinterpretI32,
is Node.Instr.F64ReinterpretI64 -> POP_PARAM + PUSH_RESULT
else -> TODO()
}
companion object : InsnReworker() {
const val POP_THIS = -1
const val POP_PARAM = -1
const val POP_MEM = -1
const val PUSH_RESULT = 1
const val NOP = 0
}
}
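To see the reverse walk in injectBeforeLastStackCount concretely, here is a self-contained sketch (injectionPoint is an illustrative extraction, not asmble's API). For i32.const 1; i32.const 2; call $f, where $f takes two params, the walk sums stack effects backwards until both arguments are accounted for, so the injected "this" lands before the first const, matching insnStackDiff's accounting (for that call, POP_THIS + POP_PARAM * 2 + PUSH_RESULT = -2):

// Walk the recorded (stackDiff, insnIndex) pairs newest-first until `count`
// stack values are accounted for; the injection goes before that insn.
fun injectionPoint(stackManips: List<Pair<Int, Int>>, count: Int): Int {
    var countSoFar = 0
    for ((amountChanged, insnIndex) in stackManips.asReversed()) {
        countSoFar += amountChanged
        if (countSoFar == count) return insnIndex
    }
    error("No injection point for count $count")
}

fun main() {
    // i32.const 1 (insn 0) and i32.const 2 (insn 1) each push one value.
    val manips = listOf(1 to 0, 1 to 1)
    println(injectionPoint(manips, 2)) // 0: "this" goes before the first const
}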