Mirror of https://github.com/fluencelabs/asmble (synced 2025-04-24 22:32:19 +00:00)
Fixes #1. Fix large mem data, fix copysign stack count, and rework leftover mem instance support

commit a9dc8ddd77 (parent 132b50772d)
@@ -47,15 +47,12 @@ open class ByteBufferMem(val direct: Boolean = true) : Mem {
         addInsns(
             forceFnType<ByteBuffer.(Int) -> Buffer>(ByteBuffer::position).invokeVirtual(),
             TypeInsnNode(Opcodes.CHECKCAST, memType.asmName),
-            // TODO: Is there a cheaper bulk approach instead of manually building
-            // a byte array? What's the harm of using a String in the constant pool instead?
-            bytes.size.const,
-            IntInsnNode(Opcodes.NEWARRAY, Opcodes.T_BYTE)
-        ).
-        addInsns(bytes.withIndex().flatMap { (index, byte) ->
-            listOf(InsnNode(Opcodes.DUP), index.const, byte.toInt().const, InsnNode(Opcodes.BASTORE))
-        }).
-        addInsns(
+            // We're going to do this as an LDC string in ISO-8859 and read it back at runtime
+            LdcInsnNode(bytes.toString(Charsets.ISO_8859_1)),
+            LdcInsnNode("ISO-8859-1"),
+            // Ug, can't do func refs on native types here...
+            MethodInsnNode(Opcodes.INVOKEVIRTUAL, String::class.ref.asmName,
+                "getBytes", "(Ljava/lang/String;)[B", false),
             0.const,
             bytes.size.const,
             forceFnType<ByteBuffer.(ByteArray, Int, Int) -> ByteBuffer>(ByteBuffer::put).invokeVirtual(),
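Why the new form is safe, and why it matters: ISO-8859-1 maps each byte value 0..255 to exactly one char, so a ByteArray survives the round trip through a String unchanged, and the whole data segment becomes a single constant-pool entry instead of a DUP/const/BASTORE sequence per byte. The old form costs several bytecode bytes per data byte, so a large data segment can exceed the JVM's 65535-byte limit on a method's code, which is presumably the "large mem data" failure behind issue #1. A minimal self-contained sketch of the round trip (illustration only, not code from the commit):

    // Every byte value survives an ISO-8859-1 String round trip.
    fun main() {
        val bytes = ByteArray(256) { it.toByte() }         // all 256 byte values
        val pooled = bytes.toString(Charsets.ISO_8859_1)   // what LdcInsnNode embeds
        val back = pooled.toByteArray(Charsets.ISO_8859_1) // what getBytes recovers at runtime
        check(bytes.contentEquals(back))                   // lossless
    }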
@@ -252,5 +249,7 @@ open class ByteBufferMem(val direct: Boolean = true) : Mem {
         }
     }
 
+    override val storeLeavesMemOnStack get() = true
+
     companion object : ByteBufferMem()
 }
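ByteBufferMem can answer true here because java.nio.ByteBuffer's bulk put returns its receiver, so the invokevirtual emitted for a store naturally leaves the memory instance back on the operand stack at no extra cost. A small sketch of that library behavior (example code, not from the commit):

    import java.nio.ByteBuffer

    fun main() {
        val mem = ByteBuffer.allocate(16)
        // put(ByteArray, Int, Int) returns the same buffer it was called on,
        // which is why a compiled store leaves the mem instance on the stack.
        val after = mem.put(byteArrayOf(1, 2, 3), 0, 3)
        check(after === mem)
    }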
@@ -13,7 +13,8 @@ data class Func(
     val stack: List<TypeRef> = emptyList(),
     val blockStack: List<Block> = emptyList(),
     // Contains index of JumpInsnNode that has a null label initially
-    val ifStack: List<Int> = emptyList()
+    val ifStack: List<Int> = emptyList(),
+    val lastStackIsMemLeftover: Boolean = false
 ) {
 
     val desc: String get() = ret.asMethodRetDesc(*params.toTypedArray())
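Func is an immutable data class, so the new lastStackIsMemLeftover flag is threaded through the builder with copy() rather than mutation; each apply step returns a fresh Func. A toy illustration of the pattern (the types and names here are mine, only the flag comes from the diff):

    data class MiniFunc(
        val stack: List<String> = emptyList(),
        val lastStackIsMemLeftover: Boolean = false
    )

    fun main() {
        val before = MiniFunc(stack = listOf("Mem"))
        val marked = before.copy(lastStackIsMemLeftover = true)   // mark the leftover
        val cleared = marked.copy(lastStackIsMemLeftover = false) // consume the leftover
        check(!before.lastStackIsMemLeftover && marked.lastStackIsMemLeftover && !cleared.lastStackIsMemLeftover)
    }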
@@ -93,7 +93,7 @@ open class FuncBuilder {
         is Insn.ThisNeededOnStack ->
             fn.addInsns(VarInsnNode(Opcodes.ALOAD, 0)).push(ctx.cls.thisRef)
         is Insn.MemNeededOnStack ->
-            putMemoryOnStackIfNecessary(ctx, fn)
+            putMemoryOnStack(ctx, fn)
     }
 
     fun applyNodeInsn(ctx: FuncContext, fn: Func, i: Node.Instr, index: Int) = when (i) {
@@ -1069,7 +1069,7 @@ open class FuncBuilder {
         // Curr mem is not specially injected, so we have to put the memory on the
         // stack since we need it
         ctx.cls.assertHasMemory().let {
-            putMemoryOnStackIfNecessary(ctx, fn).let { fn -> ctx.cls.mem.currentMemory(ctx, fn) }
+            putMemoryOnStack(ctx, fn).let { fn -> ctx.cls.mem.currentMemory(ctx, fn) }
         }
 
     fun applyStoreOp(ctx: FuncContext, fn: Func, insn: Node.Instr.Args.AlignOffset, insnIndex: Int) =
@@ -1078,7 +1078,15 @@ open class FuncBuilder {
         // so we pop it if we need to
         ctx.cls.assertHasMemory().let {
             ctx.cls.mem.storeOp(ctx, fn, insn).let { fn ->
-                popMemoryIfNecessary(ctx, fn, ctx.insns.getOrNull(insnIndex + 1))
+                // As a special case, if this leaves the mem on the stack
+                // and we need it in the future, we mark it as leftover and
+                // reuse
+                if (!ctx.cls.mem.storeLeavesMemOnStack) fn else ctx.insns.getOrNull(insnIndex + 1).let { nextInsn ->
+                    if (nextInsn is Insn.MemNeededOnStack) {
+                        fn.peekExpecting(ctx.cls.mem.memType)
+                        fn.copy(lastStackIsMemLeftover = true)
+                    } else fn.popExpecting(ctx.cls.mem.memType).addInsns(InsnNode(Opcodes.POP))
+                }
             }
         }
 
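The replacement logic has three outcomes: leave the stack alone when the Mem implementation already consumed the instance, mark it leftover for reuse when the next reworked instruction needs memory, or emit a POP when it is dead. Reduced to a pure function for clarity (a sketch with my own names; the inputs correspond to storeLeavesMemOnStack and to the next insn being Insn.MemNeededOnStack):

    enum class AfterStore { NOTHING, MARK_LEFTOVER, EMIT_POP }

    fun afterStore(storeLeavesMemOnStack: Boolean, nextNeedsMem: Boolean) = when {
        !storeLeavesMemOnStack -> AfterStore.NOTHING // store already consumed the mem
        nextNeedsMem -> AfterStore.MARK_LEFTOVER     // keep it: lastStackIsMemLeftover = true
        else -> AfterStore.EMIT_POP                  // nobody wants it: pop it
    }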
@@ -1089,10 +1097,11 @@ open class FuncBuilder {
             ctx.cls.mem.loadOp(ctx, fn, insn)
         }
 
-    fun putMemoryOnStackIfNecessary(ctx: FuncContext, fn: Func) =
-        if (fn.stack.lastOrNull() == ctx.cls.mem.memType) fn
+    fun putMemoryOnStack(ctx: FuncContext, fn: Func) =
+        // Only put it if it's not already leftover
+        if (fn.lastStackIsMemLeftover) fn.copy(lastStackIsMemLeftover = false)
         else if (ctx.memIsLocalVar)
             // Assume it's just past the locals
             fn.addInsns(VarInsnNode(Opcodes.ALOAD, ctx.actualLocalIndex(ctx.node.localsSize))).
                 push(ctx.cls.mem.memType)
         else fn.addInsns(
@@ -1100,19 +1109,6 @@ open class FuncBuilder {
             FieldInsnNode(Opcodes.GETFIELD, ctx.cls.thisRef.asmName, "memory", ctx.cls.mem.memType.asmDesc)
         ).push(ctx.cls.mem.memType)
 
-    fun popMemoryIfNecessary(ctx: FuncContext, fn: Func, nextInsn: Insn?) =
-        // We pop the mem if it's there and not a mem op next
-        if (fn.stack.lastOrNull() != ctx.cls.mem.memType) fn else {
-            val nextInstrRequiresMemOnStack = when (nextInsn) {
-                is Insn.Node -> nextInsn.insn is Node.Instr.Args.AlignOffset ||
-                    nextInsn.insn is Node.Instr.CurrentMemory || nextInsn.insn is Node.Instr.GrowMemory
-                is Insn.MemNeededOnStack -> true
-                else -> false
-            }
-            if (nextInstrRequiresMemOnStack) fn
-            else fn.popExpecting(ctx.cls.mem.memType).addInsns(InsnNode(Opcodes.POP))
-        }
-
     fun applySetGlobal(ctx: FuncContext, fn: Func, index: Int) = ctx.cls.globalAtIndex(index).let {
         when (it) {
             is Either.Left -> applyImportSetGlobal(ctx, fn, index, it.v.kind as Node.Import.Kind.Global)
@@ -230,14 +230,14 @@ open class InsnReworker {
             is Node.Instr.F32Add, is Node.Instr.F32Sub, is Node.Instr.F32Mul, is Node.Instr.F32Div,
             is Node.Instr.F32Eq, is Node.Instr.F32Ne, is Node.Instr.F32Lt, is Node.Instr.F32Le,
             is Node.Instr.F32Gt, is Node.Instr.F32Ge, is Node.Instr.F32Sqrt, is Node.Instr.F32Min,
-            is Node.Instr.F32Max -> POP_PARAM + POP_PARAM + PUSH_RESULT
-            is Node.Instr.F32Abs, is Node.Instr.F32Neg, is Node.Instr.F32CopySign, is Node.Instr.F32Ceil,
+            is Node.Instr.F32Max, is Node.Instr.F32CopySign -> POP_PARAM + POP_PARAM + PUSH_RESULT
+            is Node.Instr.F32Abs, is Node.Instr.F32Neg, is Node.Instr.F32Ceil,
             is Node.Instr.F32Floor, is Node.Instr.F32Trunc, is Node.Instr.F32Nearest -> POP_PARAM + PUSH_RESULT
             is Node.Instr.F64Add, is Node.Instr.F64Sub, is Node.Instr.F64Mul, is Node.Instr.F64Div,
             is Node.Instr.F64Eq, is Node.Instr.F64Ne, is Node.Instr.F64Lt, is Node.Instr.F64Le,
             is Node.Instr.F64Gt, is Node.Instr.F64Ge, is Node.Instr.F64Sqrt, is Node.Instr.F64Min,
-            is Node.Instr.F64Max -> POP_PARAM + POP_PARAM + PUSH_RESULT
-            is Node.Instr.F64Abs, is Node.Instr.F64Neg, is Node.Instr.F64CopySign, is Node.Instr.F64Ceil,
+            is Node.Instr.F64Max, is Node.Instr.F64CopySign -> POP_PARAM + POP_PARAM + PUSH_RESULT
+            is Node.Instr.F64Abs, is Node.Instr.F64Neg, is Node.Instr.F64Ceil,
             is Node.Instr.F64Floor, is Node.Instr.F64Trunc, is Node.Instr.F64Nearest -> POP_PARAM + PUSH_RESULT
             is Node.Instr.I32WrapI64, is Node.Instr.I32TruncSF32, is Node.Instr.I32TruncUF32,
             is Node.Instr.I32TruncSF64, is Node.Instr.I32TruncUF64, is Node.Instr.I64ExtendSI32,
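This is the "copysign stack count" fix from the commit title: copysign consumes two operands, a magnitude and a sign source, so it belongs in the two-pop POP_PARAM + POP_PARAM + PUSH_RESULT group; counting it as unary made the reworker's stack model off by one. For intuition, the JDK's own copysign is likewise binary:

    fun main() {
        println(Math.copySign(3.0f, -1.0f)) // -3.0: magnitude of 3.0f, sign of -1.0f
        println(Math.copySign(3.0, 1.0))    // 3.0: the double overload
    }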
@@ -35,10 +35,12 @@ interface Mem {
     fun loadOp(ctx: FuncContext, func: Func, insn: Node.Instr.Args.AlignOffset): Func
 
     // Caller can trust the mem instance is on the stack followed
-    // by the value. If it's already there after call anyways, this can
-    // leave the mem inst on the stack and it will be reused or popped.
+    // by the value. If storeLeavesMemOnStack is true, this should leave the mem
+    // on the stack after every call.
     fun storeOp(ctx: FuncContext, func: Func, insn: Node.Instr.Args.AlignOffset): Func
 
+    val storeLeavesMemOnStack: Boolean
+
     companion object {
         const val PAGE_SIZE = 65536
     }
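PAGE_SIZE reflects WebAssembly's fixed page granularity: memory limits and the current_memory/grow_memory instructions are all expressed in 64 KiB pages, so an implementation scales by this constant whenever it needs byte counts. A trivial sketch (the helper is mine):

    const val PAGE_SIZE = 65536 // 64 KiB, fixed by the WASM spec

    // Convert a page count (as used by memory limits and grow_memory) to bytes.
    fun pagesToBytes(pages: Int): Long = pages.toLong() * PAGE_SIZE

    fun main() {
        check(pagesToBytes(1) == 65_536L)
        check(pagesToBytes(16) == 1_048_576L) // 1 MiB
    }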
src/test/resources/local-spec/large-mem.wast: new file, 147 lines
(File diff suppressed because one or more lines are too long)