const std = @import("std");
const module = @import("../module.zig");
const binary = @import("../binary.zig");

/// Virtual-register id. `stackify` hands out fresh, sequentially increasing
/// vregs for every newly produced value; an instruction that forwards an
/// existing value (e.g. local.tee, select) re-pushes the same vreg.
pub const VReg = u32;

/// Everything stackification can fail with, including allocator failure
/// (`OutOfMemory`) and truncated bytecode (`UnexpectedEof`).
pub const StackifyError = error{
    TypeMismatch,
    StackUnderflow,
    UndefinedFunction,
    UndefinedLocal,
    UndefinedGlobal,
    UndefinedMemory,
    UndefinedTable,
    InvalidLabelDepth,
    ImmutableGlobal,
    InvalidTypeIndex,
    InvalidFunctionIndex,
    ElseWithoutIf,
    InvalidAlignment,
    UnsupportedOpcode,
    InvalidValueType,
    OutOfMemory,
    UnexpectedEof,
};

/// Which vregs one instruction consumes and produces.
/// Both slices are heap-allocated (duped from scratch buffers in `stackify`)
/// and owned by the containing `AnnotatedInstr`.
pub const StackEffect = struct {
    // vregs popped, recorded in pop order (top of stack first).
    pops: []const VReg = &.{},
    // vregs pushed, in push order.
    pushes: []const VReg = &.{},

    /// Frees both slices and resets to the empty effect.
    /// Freeing the `&.{}` defaults is a no-op for Zig allocators.
    pub fn deinit(self: *StackEffect, allocator: std.mem.Allocator) void {
        allocator.free(self.pops);
        allocator.free(self.pushes);
        self.* = .{};
    }
};

/// Decoded immediate operand of an instruction.
/// `.br_table` is heap-allocated (depth entries plus the default target) and
/// is freed by `AnnotatedInstr.deinit`; every other variant is inline.
pub const Immediate = union(enum) {
    none,
    u32: u32,
    u64: u64,
    i32: i32,
    i64: i64,
    f32: f32,
    f64: f64,
    two_u32: struct { a: u32, b: u32 },
    br_table: []u32,
    // Memory-access immediate: alignment exponent (log2) and byte offset.
    mem: struct { @"align": u32, offset: u32 },
};

/// One bytecode instruction plus the analysis results attached by `stackify`.
pub const AnnotatedInstr = struct {
    // Raw leading opcode byte (0xFC-prefixed sub-opcodes keep opcode == 0xFC).
    opcode: u8,
    imm: Immediate,
    effect: StackEffect,
    // Sequential index of this instruction within the function body.
    instr_idx: u32,
    // Type of the (single) value produced, when one is produced on a
    // reachable path; null otherwise.
    result_type: ?module.ValType = null,

    /// Releases the heap-owned parts: a `.br_table` immediate (if any) and
    /// the pop/push slices.
    pub fn deinit(self: *AnnotatedInstr, allocator: std.mem.Allocator) void {
        if (self.imm == .br_table) allocator.free(self.imm.br_table);
        self.effect.deinit(allocator);
    }
};

/// Frees a slice returned by `stackify`: each instruction's owned data,
/// then the slice itself.
pub fn deinitInstrs(allocator: std.mem.Allocator, instrs: []AnnotatedInstr) void {
    for (instrs) |*ins| ins.deinit(allocator);
    allocator.free(instrs);
}

// One simulated operand-stack slot: the vreg holding the value and its type.
const StackVal = struct {
    vreg: VReg,
    valtype: module.ValType,
};

// One control frame of the validation stack.
const Frame = struct {
    kind: Kind,
    // Operand-stack height when the frame was entered; pops never go below it.
    start_height: usize,
    // Types a branch *to* this frame must supply: the block's params for a
    // loop (branch targets the loop head), its results otherwise.
    label_types: []const module.ValType,
    // Types the frame leaves on the stack when it exits normally at `end`.
    result_types: []const module.ValType,
    // False once this frame's tail is statically unreachable (after
    // unreachable/br/br_table/return); instruction effects are then skipped.
    reachable: bool,

    const Kind = enum { block, loop, @"if", @"else" };
};

/// Walk bytecode, simulate typed operand stack, assign vRegs to each produced value.
/// Returns a slice of AnnotatedInstr owned by the caller. Use `deinitInstrs` to free it.
pub fn stackify(
    allocator: std.mem.Allocator,
    body: *const module.FunctionBody,
    func_type: *const module.FuncType,
    mod: *const module.Module,
) StackifyError![]AnnotatedInstr {
    // ── Pre-pass over imports: imported entities precede module-local ones
    // in every wasm index space, so collect counts (and imported global
    // types, needed for global.get/set typing) up front.
    var imported_globals: std.ArrayList(module.GlobalType) = .empty;
    defer imported_globals.deinit(allocator);
    var num_imported_funcs: u32 = 0;
    var total_tables: u32 = 0;
    var total_memories: u32 = 0;
    for (mod.imports) |imp| {
        switch (imp.desc) {
            .func => num_imported_funcs += 1,
            .table => total_tables += 1,
            .memory => total_memories += 1,
            .global => |gt| try imported_globals.append(allocator, gt),
        }
    }
    total_tables += @as(u32, @intCast(mod.tables.len));
    total_memories += @as(u32, @intCast(mod.memories.len));
    const total_funcs: u32 = num_imported_funcs + @as(u32, @intCast(mod.functions.len));

    // Local index space = params followed by expanded local declarations.
    var local_types: std.ArrayList(module.ValType) = .empty;
    defer local_types.deinit(allocator);
    try local_types.appendSlice(allocator, func_type.params);
    for (body.locals) |decl| for (0..decl.count) |_| try local_types.append(allocator, decl.valtype);

    // Output list; on error, free everything already annotated.
    var instrs: std.ArrayList(AnnotatedInstr) = .empty;
    errdefer {
        for (instrs.items) |*ins| ins.deinit(allocator);
        instrs.deinit(allocator);
    }

    // Simulated typed operand stack and control-frame stack. Frame 0 models
    // the function body itself; its label types are the function results
    // (branching to depth == outermost behaves like `return`).
    var stack: std.ArrayList(StackVal) = .empty;
    defer stack.deinit(allocator);
    var frames: std.ArrayList(Frame) = .empty;
    defer frames.deinit(allocator);
    try frames.append(allocator, .{
        .kind = .block,
        .start_height = 0,
        .label_types = func_type.results,
        .result_types = func_type.results,
        .reachable = true,
    });

    // Scratch buffers reused per instruction, then duped into the effect.
    var tmp_pops: std.ArrayList(VReg) = .empty;
    defer tmp_pops.deinit(allocator);
    var tmp_pushes: std.ArrayList(VReg) = .empty;
    defer tmp_pushes.deinit(allocator);

    var next_vreg: VReg = 0;
    var pos: usize = 0;
    const code = body.code;
    var instr_idx: u32 = 0;
    while (pos < code.len) {
        const op = code[pos];
        pos += 1;
        tmp_pops.clearRetainingCapacity();
        tmp_pushes.clearRetainingCapacity();
        var ann = AnnotatedInstr{
            .opcode = op,
            .imm = .none,
            .effect = .{},
            .instr_idx = instr_idx,
            .result_type = null,
        };
        instr_idx += 1;
        // NOTE: `frame` points into frames.items; prongs below that push or
        // pop frames are careful not to use it afterwards.
        const frame = &frames.items[frames.items.len - 1];
        const reachable = frame.reachable;
        switch (op) {
            0x00 => { // unreachable: discard frame-local operands, mark dead
                if (reachable) {
                    stack.shrinkRetainingCapacity(frame.start_height);
                    frame.reachable = false;
                }
            },
            0x01 => {}, // nop
            0x02 => { // block
                const bt = try readBlockType(code, &pos);
                const res = try blockTypeResults(mod, bt);
                // NOTE(review): block/loop do not record their blocktype
                // immediate (only `if` does below) — confirm intended.
                try frames.append(allocator, .{
                    .kind = .block,
                    .start_height = stack.items.len,
                    .label_types = res,
                    .result_types = res,
                    .reachable = reachable,
                });
            },
            0x03 => { // loop: branches target the head, so label = params
                const bt = try readBlockType(code, &pos);
                const params = try blockTypeParams(mod, bt);
                const res = try blockTypeResults(mod, bt);
                try frames.append(allocator, .{
                    .kind = .loop,
                    .start_height = stack.items.len,
                    .label_types = params,
                    .result_types = res,
                    .reachable = reachable,
                });
            },
            0x04 => { // if: pops the i32 condition before opening the frame
                const bt = try readBlockType(code, &pos);
                const res = try blockTypeResults(mod, bt);
                if (reachable) _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                try frames.append(allocator, .{
                    .kind = .@"if",
                    .start_height = stack.items.len,
                    .label_types = res,
                    .result_types = res,
                    .reachable = reachable,
                });
                ann.imm = .{ .i32 = @intCast(bt) };
            },
            0x05 => { // else: reset stack to the if's base, reopen as else arm
                const cur = &frames.items[frames.items.len - 1];
                // kind check also guards the len-2 access below: at depth 1
                // the frame kind is .block, so we bail out first.
                if (cur.kind != .@"if") return error.ElseWithoutIf;
                if (cur.reachable) try checkStackTypes(&stack, cur.start_height, cur.result_types);
                stack.shrinkRetainingCapacity(cur.start_height);
                cur.kind = .@"else";
                // The else arm is reachable iff the enclosing frame was.
                cur.reachable = frames.items[frames.items.len - 2].reachable;
            },
            0x0B => { // end
                if (frames.items.len == 1) {
                    // Function-level end: verify results, then stop decoding.
                    if (frames.items[0].reachable) try checkStackTypes(&stack, 0, frames.items[0].result_types);
                    if (!frames.items[0].reachable) {
                        // If the function tail is polymorphic-unreachable, materialize typed results.
                        try emitMergeResults(allocator, &stack, &tmp_pushes, frames.items[0].result_types, &next_vreg, &ann.result_type);
                    } else if (frames.items[0].result_types.len == 1) {
                        ann.result_type = frames.items[0].result_types[0];
                    }
                    frame.reachable = true;
                    pos = code.len;
                } else {
                    const cur = frames.pop().?;
                    if (cur.reachable) {
                        // Keep the block's results on the stack for the parent.
                        try preserveBlockResults(allocator, &stack, cur.start_height, cur.result_types);
                        if (cur.result_types.len == 1) ann.result_type = cur.result_types[0];
                    } else {
                        // Dead tail: synthesize fresh vregs for the results so
                        // the parent frame still sees correctly typed values.
                        stack.shrinkRetainingCapacity(cur.start_height);
                        try emitMergeResults(allocator, &stack, &tmp_pushes, cur.result_types, &next_vreg, &ann.result_type);
                    }
                }
            },
            0x0C => { // br: pop the target's label types, then go dead
                const depth = try readULEB128(u32, code, &pos);
                ann.imm = .{ .u32 = depth };
                if (depth >= frames.items.len) return error.InvalidLabelDepth;
                if (reachable) {
                    const target = &frames.items[frames.items.len - 1 - depth];
                    try popLabelTypes(allocator, &stack, &tmp_pops, frame.start_height, target.label_types);
                }
                stack.shrinkRetainingCapacity(frame.start_height);
                frame.reachable = false;
            },
            0x0D => { // br_if: pops the condition; label values stay (fallthrough keeps them)
                const depth = try readULEB128(u32, code, &pos);
                ann.imm = .{ .u32 = depth };
                if (depth >= frames.items.len) return error.InvalidLabelDepth;
                if (reachable) {
                    _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                    const target = &frames.items[frames.items.len - 1 - depth];
                    try checkLabelTypes(&stack, frame.start_height, target.label_types);
                }
            },
            0x0E => { // br_table: n targets + default; all must share label types
                const n = try readULEB128(u32, code, &pos);
                const entries = try allocator.alloc(u32, n + 1);
                errdefer allocator.free(entries);
                var label_types: ?[]const module.ValType = null;
                var i: u32 = 0;
                while (i <= n) : (i += 1) {
                    const depth = try readULEB128(u32, code, &pos);
                    entries[i] = depth;
                    if (depth >= frames.items.len) return error.InvalidLabelDepth;
                    const target = &frames.items[frames.items.len - 1 - depth];
                    if (label_types == null) {
                        label_types = target.label_types;
                    } else if (!sameValTypeSlice(label_types.?, target.label_types)) {
                        return error.TypeMismatch;
                    }
                }
                ann.imm = .{ .br_table = entries };
                // NOTE(review): unlike `br` (0x0C), only the i32 index is
                // recorded in pops — label-arity operands are discarded by the
                // shrink below without entering the effect. Confirm intended.
                if (reachable) _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                stack.shrinkRetainingCapacity(frame.start_height);
                frame.reachable = false;
            },
            0x0F => { // return: behaves like br to the outermost frame
                if (reachable) try popLabelTypes(allocator, &stack, &tmp_pops, frame.start_height, frames.items[0].result_types);
                stack.shrinkRetainingCapacity(frame.start_height);
                frame.reachable = false;
            },
            0x10 => { // call: pop params (reverse order), push results
                const fidx = try readULEB128(u32, code, &pos);
                ann.imm = .{ .u32 = fidx };
                if (fidx >= total_funcs) return error.UndefinedFunction;
                const ft = try getFuncType(mod, fidx, num_imported_funcs);
                if (reachable) {
                    try popTypesReverse(allocator, &stack, &tmp_pops, frame.start_height, ft.params);
                    try pushResultTypes(allocator, &stack, &tmp_pushes, ft.results, &next_vreg, &ann.result_type);
                }
            },
            0x11 => { // call_indirect: extra i32 table index on top of params
                const type_idx = try readULEB128(u32, code, &pos);
                const table_idx = try readULEB128(u32, code, &pos);
                ann.imm = .{ .two_u32 = .{ .a = type_idx, .b = table_idx } };
                if (type_idx >= mod.types.len) return error.InvalidTypeIndex;
                if (table_idx >= total_tables) return error.UndefinedTable;
                if (reachable) {
                    _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                    const ft = &mod.types[type_idx];
                    try popTypesReverse(allocator, &stack, &tmp_pops, frame.start_height, ft.params);
                    try pushResultTypes(allocator, &stack, &tmp_pushes, ft.results, &next_vreg, &ann.result_type);
                }
            },
            0x1A => { // drop: any one value
                if (reachable) _ = try popAnyVReg(allocator, &stack, &tmp_pops, frame.start_height);
            },
            0x1B => { // select (untyped): cond i32, two same-typed operands;
                // result reuses the first operand's vreg.
                if (reachable) {
                    _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                    const rhs = try popAnyVReg(allocator, &stack, &tmp_pops, frame.start_height);
                    const lhs = try popAnyVReg(allocator, &stack, &tmp_pops, frame.start_height);
                    if (lhs.valtype != rhs.valtype) return error.TypeMismatch;
                    try pushExisting(allocator, &stack, &tmp_pushes, lhs.vreg, lhs.valtype);
                    ann.result_type = lhs.valtype;
                }
            },
            0x1C => { // select t: typed select; exactly one type annotation
                const n = try readULEB128(u32, code, &pos);
                if (n != 1) return error.TypeMismatch;
                const t = try decodeValType(try readByte(code, &pos));
                if (reachable) {
                    _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                    _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, t);
                    _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, t);
                    try pushNew(allocator, &stack, &tmp_pushes, t, &next_vreg);
                    ann.result_type = t;
                }
            },
            0x20 => { // local.get
                const idx = try readULEB128(u32, code, &pos);
                ann.imm = .{ .u32 = idx };
                if (idx >= local_types.items.len) return error.UndefinedLocal;
                if (reachable) {
                    const vt = local_types.items[idx];
                    try pushNew(allocator, &stack, &tmp_pushes, vt, &next_vreg);
                    ann.result_type = vt;
                }
            },
            0x21 => { // local.set
                const idx = try readULEB128(u32, code, &pos);
                ann.imm = .{ .u32 = idx };
                if (idx >= local_types.items.len) return error.UndefinedLocal;
                if (reachable) _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, local_types.items[idx]);
            },
            0x22 => { // local.tee: pop then re-push the same vreg
                const idx = try readULEB128(u32, code, &pos);
                ann.imm = .{ .u32 = idx };
                if (idx >= local_types.items.len) return error.UndefinedLocal;
                if (reachable) {
                    const v = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, local_types.items[idx]);
                    try pushExisting(allocator, &stack, &tmp_pushes, v.vreg, v.valtype);
                    ann.result_type = v.valtype;
                }
            },
            0x23 => { // global.get
                const idx = try readULEB128(u32, code, &pos);
                ann.imm = .{ .u32 = idx };
                const gt = try getGlobalType(mod, imported_globals.items, idx);
                if (reachable) {
                    try pushNew(allocator, &stack, &tmp_pushes, gt.valtype, &next_vreg);
                    ann.result_type = gt.valtype;
                }
            },
            0x24 => { // global.set: target must be mutable
                const idx = try readULEB128(u32, code, &pos);
                ann.imm = .{ .u32 = idx };
                const gt = try getGlobalType(mod, imported_globals.items, idx);
                if (!gt.mutable) return error.ImmutableGlobal;
                if (reachable) _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, gt.valtype);
            },
            0x28...0x35 => { // loads: i32/i64/f32/f64.load and sized variants
                const mem_align = try readULEB128(u32, code, &pos);
                const offset = try readULEB128(u32, code, &pos);
                ann.imm = .{ .mem = .{ .@"align" = mem_align, .offset = offset } };
                if (total_memories == 0) return error.UndefinedMemory;
                // Alignment immediate is log2; must not exceed natural alignment.
                if (mem_align > naturalAlignmentLog2ForLoad(op)) return error.InvalidAlignment;
                if (reachable) {
                    _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                    const rt = memLoadResultType(op);
                    try pushNew(allocator, &stack, &tmp_pushes, rt, &next_vreg);
                    ann.result_type = rt;
                }
            },
            0x36...0x3E => { // stores: pop value then address
                const mem_align = try readULEB128(u32, code, &pos);
                const offset = try readULEB128(u32, code, &pos);
                ann.imm = .{ .mem = .{ .@"align" = mem_align, .offset = offset } };
                if (total_memories == 0) return error.UndefinedMemory;
                if (mem_align > naturalAlignmentLog2ForStore(op)) return error.InvalidAlignment;
                if (reachable) {
                    _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, memStoreValType(op));
                    _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                }
            },
            0x3F, 0x40 => { // memory.size / memory.grow (reserved 0x00 byte)
                _ = try readByte(code, &pos);
                if (total_memories == 0) return error.UndefinedMemory;
                if (reachable) {
                    if (op == 0x40) _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                    try pushNew(allocator, &stack, &tmp_pushes, .i32, &next_vreg);
                    ann.result_type = .i32;
                }
            },
            0x41 => { // i32.const
                const val = try readSLEB128(i32, code, &pos);
                ann.imm = .{ .i32 = val };
                if (reachable) {
                    try pushNew(allocator, &stack, &tmp_pushes, .i32, &next_vreg);
                    ann.result_type = .i32;
                }
            },
            0x42 => { // i64.const
                const val = try readSLEB128(i64, code, &pos);
                ann.imm = .{ .i64 = val };
                if (reachable) {
                    try pushNew(allocator, &stack, &tmp_pushes, .i64, &next_vreg);
                    ann.result_type = .i64;
                }
            },
            0x43 => { // f32.const: 4 raw little-endian bytes
                if (pos + 4 > code.len) return error.UnexpectedEof;
                const raw = std.mem.readInt(u32, code[pos..][0..4], .little);
                pos += 4;
                ann.imm = .{ .f32 = @bitCast(raw) };
                if (reachable) {
                    try pushNew(allocator, &stack, &tmp_pushes, .f32, &next_vreg);
                    ann.result_type = .f32;
                }
            },
            0x44 => { // f64.const: 8 raw little-endian bytes
                if (pos + 8 > code.len) return error.UnexpectedEof;
                const raw = std.mem.readInt(u64, code[pos..][0..8], .little);
                pos += 8;
                ann.imm = .{ .f64 = @bitCast(raw) };
                if (reachable) {
                    try pushNew(allocator, &stack, &tmp_pushes, .f64, &next_vreg);
                    ann.result_type = .f64;
                }
            },
            // ── Numeric ops, grouped by (operand type -> result type) ──
            // i32.eqz
            0x45 => if (reachable) try unaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i32, .i32, &next_vreg, &ann),
            // i32 comparisons
            0x46...0x4F => if (reachable) try binaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i32, .i32, &next_vreg, &ann),
            // i64.eqz
            0x50 => if (reachable) try unaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i64, .i32, &next_vreg, &ann),
            // i64 comparisons
            0x51...0x5A => if (reachable) try binaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i64, .i32, &next_vreg, &ann),
            // f32 comparisons
            0x5B...0x60 => if (reachable) try binaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .f32, .i32, &next_vreg, &ann),
            // f64 comparisons
            0x61...0x66 => if (reachable) try binaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .f64, .i32, &next_vreg, &ann),
            // i32 clz/ctz/popcnt
            0x67...0x69 => if (reachable) try unaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i32, .i32, &next_vreg, &ann),
            // i32 arithmetic/bitwise
            0x6A...0x78 => if (reachable) try binaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i32, .i32, &next_vreg, &ann),
            // i64 clz/ctz/popcnt
            0x79...0x7B => if (reachable) try unaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i64, .i64, &next_vreg, &ann),
            // i64 arithmetic/bitwise
            0x7C...0x8A => if (reachable) try binaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i64, .i64, &next_vreg, &ann),
            // f32 unary (abs..sqrt)
            0x8B...0x91 => if (reachable) try unaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .f32, .f32, &next_vreg, &ann),
            // f32 binary (add..copysign)
            0x92...0x98 => if (reachable) try binaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .f32, .f32, &next_vreg, &ann),
            // f64 unary
            0x99...0x9F => if (reachable) try unaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .f64, .f64, &next_vreg, &ann),
            // f64 binary
            0xA0...0xA6 => if (reachable) try binaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .f64, .f64, &next_vreg, &ann),
            // conversions / reinterpretations (per-op typing in conversionOp)
            0xA7...0xBF => if (reachable) try conversionOp(allocator, op, &stack, &tmp_pops, &tmp_pushes, frame.start_height, &next_vreg, &ann),
            // i32.extend8_s / i32.extend16_s
            0xC0, 0xC1 => if (reachable) try unaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i32, .i32, &next_vreg, &ann),
            // i64.extend8_s / extend16_s / extend32_s
            0xC2, 0xC3, 0xC4 => if (reachable) try unaryOp(allocator, &stack, &tmp_pops, &tmp_pushes, frame.start_height, .i64, .i64, &next_vreg, &ann),
            0xFC => { // misc prefix: saturating truncation + bulk memory
                const subop = try readULEB128(u32, code, &pos);
                switch (subop) {
                    // i32/i64.trunc_sat_f32/f64_s/u
                    0...7 => if (reachable) try truncSatOp(allocator, subop, &stack, &tmp_pops, &tmp_pushes, frame.start_height, &next_vreg, &ann),
                    8 => { // memory.init: pops size, src offset, dst addr (all i32)
                        const data_idx = try readULEB128(u32, code, &pos);
                        const mem_idx = try readULEB128(u32, code, &pos);
                        ann.imm = .{ .two_u32 = .{ .a = data_idx, .b = mem_idx } };
                        if (data_idx >= mod.datas.len) return error.TypeMismatch;
                        // Only memory 0 exists in the MVP encoding.
                        if (total_memories == 0 or mem_idx != 0) return error.UndefinedMemory;
                        if (reachable) {
                            _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                            _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                            _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                        }
                    },
                    9 => { // data.drop
                        const data_idx = try readULEB128(u32, code, &pos);
                        ann.imm = .{ .u32 = data_idx };
                        if (data_idx >= mod.datas.len) return error.TypeMismatch;
                    },
                    10 => { // memory.copy: pops size, src, dst (all i32)
                        const dst_mem = try readULEB128(u32, code, &pos);
                        const src_mem = try readULEB128(u32, code, &pos);
                        ann.imm = .{ .two_u32 = .{ .a = dst_mem, .b = src_mem } };
                        if (total_memories == 0 or dst_mem != 0 or src_mem != 0) return error.UndefinedMemory;
                        if (reachable) {
                            _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                            _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                            _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                        }
                    },
                    11 => { // memory.fill: pops size, value, dst (all i32)
                        const mem_idx = try readULEB128(u32, code, &pos);
                        ann.imm = .{ .u32 = mem_idx };
                        if (total_memories == 0 or mem_idx != 0) return error.UndefinedMemory;
                        if (reachable) {
                            _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                            _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                            _ = try popExpectVReg(allocator, &stack, &tmp_pops, frame.start_height, .i32);
                        }
                    },
                    16 => { // table.size: pushes current table size as i32
                        const table_idx = try readULEB128(u32, code, &pos);
                        ann.imm = .{ .u32 = table_idx };
                        if (table_idx >= total_tables) return error.UndefinedTable;
                        if (reachable) {
                            try pushNew(allocator, &stack, &tmp_pushes, .i32, &next_vreg);
                            ann.result_type = .i32;
                        }
                    },
                    else => return error.UnsupportedOpcode,
                }
            },
            else => return error.UnsupportedOpcode,
        }
        // Snapshot the scratch buffers into caller-owned slices.
        // NOTE(review): if the second dupe fails, the first dupe (and a
        // br_table immediate) leak — the errdefer below is declared after
        // this statement. OOM-only path; consider restructuring.
        ann.effect = .{
            .pops = try allocator.dupe(VReg, tmp_pops.items),
            .pushes = try allocator.dupe(VReg, tmp_pushes.items),
        };
        errdefer ann.deinit(allocator);
        try instrs.append(allocator, ann);
    }
    return instrs.toOwnedSlice(allocator);
}

/// Pushes a freshly numbered vreg of type `vt` onto the simulated stack and
/// records it in `pushes`.
fn pushNew(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pushes: *std.ArrayList(VReg),
    vt: module.ValType,
    next_vreg: *VReg,
) StackifyError!void {
    const vr = next_vreg.*;
    next_vreg.* += 1;
    try stack.append(allocator, .{ .vreg = vr, .valtype = vt });
    try pushes.append(allocator, vr);
}

/// Re-pushes an existing vreg (value forwarding, e.g. local.tee / select).
fn pushExisting(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pushes: *std.ArrayList(VReg),
    vr: VReg,
    vt: module.ValType,
) StackifyError!void {
    try stack.append(allocator, .{ .vreg = vr, .valtype = vt });
    try pushes.append(allocator, vr);
}

/// Pops one value of any type, refusing to cross the current frame's base
/// height (`min_height`). Records the vreg in `pops`.
fn popAnyVReg(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pops: *std.ArrayList(VReg),
    min_height: usize,
) StackifyError!StackVal {
    if (stack.items.len <= min_height) return error.StackUnderflow;
    const v = stack.pop().?;
    try pops.append(allocator, v.vreg);
    return v;
}

/// Like `popAnyVReg` but also enforces the popped value's type.
fn popExpectVReg(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pops: *std.ArrayList(VReg),
    min_height: usize,
    expected: module.ValType,
) StackifyError!StackVal {
    const v = try popAnyVReg(allocator, stack, pops, min_height);
    if (v.valtype != expected) return error.TypeMismatch;
    return v;
}

/// Non-destructively checks that the slots starting at `base` match
/// `expected` (used at `end`/`else` merge points).
fn checkStackTypes(stack: *std.ArrayList(StackVal), base: usize, expected: []const module.ValType) StackifyError!void {
    if (stack.items.len < base + expected.len) return error.StackUnderflow;
    for (expected, 0..) |et, i| {
        if (stack.items[base + i].valtype != et) return error.TypeMismatch;
    }
}

/// Non-destructively checks that the TOP of the stack matches `expected`
/// without dipping below `base` (used by br_if, which keeps the values).
fn checkLabelTypes(stack: *std.ArrayList(StackVal), base: usize, expected: []const module.ValType) StackifyError!void {
    // NOTE(review): first check is implied by the `start < base` check below.
    if (stack.items.len < base + expected.len) return error.StackUnderflow;
    const start = stack.items.len - expected.len;
    if (start < base) return error.StackUnderflow;
    for (expected, 0..) |et, i| if (stack.items[start + i].valtype != et) return error.TypeMismatch;
}

/// Pops the label types of a branch target, last type first (stack order).
fn popLabelTypes(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pops: *std.ArrayList(VReg),
    base: usize,
    label_types: []const module.ValType,
) StackifyError!void {
    var i: usize = label_types.len;
    while (i > 0) : (i -= 1) {
        _ = try popExpectVReg(allocator, stack, pops, base, label_types[i - 1]);
    }
}

/// Pops call arguments in reverse declaration order (last param is on top).
fn popTypesReverse(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pops: *std.ArrayList(VReg),
    base: usize,
    types: []const module.ValType,
) StackifyError!void {
    var i: usize = types.len;
    while (i > 0) : (i -= 1) {
        _ = try popExpectVReg(allocator, stack, pops, base, types[i - 1]);
    }
}

/// Pushes fresh vregs for a call's results; multi-value returns are not
/// supported by this backend (at most one result).
fn pushResultTypes(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pushes: *std.ArrayList(VReg),
    results: []const module.ValType,
    next_vreg: *VReg,
    result_type: *?module.ValType,
) StackifyError!void {
    if (results.len > 1) return error.UnsupportedOpcode;
    for (results) |rt| {
        try pushNew(allocator, stack, pushes, rt, next_vreg);
        result_type.* = rt;
    }
}

/// Materializes typed result values for a block whose tail was unreachable,
/// so the parent frame sees a correctly typed stack.
/// NOTE(review): body is identical to `pushResultTypes`; kept separate for
/// the distinct call-site intent.
fn emitMergeResults(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pushes: *std.ArrayList(VReg),
    results: []const module.ValType,
    next_vreg: *VReg,
    result_type: *?module.ValType,
) StackifyError!void {
    if (results.len > 1) return error.UnsupportedOpcode;
    for (results) |rt| {
        try pushNew(allocator, stack, pushes, rt, next_vreg);
        result_type.* = rt;
    }
}

/// At a reachable `end`, keeps the block's result values (taken from the top
/// of the stack) while truncating everything else back to `start_height`.
/// NOTE(review): any extra slots between the frame base and the results are
/// silently discarded here rather than rejected — confirm intended.
fn preserveBlockResults(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    start_height: usize,
    results: []const module.ValType,
) StackifyError!void {
    if (results.len > 1) return error.UnsupportedOpcode;
    try checkStackTypes(stack, start_height, results);
    if (results.len == 0) {
        stack.shrinkRetainingCapacity(start_height);
        return;
    }
    const tail_start = stack.items.len - results.len;
    // Copy out the results before shrinking (slices into items would dangle).
    const saved = try allocator.dupe(StackVal, stack.items[tail_start..]);
    defer allocator.free(saved);
    stack.shrinkRetainingCapacity(start_height);
    try stack.appendSlice(allocator, saved);
}

/// Generic one-operand instruction: pop `in_t`, push a fresh `out_t` vreg.
fn unaryOp(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pops: *std.ArrayList(VReg),
    pushes: *std.ArrayList(VReg),
    base: usize,
    in_t: module.ValType,
    out_t: module.ValType,
    next_vreg: *VReg,
    ann: *AnnotatedInstr,
) StackifyError!void {
    _ = try popExpectVReg(allocator, stack, pops, base, in_t);
    try pushNew(allocator, stack, pushes, out_t, next_vreg);
    ann.result_type = out_t;
}

/// Generic two-operand instruction: pop two `in_t`, push a fresh `out_t`.
fn binaryOp(
    allocator: std.mem.Allocator,
    stack: *std.ArrayList(StackVal),
    pops: *std.ArrayList(VReg),
    pushes: *std.ArrayList(VReg),
    base: usize,
    in_t: module.ValType,
    out_t: module.ValType,
    next_vreg: *VReg,
    ann: *AnnotatedInstr,
) StackifyError!void {
    _ = try popExpectVReg(allocator, stack, pops, base, in_t);
    _ = try popExpectVReg(allocator, stack, pops, base, in_t);
    try pushNew(allocator, stack, pushes, out_t, next_vreg);
    ann.result_type = out_t;
}

/// Conversion/reinterpret opcodes 0xA7..0xBF: input type keyed per opcode
/// (e.g. 0xA7 i32.wrap_i64 takes i64, 0xBC i32.reinterpret_f32 takes f32),
/// output type from `convertResultType`.
fn conversionOp(
    allocator: std.mem.Allocator,
    op: u8,
    stack: *std.ArrayList(StackVal),
    pops: *std.ArrayList(VReg),
    pushes: *std.ArrayList(VReg),
    base: usize,
    next_vreg: *VReg,
    ann: *AnnotatedInstr,
) StackifyError!void {
    const in_t: module.ValType = switch (op) {
        0xA7 => .i64, // i32.wrap_i64
        0xA8, 0xA9 => .f32, // i32.trunc_f32_s/u
        0xAA, 0xAB => .f64, // i32.trunc_f64_s/u
        0xAC, 0xAD => .i32, // i64.extend_i32_s/u
        0xAE, 0xAF => .f32, // i64.trunc_f32_s/u
        0xB0, 0xB1 => .f64, // i64.trunc_f64_s/u
        0xB2, 0xB3 => .i32, // f32.convert_i32_s/u
        0xB4, 0xB5 => .i64, // f32.convert_i64_s/u
        0xB6 => .f64, // f32.demote_f64
        0xB7, 0xB8 => .i32, // f64.convert_i32_s/u
        0xB9, 0xBA => .i64, // f64.convert_i64_s/u
        0xBB => .f32, // f64.promote_f32
        0xBC => .f32, // i32.reinterpret_f32
        0xBD => .f64, // i64.reinterpret_f64
        0xBE => .i32, // f32.reinterpret_i32
        0xBF => .i64, // f64.reinterpret_i64
        else => return error.UnsupportedOpcode,
    };
    const out_t: module.ValType = convertResultType(op);
    _ = try popExpectVReg(allocator, stack, pops, base, in_t);
    try pushNew(allocator, stack, pushes, out_t, next_vreg);
    ann.result_type = out_t;
}

/// 0xFC sub-ops 0..7: i32/i64.trunc_sat_f32/f64_s/u. Even/odd pairs select
/// the float source width; sub-ops <= 3 produce i32, the rest i64.
fn truncSatOp(
    allocator: std.mem.Allocator,
    subop: u32,
    stack: *std.ArrayList(StackVal),
    pops: *std.ArrayList(VReg),
    pushes: *std.ArrayList(VReg),
    base: usize,
    next_vreg: *VReg,
    ann: *AnnotatedInstr,
) StackifyError!void {
    const in_t: module.ValType = switch (subop) {
        0, 1, 4, 5 => .f32,
        2, 3, 6, 7 => .f64,
        else => return error.UnsupportedOpcode,
    };
    const out_t: module.ValType = if (subop <= 3) .i32 else .i64;
    _ = try popExpectVReg(allocator, stack, pops, base, in_t);
    try pushNew(allocator, stack, pushes, out_t, next_vreg);
    ann.result_type = out_t;
}

/// Result type of a conversion opcode; only called for ops routed through
/// `conversionOp`, so the `else` arm is never meaningful.
fn convertResultType(op: u8) module.ValType {
    return switch (op) {
        0xA7, 0xA8, 0xA9, 0xAA, 0xAB, 0xBC => .i32,
        0xAC, 0xAD, 0xAE, 0xAF, 0xB0, 0xB1, 0xBD => .i64,
        0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xBE => .f32,
        0xB7, 0xB8, 0xB9, 0xBA, 0xBB, 0xBF => .f64,
        else => .i32,
    };
}

/// Reads one byte, advancing `pos`.
fn readByte(code: []const u8, pos: *usize) StackifyError!u8 {
    if (pos.* >= code.len) return error.UnexpectedEof;
    const b = code[pos.*];
    pos.* += 1;
    return b;
}

/// Unsigned LEB128 via the shared binary helper, mapping any non-EOF decode
/// failure onto TypeMismatch to stay within StackifyError.
fn readULEB128(comptime T: type, code: []const u8, pos: *usize) StackifyError!T {
    return binary.readULEB128(T, code, pos) catch |e| switch (e) {
        error.UnexpectedEof => error.UnexpectedEof,
        else => error.TypeMismatch,
    };
}

/// Signed LEB128; same error mapping as `readULEB128`.
fn readSLEB128(comptime T: type, code: []const u8, pos: *usize) StackifyError!T {
    return binary.readSLEB128(T, code, pos) catch |e| switch (e) {
        error.UnexpectedEof => error.UnexpectedEof,
        else => error.TypeMismatch,
    };
}

/// Blocktype is an s33: negative encodes a shorthand valtype (or empty),
/// non-negative is a type-section index.
fn readBlockType(code: []const u8, pos: *usize) StackifyError!i33 {
    return readSLEB128(i33, code, pos);
}

/// Decodes a numeric valtype byte (0x7F..0x7C); reference types rejected.
fn decodeValType(b: u8) StackifyError!module.ValType {
    return switch (b) {
        0x7F => .i32,
        0x7E => .i64,
        0x7D => .f32,
        0x7C => .f64,
        else => error.InvalidValueType,
    };
}

/// Result types of a blocktype. Shorthand forms (-1..-4 are single valtypes,
/// -64 is empty) return comptime-known slices; indices look up the type
/// section. No allocation — callers never free these slices.
fn blockTypeResults(mod: *const module.Module, bt: i33) StackifyError![]const module.ValType {
    return switch (bt) {
        -1 => &[_]module.ValType{.i32},
        -2 => &[_]module.ValType{.i64},
        -3 => &[_]module.ValType{.f32},
        -4 => &[_]module.ValType{.f64},
        -64 => &.{},
        else => if (bt >= 0) blk: {
            const idx: u32 = @intCast(bt);
            if (idx >= mod.types.len) return error.InvalidTypeIndex;
            break :blk mod.types[idx].results;
        } else error.InvalidTypeIndex,
    };
}

/// Param types of a blocktype; shorthand forms never have params.
fn blockTypeParams(mod: *const module.Module, bt: i33) StackifyError![]const module.ValType {
    if (bt < 0) return &.{};
    const idx: u32 = @intCast(bt);
    if (idx >= mod.types.len) return error.InvalidTypeIndex;
    return mod.types[idx].params;
}

/// Resolves a function index (imports first, then module-local) to its type.
/// NOTE: linear scan of imports per imported-function lookup — O(imports)
/// per call instruction.
fn getFuncType(mod: *const module.Module, fidx: u32, num_imported: u32) StackifyError!*const module.FuncType {
    if (fidx < num_imported) {
        var count: u32 = 0;
        for (mod.imports) |imp| {
            if (imp.desc == .func) {
                if (count == fidx) return &mod.types[imp.desc.func];
                count += 1;
            }
        }
        return error.InvalidFunctionIndex;
    }
    const local_idx = fidx - num_imported;
    if (local_idx >= mod.functions.len) return error.InvalidFunctionIndex;
    const type_idx = mod.functions[local_idx];
    if (type_idx >= mod.types.len) return error.InvalidTypeIndex;
    return &mod.types[type_idx];
}

/// Resolves a global index: imported globals first, then module-local.
fn getGlobalType(mod: *const module.Module, imported_globals: []const module.GlobalType, idx: u32) StackifyError!module.GlobalType {
    if (idx < imported_globals.len) return imported_globals[idx];
    const local_idx = idx - @as(u32, @intCast(imported_globals.len));
    if (local_idx >= mod.globals.len) return error.UndefinedGlobal;
    return mod.globals[local_idx].type;
}

/// Value type a load opcode pushes (sized loads widen to i32/i64).
fn memLoadResultType(op: u8) module.ValType {
    return switch (op) {
        0x28, 0x2C, 0x2D, 0x2E, 0x2F => .i32,
        0x29, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35 => .i64,
        0x2A => .f32,
        0x2B => .f64,
        else => .i32,
    };
}

/// Value type a store opcode pops as its stored operand.
fn memStoreValType(op: u8) module.ValType {
    return switch (op) {
        0x36, 0x3A, 0x3B => .i32,
        0x37, 0x3C, 0x3D, 0x3E => .i64,
        0x38 => .f32,
        0x39 => .f64,
        else => .i32,
    };
}

/// Natural alignment (log2 of access width in bytes) for each load opcode;
/// the encoded align immediate must not exceed this.
fn naturalAlignmentLog2ForLoad(op: u8) u32 {
    return switch (op) {
        0x28 => 2, // i32.load (4 bytes)
        0x29 => 3, // i64.load (8 bytes)
        0x2A => 2, // f32.load
        0x2B => 3, // f64.load
        0x2C, 0x2D => 0, // i32.load8_s/u
        0x2E, 0x2F => 1, // i32.load16_s/u
        0x30, 0x31 => 0, // i64.load8_s/u
        0x32, 0x33 => 1, // i64.load16_s/u
        0x34, 0x35 => 2, // i64.load32_s/u
        else => 0,
    };
}

/// Natural alignment (log2) for each store opcode.
fn naturalAlignmentLog2ForStore(op: u8) u32 {
    return switch (op) {
        0x36 => 2, // i32.store
        0x37 => 3, // i64.store
        0x38 => 2, // f32.store
        0x39 => 3, // f64.store
        0x3A, 0x3C => 0, // i32.store8 / i64.store8
        0x3B, 0x3D => 1, // i32.store16 / i64.store16
        0x3E => 2, // i64.store32
        else => 0,
    };
}

/// Element-wise equality of two valtype slices.
fn sameValTypeSlice(a: []const module.ValType, b: []const module.ValType) bool {
    if (a.len != b.len) return false;
    for (a, 0..) |vt, i| if (vt != b[i]) return false;
    return true;
}

// ── Tests ─────────────────────────────────────────────────────────────────────

// Body: i32.const 1; i32.const 2; i32.add; end. Verifies instruction count
// and that i32.add's pops record top-of-stack first (second const, then first).
test "stackify straight-line function" {
    const code = [_]u8{ 0x41, 0x01, 0x41, 0x02, 0x6a, 0x0b };
    const body = module.FunctionBody{ .locals = &.{}, .code = &code };
    const ft = module.FuncType{ .params = &.{}, .results = &.{.i32} };
    const mod = module.Module{
        .types = &.{},
        .imports = &.{},
        .functions = &.{},
        .tables = &.{},
        .memories = &.{},
        .globals = &.{},
        .exports = &.{},
        .start = null,
        .elements = &.{},
        .codes = &.{},
        .datas = &.{},
        .allocator = std.testing.allocator,
    };
    const ally = std.testing.allocator;
    const instrs = try stackify(ally, &body, &ft, &mod);
    defer deinitInstrs(ally, instrs);
    try std.testing.expectEqual(@as(usize, 4), instrs.len);
    const vr0 = instrs[0].effect.pushes[0];
    const vr1 = instrs[1].effect.pushes[0];
    try std.testing.expectEqual(@as(usize, 2), instrs[2].effect.pops.len);
    // Pops are recorded in pop order: top of stack (vr1) first.
    try std.testing.expectEqual(vr1, instrs[2].effect.pops[0]);
    try std.testing.expectEqual(vr0, instrs[2].effect.pops[1]);
}

// Body: i32.const 7; call 0; end — the callee is the function itself, typed
// (i32) -> i64. Verifies the call's effect follows the callee signature.
test "stackify call uses function signature" {
    const code = [_]u8{ 0x41, 0x07, 0x10, 0x00, 0x0b };
    const body = module.FunctionBody{ .locals = &.{}, .code = &code };
    var callee_params = [_]module.ValType{.i32};
    var callee_results = [_]module.ValType{.i64};
    var types = [_]module.FuncType{.{ .params = &callee_params, .results = &callee_results }};
    var funcs = [_]u32{0};
    var codes = [_]module.FunctionBody{body};
    const mod = module.Module{
        .types = &types,
        .imports = &.{},
        .functions = &funcs,
        .tables = &.{},
        .memories = &.{},
        .globals = &.{},
        .exports = &.{},
        .start = null,
        .elements = &.{},
        .codes = &codes,
        .datas = &.{},
        .allocator = std.testing.allocator,
    };
    const ally = std.testing.allocator;
    const instrs = try stackify(ally, &body, &types[0], &mod);
    defer deinitInstrs(ally, instrs);
    try std.testing.expectEqual(@as(usize, 1), instrs[1].effect.pops.len);
    try std.testing.expectEqual(@as(usize, 1), instrs[1].effect.pushes.len);
    try std.testing.expectEqual(module.ValType.i64, instrs[1].result_type.?);
}