Fixed macOS compilation and refactored mods/ir to use std.Io.Reader.
Signed-off-by: Lorenzo Torres <lorenzotorres@outlook.it>
parent c12e5ef485
commit 939cdb5f09
4 changed files with 189 additions and 216 deletions
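The recurring change in the hunks below swaps the Parser's hand-rolled read helpers for the new std.Io.Reader interface. A minimal sketch of the pattern, with `self`, `file`, and `allocator` standing in for the surrounding code shown in the diff (an illustrative fragment, not a compilable unit):

// Before: helpers layered on std.fs.File.Reader.
//     const size = try self.readU32();   // wrapped std.leb.readUleb128
//     const tag = try self.readByte();
// After: the same reads go through *std.Io.Reader directly.
const size = try self.reader.takeLeb128(u32);
const tag = try self.reader.takeByte();

// Call sites now build the buffered file reader themselves and pass its generic interface:
const buffer = try allocator.alloc(u8, 1_000_000);
var reader = file.reader(buffer);
var parser = try mods.Parser.init(allocator, &reader.interface);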
@@ -83,8 +83,8 @@ fn vulkan_init_instance(allocator: std.mem.Allocator, handle: *c.VkInstance) !vo
 _ = c.vkEnumerateInstanceLayerProperties(&avaliableLayersCount, availableLayers.ptr);

 // Every layer we do have we add to this list, if we don't have it no worries just print a message and continue
-var newLayers = std.ArrayList([*c]const u8).init(allocator);
-defer newLayers.deinit();
+var newLayers = std.ArrayList([*c]const u8).empty;
+defer newLayers.deinit(allocator);
 // Loop over layers we want
 for (validation_layers) |want_layer| {
 var found = false;
@@ -99,7 +99,7 @@ fn vulkan_init_instance(allocator: std.mem.Allocator, handle: *c.VkInstance) !vo
 std.debug.print("WARNING: Compiled in debug mode, but wanted validation layer {s} not found.\n", .{want_layer});
 std.debug.print("NOTE: Validation layer will be removed from the wanted validation layers\n", .{});
 } else {
-try newLayers.append(want_layer);
+try newLayers.append(allocator, want_layer);
 }
 }
@@ -28,7 +28,6 @@ pub const Error = error{
 Overflow,
 ReadFailed,
 EndOfStream,
-invalid_instruction,
 invalid_magic,
 invalid_version,
 invalid_section,
@@ -49,17 +48,16 @@ pub const Error = error{
 duplicated_tablesec,
 duplicated_elemsec,
 unresolved_branch,
-unterminated_wasm,
 };

-pub fn init(allocator: Allocator, reader: std.fs.File.Reader) !Parser {
+pub fn init(allocator: Allocator, reader: *std.Io.Reader) !Parser {
 return .{
 .elems = &.{},
 .tables = &.{},
 .parsedData = &.{},
 .exported_memory = 0,
 .importCount = 0,
-.reader = @constCast(&reader.interface),
+.reader = reader,
 .allocator = allocator,
 .types = &.{},
 .functions = &.{},
@@ -106,57 +104,15 @@ fn warn(self: Parser, s: []const u8) void {
 std.debug.print("[WARN]: Parsing of {s} unimplemented at byte index {d}\n", .{ s, self.reader.seek });
 }

-// TODO: remove peek?
-pub fn peek(self: Parser) ?u8 {
-return self.reader.peekByte() catch return null;
-}
-
-fn read(self: *Parser, n: usize) ![]const u8 {
-_ = self.reader.peek(n) catch {
-return Error.unterminated_wasm;
-};
-return try self.reader.readAlloc(self.allocator, n);
-}
-
 // ==========
 // = VALUES =
 // ==========

-pub fn readByte(self: *Parser) !u8 {
-return (try self.read(1))[0];
-}
-
-pub fn readU32(self: *Parser) !u32 {
-return std.leb.readUleb128(u32, self);
-}
-
-pub fn readI32(self: *Parser) !i32 {
-return std.leb.readIleb128(i32, self);
-}
-
-pub fn readI64(self: *Parser) !i64 {
-return std.leb.readIleb128(i64, self);
-}
-
-pub fn readI33(self: *Parser) !i33 {
-return std.leb.readIleb128(i33, self);
-}
-
-pub fn readF32(self: *Parser) !f32 {
-const bytes = try self.read(@sizeOf(f32));
-return std.mem.bytesAsValue(f32, bytes).*;
-}
-
-pub fn readF64(self: *Parser) !f64 {
-const bytes = try self.read(@sizeOf(f64));
-return std.mem.bytesAsValue(f64, bytes).*;
-}
-
 fn readName(self: *Parser) ![]const u8 {
-// NOTE: This should be the only vector not parsed through parseVector
-const size = try self.readU32();
+// NOTE: This should be the only vector not parsed through parseVector for efficiency
+const size = try self.reader.takeLeb128(u32);
 const str = try self.allocator.alloc(u8, size);
-@memcpy(str, try self.read(size));
+try self.reader.readSliceAll(str);
 if (!std.unicode.utf8ValidateSlice(str)) return Error.invalid_string;
 return str;
 }
@@ -179,24 +135,16 @@ fn VectorFnResult(parse_fn: anytype) type {
 };
 }
 pub fn parseVector(self: *Parser, parse_fn: anytype) ![]VectorFnResult(parse_fn) {
-const n = try self.readU32();
+const n = try self.reader.takeLeb128(u32);
 const ret = try self.allocator.alloc(VectorFnResult(parse_fn), n);
 for (ret) |*i| {
 i.* = try parse_fn(self);
 }
 return ret;
 }
-pub fn parseVectorU32(self: *Parser) ![]u32 {
-const n = try self.readU32();
-const ret = try self.allocator.alloc(u32, n);
-for (ret) |*i| {
-i.* = try self.readU32();
-}
-return ret;
-}

 fn parseNumtype(self: *Parser) !std.wasm.Valtype {
-return switch (try self.readByte()) {
+return switch (try self.reader.takeByte()) {
 0x7F => .i32,
 0x7E => .i64,
 0x7D => .f32,
@@ -206,14 +154,14 @@ fn parseNumtype(self: *Parser) !std.wasm.Valtype {
 }

 fn parseVectype(self: *Parser) !std.wasm.Valtype {
-return switch (try self.readByte()) {
+return switch (try self.reader.takeByte()) {
 0x7B => .v128,
 else => Error.invalid_vectype,
 };
 }

-pub fn parseReftype(self: *Parser) !std.wasm.RefType {
-return switch (try self.readByte()) {
+fn parseReftype(self: *Parser) !std.wasm.RefType {
+return switch (try self.reader.takeByte()) {
 0x70 => .funcref,
 0x6F => .externref,
 else => Error.invalid_reftype,
@@ -223,7 +171,7 @@ pub fn parseReftype(self: *Parser) !std.wasm.RefType {
 // NOTE: Parsing of Valtype can be improved but it makes it less close to spec so...
 // TODO: Do we really need Valtype?
 fn parseValtype(self: *Parser) !vm.Valtype {
-const pb = self.peek() orelse return Error.unterminated_wasm;
+const pb = try self.reader.peekByte();
 return switch (pb) {
 0x7F, 0x7E, 0x7D, 0x7C => .{ .val = try self.parseNumtype() },
 0x7B => .{ .val = try self.parseVectype() },
@@ -237,7 +185,7 @@ fn parseResultType(self: *Parser) ![]vm.Valtype {
 }

 fn parseFunctype(self: *Parser) !vm.Functype {
-if (try self.readByte() != 0x60) return Error.invalid_functype;
+if (try self.reader.takeByte() != 0x60) return Error.invalid_functype;
 return .{
 .parameters = try self.parseResultType(),
 .returns = try self.parseResultType(),
@@ -250,14 +198,14 @@ const Limits = struct {
 };

 fn parseLimits(self: *Parser) !Limits {
-return switch (try self.readByte()) {
+return switch (try self.reader.takeByte()) {
 0x00 => .{
-.min = try self.readU32(),
+.min = try self.reader.takeLeb128(u32),
 .max = null,
 },
 0x01 => .{
-.min = try self.readU32(),
-.max = try self.readU32(),
+.min = try self.reader.takeLeb128(u32),
+.max = try self.reader.takeLeb128(u32),
 },
 else => Error.invalid_limits,
 };
@@ -293,7 +241,7 @@ pub const Globaltype = struct {
 fn parseGlobaltype(self: *Parser) !Globaltype {
 return .{
 .t = try self.parseValtype(),
-.m = switch (try self.readByte()) {
+.m = switch (try self.reader.takeByte()) {
 0x00 => .@"const",
 0x01 => .@"var",
 else => return Error.invalid_globaltype,
@@ -307,11 +255,12 @@ fn parseGlobaltype(self: *Parser) !Globaltype {
 // NOTE: This should not return anything but modify IR

 pub fn parseModule(self: *Parser) !void {
-if (!std.mem.eql(u8, try self.read(4), &.{ 0x00, 0x61, 0x73, 0x6d })) return Error.invalid_magic;
-if (!std.mem.eql(u8, try self.read(4), &.{ 0x01, 0x00, 0x00, 0x00 })) return Error.invalid_version;
+if (!std.mem.eql(u8, try self.reader.takeArray(4), &.{ 0x00, 0x61, 0x73, 0x6d })) return Error.invalid_magic;
+if (!std.mem.eql(u8, try self.reader.takeArray(4), &.{ 0x01, 0x00, 0x00, 0x00 })) return Error.invalid_version;

 // TODO: Ensure only one section of each type (except for custom section), some code depends on it
-while (self.reader.seek < self.reader.end) {
-try switch (try self.readByte()) {
+while (self.reader.takeByte()) |b| {
+try switch (b) {
 0 => self.parseCustomsec(),
 1 => self.parseTypesec(),
 2 => self.parseImportsec(),
@@ -327,23 +276,23 @@ pub fn parseModule(self: *Parser) !void {
 12 => self.parseDatacountsec(),
 else => return Error.invalid_section,
 };
-}
-if (self.exports.init != null and self.exports.init.? != 0){
+} else |_| {}
+if (self.exports.init != null and self.exports.init.? != 0) {
 self.exports.init.? -= self.importCount;
 }
-if (self.exports.deinit != null and self.exports.deinit.? != 0){
+if (self.exports.deinit != null and self.exports.deinit.? != 0) {
 self.exports.deinit.? -= self.importCount;
 }
 }

 fn parseCustomsec(self: *Parser) !void {
 self.warn("customsec");
-const size = try self.readU32();
-_ = try self.read(size);
+const size = try self.reader.takeLeb128(u32);
+try self.reader.discardAll(size);
 }

 fn parseTypesec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

 const ft = try self.parseVector(Parser.parseFunctype);
@@ -368,8 +317,8 @@ fn parseImport(self: *Parser) !Import {
 return .{
 .module = try self.readName(),
 .name = try self.readName(),
-.importdesc = switch (try self.readByte()) {
-0x00 => .{ .func = try self.readU32() },
+.importdesc = switch (try self.reader.takeByte()) {
+0x00 => .{ .func = try self.reader.takeLeb128(u32) },
 0x01 => .{ .table = try self.parseTabletype() },
 0x02 => .{ .mem = try self.parseMemtype() },
 0x03 => .{ .global = try self.parseGlobaltype() },
@@ -379,7 +328,7 @@ fn parseImport(self: *Parser) !Import {
 }

 fn parseImportsec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

 const imports = try self.parseVector(Parser.parseImport);
@@ -428,10 +377,15 @@ fn parseImportsec(self: *Parser) !void {
 }

 fn parseFuncsec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

-const types = try self.parseVector(Parser.readU32);
+// TODO(ernesto): ugly as fuck
+const types = try self.parseVector(struct {
+fn fun(parser: *Parser) !u32 {
+return parser.reader.takeLeb128(u32);
+}
+}.fun);
 defer self.allocator.free(types);

 if (self.functions.len != self.importCount) return Error.duplicated_funcsec;
@@ -458,13 +412,11 @@ pub const Table = struct {
 };

 fn parseTable(self: *Parser) !Table {
-return .{
-.t = try self.parseTabletype()
-};
+return .{ .t = try self.parseTabletype() };
 }

 fn parseTablesec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

 const tables = try self.parseVector(Parser.parseTable);
@@ -476,12 +428,12 @@ fn parseTablesec(self: *Parser) !void {
 for (tables, 0..) |t, i| {
 self.tables[i] = t.t;
 }

 std.debug.assert(self.reader.seek == end_idx);
 }

 fn parseMemsec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

 const mems = try self.parseVector(Parser.parseMemtype);
@@ -515,7 +467,7 @@ fn parseGlobal(self: *Parser) !Global {
 }

 fn parseGlobalsec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

 const globals = try self.parseVector(Parser.parseGlobal);
@@ -526,7 +478,7 @@ fn parseGlobalsec(self: *Parser) !void {
 self.globalValues = try self.allocator.alloc(vm.Value, globals.len);
 self.globalTypes = try self.allocator.alloc(Globaltype, globals.len);

-for(globals, 0..) |global, i| {
+for (globals, 0..) |global, i| {
 self.globalValues[i] = try vm.handleGlobalInit(self.allocator, global.ir);
 self.globalTypes[i] = global.t;
 }
@@ -545,18 +497,18 @@ pub const Export = struct {
 fn parseExport(self: *Parser) !Export {
 return .{
 .name = try self.readName(),
-.exportdesc = switch (try self.readByte()) {
-0x00 => .{ .func = try self.readU32() },
-0x01 => .{ .table = try self.readU32() },
-0x02 => .{ .mem = try self.readU32() },
-0x03 => .{ .global = try self.readU32() },
+.exportdesc = switch (try self.reader.takeByte()) {
+0x00 => .{ .func = try self.reader.takeLeb128(u32) },
+0x01 => .{ .table = try self.reader.takeLeb128(u32) },
+0x02 => .{ .mem = try self.reader.takeLeb128(u32) },
+0x03 => .{ .global = try self.reader.takeLeb128(u32) },
 else => return Error.invalid_exportdesc,
 },
 };
 }

 fn parseExportsec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

 const exports = try self.parseVector(Parser.parseExport);
@@ -588,8 +540,8 @@ fn parseExportsec(self: *Parser) !void {

 fn parseStartsec(self: *Parser) !void {
 self.warn("startsec");
-const size = try self.readU32();
-_ = try self.read(size);
+const size = try self.reader.takeLeb128(u32);
+try self.reader.discardAll(size);
 }

 const Elemmode = union(enum) {
@@ -607,22 +559,20 @@ pub const Elem = struct {
 };

 fn parseElem(self: *Parser) !Elem {
-const b: u32 = try self.readU32();
-switch (b){
+const b: u32 = try self.reader.takeLeb128(u32);
+switch (b) {
 0 => {
 // if (try self.parseReftype() != std.wasm.RefType.funcref){
 // std.debug.panic("Active function index element table was not a function reference\n", .{});
 // }
-const elemMode: Elemmode = .{
-.Active = .{
-.tableidx = 0,
-.offset = try vm.handleGlobalInit(self.allocator, try IR.parseGlobalExpr(self)),
-}
-};
-const n = try self.readU32();
+const elemMode: Elemmode = .{ .Active = .{
+.tableidx = 0,
+.offset = try vm.handleGlobalInit(self.allocator, try IR.parseGlobalExpr(self)),
+} };
+const n = try self.reader.takeLeb128(u32);
 const indices: []u32 = try self.allocator.alloc(u32, n);
 for (0..n) |i| {
-indices[i] = try self.readU32();
+indices[i] = try self.reader.takeLeb128(u32);
 }
 return .{
 .indices = indices,
@@ -631,12 +581,12 @@ fn parseElem(self: *Parser) !Elem {
 },
 else => {
 std.debug.panic("TODO: Handle elem type {any}\n", .{b});
-}
+},
 }
 }

 fn parseElemsec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

 const elems = try self.parseVector(Parser.parseElem);
@@ -645,7 +595,7 @@ fn parseElemsec(self: *Parser) !void {
 self.elems = try self.allocator.alloc([]u32, elems.len);

 for (elems) |elem| {
-if (elem.elemMode != Elemmode.Active){
+if (elem.elemMode != Elemmode.Active) {
 std.debug.panic("No support for non active elements\n", .{});
 }
 const tab = self.tables[elem.elemMode.Active.tableidx];
@@ -669,13 +619,13 @@ const Local = struct {
 };
 fn parseLocal(self: *Parser) !Local {
 return .{
-.n = try self.readU32(),
+.n = try self.reader.takeLeb128(u32),
 .t = try self.parseValtype(),
 };
 }

 fn parseCode(self: *Parser) !Func {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

 const locals = try self.parseVector(Parser.parseLocal);
@@ -707,7 +657,7 @@ fn parseCode(self: *Parser) !Func {
 }

 fn parseCodesec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;

 const codes = try self.parseVector(Parser.parseCode);
@@ -732,34 +682,39 @@ pub const Data = struct {
 };

 fn parseData(self: *Parser) !Data {
-const b: u32 = try self.readU32();
+const b: u32 = try self.reader.takeLeb128(u32);
 switch (b) {
 0 => {
+// TODO(ernesto): ugly (Zig, we need lambdas asap
 return .{
 .offsetVal = try vm.handleGlobalInit(self.allocator, try IR.parseGlobalExpr(self)),
-.data = try self.parseVector(readByte),
+.data = try self.parseVector(struct {
+fn fun(p: *Parser) !u8 {
+return p.reader.takeByte();
+}
+}.fun),
 };
 },
 else => {
 std.debug.panic("TODO: Handle data type {any}\n", .{b});
-}
+},
 }
 }

 fn parseDatasec(self: *Parser) !void {
-const size = try self.readU32();
+const size = try self.reader.takeLeb128(u32);
 const end_idx = self.reader.seek + size;
 const datas = try self.parseVector(Parser.parseData);
 defer self.allocator.free(datas);
 for (datas) |data| {
 self.parsedData = try self.allocator.realloc(self.parsedData, @as(usize, @intCast(data.offsetVal.i32)) + data.data.len);
-@memcpy(self.parsedData[@as(usize, @intCast(data.offsetVal.i32))..@as(usize, @intCast(data.offsetVal.i32))+data.data.len], data.data);
+@memcpy(self.parsedData[@as(usize, @intCast(data.offsetVal.i32)) .. @as(usize, @intCast(data.offsetVal.i32)) + data.data.len], data.data);
 }
 std.debug.assert(self.reader.seek == end_idx);
 }

 fn parseDatacountsec(self: *Parser) !void {
 self.warn("datacountsec");
-const size = try self.readU32();
-_ = try self.read(size);
+const size = try self.reader.takeLeb128(u32);
+try self.reader.discardAll(size);
 }
159 src/mods/ir.zig
@@ -6,6 +6,12 @@ const Allocator = std.mem.Allocator;

 const IR = @This();

+const Error = error{
+invalid_instruction,
+invalid_reftype,
+double_else,
+};
+
 const VectorIndex = packed struct {
 opcode: VectorOpcode,
 laneidx: u8,
@@ -60,6 +66,8 @@ pub fn print(self: IR, writer: anytype) !void {
 }
 }

+// TODO(ernesto): This file is unreadable, we should avoid "0xXX" and use names
+
 /// Opcodes.
 /// This is a mix of wasm opcodes mixed with a few of our own.
 /// Mainly for `0xFC` opcodes we use `0xD3` to `0xE4`.
@@ -602,11 +610,11 @@ const VectorOpcode = enum(u8) {
 };

 const IRParserState = struct {
-parser: *Parser,
+reader: *std.Io.Reader,
 allocator: Allocator,

 // branches: std.AutoHashMapUnmanaged(u32, u32),
-branches: std.ArrayListUnmanaged( struct { pc: u32, index: u32, table: bool } ),
+branches: std.ArrayListUnmanaged(struct { pc: u32, index: u32, table: bool }),
 br_table_vectors: std.ArrayListUnmanaged(u32),

 opcodes: std.ArrayListUnmanaged(Opcode),
@@ -614,9 +622,9 @@ const IRParserState = struct {

 fn parseFunction(self: *IRParserState) !void {
 while (true) {
-const op = self.parser.peek() orelse return Parser.Error.unterminated_wasm;
+const op = try self.reader.peekByte();
 if (op == 0x0B) {
-_ = try self.parser.readByte();
+self.reader.toss(1);
 break;
 } else {
 try self.parseExpression();
@@ -624,8 +632,9 @@ const IRParserState = struct {
 }
 }

-fn parseExpression(self: *IRParserState) Parser.Error!void {
-const b = try self.parser.readByte();
+const PossibleError = std.Io.Reader.TakeLeb128Error || std.mem.Allocator.Error || IR.Error;
+fn parseExpression(self: *IRParserState) PossibleError!void {
+const b = try self.reader.takeByte();
 try switch (b) {
 0x00 => self.push(@enumFromInt(b), .{ .u64 = 0 }),
 0x01 => self.push(@enumFromInt(b), .{ .u64 = 0 }),
@@ -634,14 +643,14 @@ const IRParserState = struct {
 0x0C...0x0D => self.parseBranch(b),
 0x0E => self.parseBrTable(b),
 0x0F => self.push(@enumFromInt(b), .{ .u64 = 0 }),
-0x10 => self.push(@enumFromInt(b), .{ .u32 = try self.parser.readU32() }),
-0x11 => self.push(@enumFromInt(b), .{ .indirect = .{ .y = try self.parser.readU32(), .x = try self.parser.readU32() } }),
-0xD0 => self.push(@enumFromInt(b), .{ .reftype = try self.parser.parseReftype() }),
+0x10 => self.push(@enumFromInt(b), .{ .u32 = try self.reader.takeLeb128(u32) }),
+0x11 => self.push(@enumFromInt(b), .{ .indirect = .{ .y = try self.reader.takeLeb128(u32), .x = try self.reader.takeLeb128(u32) } }),
+0xD0 => self.push(@enumFromInt(b), .{ .reftype = try self.parseReftype() }),
 0xD1 => self.push(@enumFromInt(b), .{ .u64 = 0 }),
-0xD2 => self.push(@enumFromInt(b), .{ .u32 = try self.parser.readU32() }),
+0xD2 => self.push(@enumFromInt(b), .{ .u32 = try self.reader.takeLeb128(u32) }),
 0x1A...0x1C => self.parseParametric(b),
-0x20...0x24 => self.push(@enumFromInt(b), .{ .u32 = try self.parser.readU32() }),
-0x25...0x26 => self.push(@enumFromInt(b), .{ .u32 = try self.parser.readU32() }),
+0x20...0x24 => self.push(@enumFromInt(b), .{ .u32 = try self.reader.takeLeb128(u32) }),
+0x25...0x26 => self.push(@enumFromInt(b), .{ .u32 = try self.reader.takeLeb128(u32) }),
 0x28...0x3E => self.push(@enumFromInt(b), .{ .memarg = try self.parseMemarg() }),
 0x3F...0x40 => self.parseMemsizeorgrow(b),
 0x41...0x44 => self.parseConst(b),
@@ -649,8 +658,8 @@ const IRParserState = struct {
 0xFD => self.parseVector(),
 0xFC => self.parseMisc(),
 else => {
-std.log.err("Invalid instruction {x} at position {d}\n", .{ b, self.parser.reader.seek });
-return Parser.Error.invalid_instruction;
+std.log.err("Invalid instruction {x} at position {d}\n", .{ b, self.reader.seek });
+return IR.Error.invalid_instruction;
 },
 };
 }
@@ -660,55 +669,70 @@ const IRParserState = struct {
 try self.indices.append(self.allocator, index);
 }

+fn parseReftype(self: *IRParserState) !std.wasm.RefType {
+return switch (try self.reader.takeByte()) {
+0x70 => .funcref,
+0x6F => .externref,
+else => Error.invalid_reftype,
+};
+}
+
 fn parseMemarg(self: *IRParserState) !Memarg {
 return .{
 // TODO: assert this intCast does not fail
-.alignment = @intCast(try self.parser.readU32()),
-.offset = try self.parser.readU32(),
+.alignment = @intCast(try self.reader.takeLeb128(u32)),
+.offset = try self.reader.takeLeb128(u32),
 };
 }

 fn parseMemsizeorgrow(self: *IRParserState, b: u8) !void {
-if (try self.parser.readByte() != 0x00) return Parser.Error.invalid_instruction;
+if (try self.reader.takeByte() != 0x00) return IR.Error.invalid_instruction;
 try self.push(@enumFromInt(b), .{ .u64 = 0 });
 }

 fn parseConst(self: *IRParserState, b: u8) !void {
 try switch (b) {
-0x41 => self.push(.i32_const, .{ .i32 = try self.parser.readI32() }),
-0x42 => self.push(.i64_const, .{ .i64 = try self.parser.readI64() }),
-0x43 => self.push(.f32_const, .{ .f32 = try self.parser.readF32() }),
-0x44 => self.push(.f64_const, .{ .f64 = try self.parser.readF64() }),
+0x41 => self.push(.i32_const, .{ .i32 = try self.reader.takeLeb128(i32) }),
+0x42 => self.push(.i64_const, .{ .i64 = try self.reader.takeLeb128(i64) }),
+0x43 => self.push(.f32_const, .{ .f32 = val: {
+const bytes = try self.reader.takeArray(4);
+break :val std.mem.bytesAsValue(f32, bytes).*;
+} }),
+0x44 => self.push(.f64_const, .{ .f64 = val: {
+const bytes = try self.reader.takeArray(8);
+break :val std.mem.bytesAsValue(f64, bytes).*;
+} }),
 else => unreachable,
 };
 }

 fn parseMisc(self: *IRParserState) !void {
-const n = try self.parser.readU32();
+const n = try self.reader.takeLeb128(u32);
 try switch (n) {
 0...7 => self.push(@enumFromInt(0xD3 + @as(u8, @intCast(n))), .{ .u64 = 0 }),
 8...9 => @panic("UNIMPLEMENTED"),
 10...11 => {
 try self.push(@enumFromInt(0xD3 + @as(u8, @intCast(n))), .{ .u64 = 0 });
-_ = try self.parser.readByte();
+// TODO(ernesto): This is sus
+try self.reader.discardAll(1);
 if (n == 10) {
-_ = try self.parser.readByte();
+try self.reader.discardAll(1);
 }
 },
 12...17 => @panic("UNIMPLEMENTED"),
 else => {
-std.log.err("Invalid misc instruction {d} at position {d}\n", .{ n, self.parser.reader.seek });
-return Parser.Error.invalid_instruction;
+std.log.err("Invalid misc instruction {d} at position {d}\n", .{ n, self.reader.seek });
+return IR.Error.invalid_instruction;
 },
 };
 }

 fn parseBlockType(self: *IRParserState) !void {
-const b = self.parser.peek() orelse return Parser.Error.unterminated_wasm;
+const b = try self.reader.peekByte();
 switch (b) {
-0x40 => _ = try self.parser.readByte(),
-0x6F...0x70, 0x7B...0x7F => _ = try self.parser.readByte(),
-else => _ = try self.parser.readI33(),
+0x40 => self.reader.toss(1),
+0x6F...0x70, 0x7B...0x7F => self.reader.toss(1),
+else => _ = try self.reader.takeLeb128(i33),
 }
 }
@@ -717,9 +741,9 @@ const IRParserState = struct {
 _ = try self.parseBlockType();
 const start: u32 = @intCast(self.opcodes.items.len);
 while (true) {
-const op = self.parser.peek() orelse return Parser.Error.unterminated_wasm;
+const op = try self.reader.peekByte();
 if (op == 0x0B) {
-_ = try self.parser.readByte();
+self.reader.toss(1);
 break;
 } else {
 try self.parseExpression();
@@ -736,9 +760,9 @@ const IRParserState = struct {

 fn parseGlobal(self: *IRParserState) !void {
 while (true) {
-const op = self.parser.peek() orelse return Parser.Error.unterminated_wasm;
+const op = try self.reader.peekByte();
 if (op == 0x0B) {
-_ = try self.parser.readByte();
+self.reader.toss(1);
 break;
 } else {
 try self.parseExpression();
@@ -756,15 +780,14 @@ const IRParserState = struct {

 var else_addr: u32 = 0;
 while (true) {
-const op = self.parser.peek() orelse return Parser.Error.unterminated_wasm;
-
+const op = try self.reader.peekByte();
 if (op == 0x05) {
-if (else_addr != 0) return Parser.Error.double_else;
-_ = try self.parser.readByte();
+if (else_addr != 0) return IR.Error.double_else;
+self.reader.toss(1);
 else_addr = @intCast(self.opcodes.items.len);
 try self.push(.br, .{ .u32 = 0 });
 } else if (op == 0x0B) {
-_ = try self.parser.readByte();
+self.reader.toss(1);
 break;
 } else {
 try self.parseExpression();
@@ -786,7 +809,7 @@ const IRParserState = struct {
 try switch (b) {
 0x1A...0x1B => self.push(@enumFromInt(b), .{ .u64 = 0 }),
 0x1C => @panic("UNIMPLEMENTED"),
-else => return Parser.Error.invalid_instruction,
+else => return IR.Error.invalid_instruction,
 };
 }
@@ -815,38 +838,47 @@ const IRParserState = struct {
 }

 fn parseBranch(self: *IRParserState, b: u8) !void {
-const idx = try self.parser.readU32();
+const idx = try self.reader.takeLeb128(u32);
 try self.branches.append(self.allocator, .{ .pc = @intCast(self.opcodes.items.len), .index = @intCast(self.indices.items.len), .table = false });
 try self.push(@enumFromInt(b), .{ .u32 = idx });
 }

+fn parseVectorU32(self: *IRParserState) ![]u32 {
+const n = try self.reader.takeLeb128(u32);
+const vec = try self.allocator.alloc(u32, n);
+for (vec) |*i| {
+i.* = try self.reader.takeLeb128(u32);
+}
+return vec;
+}
+
 fn parseBrTable(self: *IRParserState, b: u8) !void {
-const idxs = try self.parser.parseVectorU32();
-const idxN = try self.parser.readU32();
+const idxs = try self.parseVectorU32();
+const idxN = try self.reader.takeLeb128(u32);
 const table_vectors_len = self.br_table_vectors.items.len;
 try self.br_table_vectors.appendSlice(self.allocator, idxs);
 try self.br_table_vectors.append(self.allocator, idxN);
-for (0..idxs.len+1) |i| {
+for (0..idxs.len + 1) |i| {
 try self.branches.append(self.allocator, .{ .pc = @intCast(self.opcodes.items.len), .index = @intCast(table_vectors_len + i), .table = true });
 }
-try self.push(@enumFromInt(b), .{ .indirect = .{ .x = @intCast(table_vectors_len), .y = @intCast(idxs.len) }});
+try self.push(@enumFromInt(b), .{ .indirect = .{ .x = @intCast(table_vectors_len), .y = @intCast(idxs.len) } });
 }

 fn parseVector(self: *IRParserState) !void {
-const n = try self.parser.readU32();
+const n = try self.reader.takeLeb128(u32);
 try switch (n) {
 0...10, 92...93, 11 => self.push(.vecinst, .{ .vector = .{ .opcode = @enumFromInt(n), .memarg = try self.parseMemarg(), .laneidx = 0 } }),
-84...91 => self.push(.vecinst, .{ .vector = .{ .opcode = @enumFromInt(n), .memarg = try self.parseMemarg(), .laneidx = try self.parser.readByte() } }),
+84...91 => self.push(.vecinst, .{ .vector = .{ .opcode = @enumFromInt(n), .memarg = try self.parseMemarg(), .laneidx = try self.reader.takeByte() } }),
 12 => {},
 13 => {},
-21...34 => self.push(.vecinst, .{ .vector = .{ .opcode = @enumFromInt(n), .memarg = .{ .alignment = 0, .offset = 0 }, .laneidx = try self.parser.readByte() } }),
+21...34 => self.push(.vecinst, .{ .vector = .{ .opcode = @enumFromInt(n), .memarg = .{ .alignment = 0, .offset = 0 }, .laneidx = try self.reader.takeByte() } }),
 // Yes, there are this random gaps in wasm vector instructions don't ask me how I know...
 14...20, 35...83, 94...153, 155...161, 163...164, 167...174, 177, 181...186, 188...193, 195...196, 199...206, 209, 213...225, 227...237, 239...255 => {
 try self.push(.vecinst, .{ .vector = .{ .opcode = @enumFromInt(n), .memarg = .{ .alignment = 0, .offset = 0 }, .laneidx = 0 } });
 },
 else => {
-std.log.err("Invalid vector instruction {d} at position {d}\n", .{ n, self.parser.reader.seek });
-return Parser.Error.invalid_instruction;
+std.log.err("Invalid vector instruction {d} at position {d}\n", .{ n, self.reader.seek });
+return IR.Error.invalid_instruction;
 },
 };
 }
@@ -858,17 +890,12 @@ pub fn parse(parser: *Parser) !IR {
 .opcodes = .{},
 .indices = .{},
 .branches = .{},
-.parser = parser,
+.reader = parser.reader,
 .allocator = parser.allocator,
 };
 try state.parseFunction();
 if (state.branches.items.len != 0) return Parser.Error.unresolved_branch;
-return .{
-.opcodes = try state.opcodes.toOwnedSlice(state.allocator),
-.indices = try state.indices.toOwnedSlice(state.allocator),
-.select_valtypes = &.{},
-.br_table_vectors = state.br_table_vectors.items
-};
+return .{ .opcodes = try state.opcodes.toOwnedSlice(state.allocator), .indices = try state.indices.toOwnedSlice(state.allocator), .select_valtypes = &.{}, .br_table_vectors = state.br_table_vectors.items };
 }

 pub fn parseGlobalExpr(parser: *Parser) !IR {
@@ -877,16 +904,11 @@ pub fn parseGlobalExpr(parser: *Parser) !IR {
 .opcodes = .{},
 .indices = .{},
 .branches = .{},
-.parser = parser,
+.reader = parser.reader,
 .allocator = parser.allocator,
 };
 try state.parseGlobal();
-return .{
-.opcodes = try state.opcodes.toOwnedSlice(state.allocator),
-.indices = try state.indices.toOwnedSlice(state.allocator),
-.select_valtypes = &.{},
-.br_table_vectors = state.br_table_vectors.items
-};
+return .{ .opcodes = try state.opcodes.toOwnedSlice(state.allocator), .indices = try state.indices.toOwnedSlice(state.allocator), .select_valtypes = &.{}, .br_table_vectors = state.br_table_vectors.items };
 }

 pub fn parseSingleExpr(parser: *Parser) !IR {
@@ -895,14 +917,9 @@ pub fn parseSingleExpr(parser: *Parser) !IR {
 .opcodes = .{},
 .indices = .{},
 .branches = .{},
-.parser = parser,
+.reader = parser.reader,
 .allocator = parser.allocator,
 };
 try state.parseExpression();
-return .{
-.opcodes = try state.opcodes.toOwnedSlice(state.allocator),
-.indices = try state.indices.toOwnedSlice(state.allocator),
-.select_valtypes = &.{},
-.br_table_vectors = state.br_table_vectors.items
-};
+return .{ .opcodes = try state.opcodes.toOwnedSlice(state.allocator), .indices = try state.indices.toOwnedSlice(state.allocator), .select_valtypes = &.{}, .br_table_vectors = state.br_table_vectors.items };
 }
@@ -30,10 +30,10 @@ var loadedMods: std.ArrayListUnmanaged(ModInfo) = .{};

 fn openOrCreateDir(fs: std.fs.Dir, path: []const u8) !std.fs.Dir {
 var dir: std.fs.Dir = undefined;
-dir = fs.openDir(path, .{.iterate = true}) catch |err| {
+dir = fs.openDir(path, .{ .iterate = true }) catch |err| {
 if (err == std.fs.Dir.OpenError.FileNotFound) {
 try fs.makeDir(path);
-dir = try fs.openDir(path, .{.iterate = true});
+dir = try fs.openDir(path, .{ .iterate = true });
 return dir;
 } else {
 return err;
@@ -44,7 +44,7 @@ fn openOrCreateDir(fs: std.fs.Dir, path: []const u8) !std.fs.Dir {

 fn untarToDirAndGetFile(fs: std.fs.Dir, name: []const u8, unpack: []const u8) !std.fs.File {
 var buffer: [1024]u8 = undefined;
-var modDir = try openOrCreateDir(fs,unpack);
+var modDir = try openOrCreateDir(fs, unpack);
 defer modDir.close();
 var tarFile = try fs.openFile(try std.fmt.bufPrint(&buffer, "{s}.tar", .{name}), .{});
 defer tarFile.close();
@@ -56,7 +56,7 @@ fn untarToDirAndGetFile(fs: std.fs.Dir, name: []const u8, unpack: []const u8) !s
 }

 fn loadMod(entry: std.fs.Dir.Entry) !void {
-const modName = entry.name.ptr[0..entry.name.len - 4];
+const modName = entry.name.ptr[0 .. entry.name.len - 4];
 const fullDir = std.fmt.allocPrint(allocator, "assets/mods/{s}", .{modName}) catch @panic("Failed to allocate for fullDir");
 defer allocator.free(fullDir);
 const modDir = try std.fmt.allocPrint(allocator, "{s}_siderosmod__", .{fullDir});
@@ -64,18 +64,19 @@ fn loadMod(entry: std.fs.Dir.Entry) !void {
 global_runtime.* = mods.GlobalRuntime.init(allocator);

 std.fs.cwd().deleteTree(modDir) catch |err| {
-std.debug.panic("Failed to delete {s} (reason: {any})", .{modDir, err});
+std.debug.panic("Failed to delete {s} (reason: {any})", .{ modDir, err });
 };
 var file = untarToDirAndGetFile(std.fs.cwd(), fullDir, modDir) catch |err| {
 return err;
 };
 defer std.fs.cwd().deleteTree(modDir) catch |err| {
-std.debug.panic("Failed to delete {s} (reason: {any})", .{modDir, err});
+std.debug.panic("Failed to delete {s} (reason: {any})", .{ modDir, err });
 };
 defer file.close();
 // TODO(luccie): Make this be able to construct a buffer for the whole file
 const buffer = try allocator.alloc(u8, 1_000_000);
-var parser = mods.Parser.init(allocator, file.reader(buffer)) catch @panic("Failed to init parser");
+var reader = file.reader(buffer);
+var parser = mods.Parser.init(allocator, &reader.interface) catch @panic("Failed to init parser");
 defer parser.deinit();
 parser.parseModule() catch |err| {
 // TODO(luccie): Find a better option for the expression `parser.reader.buffer[parser.reader.seek]`
@@ -101,30 +102,30 @@ fn loadMod(entry: std.fs.Dir.Entry) !void {
 var parameters = [_]mods.VM.Value{.{ .i32 = @intCast(modIdx) }};
 runtime.externalCall(allocator, .init, &parameters) catch @panic("Failed to call to init");
 const result = runtime.stack.pop().?.i64;
-if (result != 0){
-std.debug.print("[ERROR]: Mod {s} init returned {d}\n", .{modName, result});
+if (result != 0) {
+std.debug.print("[ERROR]: Mod {s} init returned {d}\n", .{ modName, result });
 return error.Failure;
 }
-loadedMods.append(allocator, .{.name = try allocator.dupe(u8, modName), .runtime = runtime, .modIdx = modIdx}) catch @panic("Failed to append to loadedMods");
+loadedMods.append(allocator, .{ .name = try allocator.dupe(u8, modName), .runtime = runtime, .modIdx = modIdx }) catch @panic("Failed to append to loadedMods");
 }

 fn init_mods() void {
-var modsDir = std.fs.cwd().openDir("./assets/mods", .{.iterate = true}) catch @panic("Failed to open assets/mods");
+var modsDir = std.fs.cwd().openDir("./assets/mods", .{ .iterate = true }) catch @panic("Failed to open assets/mods");
 defer modsDir.close();

 var modsDirIter = modsDir.iterate();
 while (modsDirIter.next() catch @panic("Failed to get next iteration of mods directory")) |entry| {
-if (std.mem.indexOf(u8, entry.name, "siderosmod") != null){
+if (std.mem.indexOf(u8, entry.name, "siderosmod") != null) {
 std.fs.cwd().deleteTree(entry.name) catch |err| {
-std.debug.panic("Failed to delete {s} (reason: {any})", .{entry.name, err});
+std.debug.panic("Failed to delete {s} (reason: {any})", .{ entry.name, err });
 };
 continue;
 }
-if (entry.kind != std.fs.File.Kind.file){
+if (entry.kind != std.fs.File.Kind.file) {
 std.debug.panic("TODO: Search recursively for mods\n", .{});
 }
-const extension = entry.name.ptr[entry.name.len - 4..entry.name.len];
-if (!std.mem.eql(u8, extension, ".tar")){
+const extension = entry.name.ptr[entry.name.len - 4 .. entry.name.len];
+if (!std.mem.eql(u8, extension, ".tar")) {
 std.debug.print("[WARNING]: Found non tar extension in mods directory\n", .{});
 continue;
 }
@@ -162,7 +163,7 @@ export fn sideros_init(init: api.GameInit) callconv(.c) void {

 renderer.terrain_pipeline.setMaps(renderer.device, resources.terrain.texture) catch @panic("TODO: handle this");

-pool.addSystemGroup(&[_]ecs.System{systems.render, systems.moveCamera}, true) catch @panic("TODO: Gracefuly handle error");
+pool.addSystemGroup(&[_]ecs.System{ systems.render, systems.moveCamera }, true) catch @panic("TODO: Gracefuly handle error");
 pool.resources.renderer = &renderer;
 pool.tick();
 init_mods();
@@ -178,8 +179,8 @@ export fn sideros_cleanup() callconv(.c) void {
 var runtime = info.runtime;
 runtime.externalCall(allocator, .deinit, &.{}) catch @panic("Failed to call deinit");
 const result = runtime.stack.pop().?.i64;
-if (result != 0){
-std.debug.panic("[ERROR]: Mod {s} deinit returned {d}\n", .{info.name, result});
+if (result != 0) {
+std.debug.panic("[ERROR]: Mod {s} deinit returned {d}\n", .{ info.name, result });
 }
 defer runtime.deinit(allocator);
 defer allocator.free(info.name);