glTF models now take buffer lengths at runtime

This commit is contained in:
Lorenzo Torres 2025-03-24 19:49:49 +01:00
parent 942bb3525d
commit 7cf43ccb8b
2 changed files with 2 additions and 4 deletions

Binary asset file changed (diff not shown).

@@ -157,10 +157,8 @@ pub fn parseFile(allocator: Allocator, name: []const u8) !struct { vertices: [][
     const data = (try std.json.parseFromSlice(Model.JsonChunk, allocator, @constCast(all[Model.Chunk.offset .. Model.Chunk.offset + json_chunk.length]), .{ .ignore_unknown_fields = true })).value;
     const binary = Model.Binary{ .data = all[Model.Chunk.offset + json_chunk.length + 8 ..] };
-    const vertices = try binary.readVec3(allocator, data.bufferViews.?[data.meshes.?[0].primitives.?[0].attributes.?.POSITION.?], 24);
-    const indices = try binary.readU16(allocator, data.bufferViews.?[data.meshes.?[0].primitives.?[0].indices.?], 36);
-    std.debug.print("vertices: {any}\n", .{vertices});
-    std.debug.print("indices: {any}\n", .{indices});
+    const vertices = try binary.readVec3(allocator, data.bufferViews.?[data.meshes.?[0].primitives.?[0].attributes.?.POSITION.?], data.accessors.?[data.meshes.?[0].primitives.?[0].attributes.?.POSITION.?].count);
+    const indices = try binary.readU16(allocator, data.bufferViews.?[data.meshes.?[0].primitives.?[0].indices.?], data.accessors.?[data.meshes.?[0].primitives.?[0].indices.?].count);
     return .{ .vertices = vertices, .indices = indices };
 }