const std = @import("std");
const mem = std.mem;
const Tokenizer = @import("tokenizer.zig");
const TokenType = Tokenizer.TokenType;
const Token = Tokenizer.Token;
const StringPool = @import("strings.zig");
const StringIndex = StringPool.StringIndex;
const assert = std.debug.assert;

const Self = @This();

pub const Error = enum {
    TrailingComma,
    MissingKey,
    MissingValue,
    UnexpectedToken,
};

pub const JsonType = enum {
    null,
    bool,
    number,
    string,
    array,
    object,
};

pub const JsonValue = union(JsonType) {
    null: void,
    bool: bool,
    number: f64,
    string: StringIndex,
    array: ArraySlice,
    object: ObjectEntry,
};

pub const JsonInput = union(JsonType) {
    // data structures
    const Object = std.StringArrayHashMapUnmanaged(JsonInput);

    null: void,
    bool: bool,
    number: f64,
    string: []const u8,
    array: []JsonInput,
    object: Object,

    pub fn deinit(self: JsonInput, allocator: mem.Allocator) void {
        switch (self) {
            .array => |array| {
                for (array) |json_input| json_input.deinit(allocator);
                allocator.free(array);
            },
            .object => |*object| {
                var it = object.iterator();
                while (it.next()) |entry| {
                    entry.value_ptr.deinit(allocator);
                }
                @constCast(object).deinit(allocator);
            },
            else => {},
        }
    }

    pub fn format(
        self: @This(),
        comptime fmt: []const u8,
        opts: std.fmt.FormatOptions,
        writer: anytype,
    ) !void {
        switch (self) {
            .null => try writer.writeAll("null"),
            .bool => try writer.writeAll(if (self.bool) "true" else "false"),
            .number => try writer.print("{d}", .{self.number}),
            .string => try writer.print("\"{s}\"", .{self.string}),
            .array => {
                try writer.writeByte('[');
                for (self.array, 0..) |val, i| {
                    try val.format(fmt, opts, writer);
                    if (i < self.array.len - 1) try writer.writeByte(',');
                }
                try writer.writeByte(']');
            },
            .object => {
                try writer.writeByte('{');
                for (self.object.keys(), self.object.values(), 0..) |k, v, i| {
                    try writer.print("\"{s}\"", .{k});
                    try writer.writeByte(':');
                    try v.format(fmt, opts, writer);
                    if (i < self.object.entries.len - 1) try writer.writeByte(',');
                }
                try writer.writeByte('}');
            },
        }
    }
};

/// same as ObjectEntry but simpler
/// start is the offset
pub const ArraySlice = struct {
    len: usize,
    start: usize,
};

/// just += the properties and value indexes to get the next item
/// property_idx and value_idx are the offset
/// it should be ordered
pub const ObjectEntry = struct {
    len: usize,
    tip: usize,
};

pub const PropertyEntry = struct {
    tip: StringIndex,
};
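// Illustrative note (not normative): every value lives in one flat `index` list,
// and object keys are kept out-of-band in `property_map`, keyed by the slot of
// the value they name. For the document used in the test at the bottom of this
// file,
//
//   { "a":"b", "c":"d", "f": { "g": "h" }, "i": "j" }
//
// the intended layout is roughly:
//
//   slot 0: object { len = 4, tip = 1 }   (root)
//   slot 1: string "b"                     property_map[1] -> "a"
//   slot 2: string "d"                     property_map[2] -> "c"
//   slot 3: object { len = 1, tip = 4 }    property_map[3] -> "f"
//   slot 4: string "h"                     property_map[4] -> "g"
//   slot 5: string "j"                     property_map[5] -> "i"
//
// Iteration walks `tip`/`start` forward, adding `skipSlots` for values that
// occupy more than one slot (nested objects and arrays).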
pub const Flags = packed struct {
    allow_comments: bool = false,
    allow_trailing_comma: bool = false,
    enums_are_strings: bool = false,
    unions_are_strings: bool = false,
    packed_structs_are_ints: bool = false,
};

pub const Options = struct {
    comptime indent_len: usize = 4,
    comptime max_depth: usize = 256,
    comptime flags: Flags = .{},
};

index: std.MultiArrayList(JsonValue) = .{},
strings: StringPool = .empty,
properties: StringPool = .empty,
property_map: std.AutoArrayHashMapUnmanaged(usize, PropertyEntry) = .empty,
options: Options = .{},

pub const init = Self{};

pub fn deinit(self: *Self, allocator: mem.Allocator) void {
    self.index.deinit(allocator);
    self.properties.deinit(allocator);
    self.strings.deinit(allocator);
    self.property_map.deinit(allocator);
}

fn addNumber(self: *Self, allocator: mem.Allocator, number: f64) !usize {
    try self.index.ensureUnusedCapacity(allocator, 1);
    const idx = self.index.addOneAssumeCapacity();
    self.index.set(idx, .{ .number = number });
    return idx;
}

fn addProperty(self: *Self, allocator: mem.Allocator, bytes: []const u8) !usize {
    const stridx = try self.properties.add(allocator, bytes);
    try self.index.ensureUnusedCapacity(allocator, 1);
    try self.property_map.ensureUnusedCapacity(allocator, 1);
    return @intFromEnum(stridx);
}

fn addString(self: *Self, allocator: mem.Allocator, bytes: []const u8) !usize {
    const stridx = try self.strings.add(allocator, bytes);
    try self.index.ensureUnusedCapacity(allocator, 1);
    const idx = self.index.addOneAssumeCapacity();
    self.index.set(idx, .{ .string = stridx });
    return idx;
}

fn addEmpty(self: *Self, allocator: mem.Allocator) !usize {
    try self.index.ensureUnusedCapacity(allocator, 1);
    const idx = self.index.addOneAssumeCapacity();
    return idx;
}

fn addBool(self: *Self, allocator: mem.Allocator, value: bool) !usize {
    try self.index.ensureUnusedCapacity(allocator, 1);
    const idx = self.index.addOneAssumeCapacity();
    self.index.set(idx, .{ .bool = value });
    return idx;
}

fn addNull(self: *Self, allocator: mem.Allocator) !usize {
    try self.index.ensureUnusedCapacity(allocator, 1);
    const idx = self.index.addOneAssumeCapacity();
    self.index.set(idx, .{ .null = {} });
    return idx;
}

// Recursively compute how many index slots a node occupies (including nested)
fn skipSlots(self: *Self, slot: usize) usize {
    const e = self.index.get(slot);
    switch (e) {
        .object => |obj| {
            var total: usize = 1;
            var v = obj.tip;
            for (0..obj.len) |_| {
                const s = skipSlots(self, v);
                total += s;
                v += s;
            }
            return total;
        },
        .array => |arr| {
            var total: usize = 1;
            var c = arr.start;
            for (0..arr.len) |_| {
                const s = skipSlots(self, c);
                total += s;
                c += s;
            }
            return total;
        },
        else => return 1,
    }
}

pub fn getValue(
    self: *Self,
    allocator: mem.Allocator,
    idx: usize,
) !JsonInput {
    const entry = self.index.get(idx);
    switch (entry) {
        .null => return .{ .null = {} },
        .bool => |b| return .{ .bool = b },
        .number => |number| return .{ .number = number },
        .string => |string| {
            const sl = string.slice(&self.strings);
            return .{ .string = sl };
        },
        .array => |arr| {
            var out = try allocator.alloc(JsonInput, arr.len);
            errdefer allocator.free(out);
            var c = arr.start;
            for (0..arr.len) |i| {
                const v = try self.getValue(allocator, c);
                out[i] = v;
                c += skipSlots(self, c);
            }
            return .{ .array = out[0..arr.len] };
        },
        .object => |obj| {
            var map: JsonInput.Object = .empty;
            errdefer map.deinit(allocator);
            var tip = obj.tip;
            for (0..obj.len) |_| if (self.property_map.get(tip)) |pen| {
                const key_slice = pen.tip.slice(&self.properties);
                const val = try self.getValue(allocator, tip);
                try map.put(allocator, key_slice, val);
                const s = self.skipSlots(tip);
                tip += s;
            } else {
                //for (self.property_map.keys(), self.property_map.values()) |k, v| {
                //std.debug.print("{}: {s}\n", .{ v.tip, @tagName(self.index.get(k)) });
                //std.debug.print("tip: {d}\n", .{tip});
                //}
                return error.MissingKey;
            };
            return .{ .object = map };
        },
    }
}
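// Overview (descriptive comment only): `parse` below drives a labeled-switch
// state machine over the token stream. `query` is a stack of the currently open
// containers; `.object_begin`/`.array_begin` push a slot and `.object_end`/
// `.array_end` pop it. Value prongs append one slot via the add* helpers and,
// when the parent is an array, grow its ArraySlice; object lengths are grown by
// the `.property` prong, which also records the key for the slot the upcoming
// value will occupy. A scalar at the root writes slot 0 directly and returns.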
/// always returns 0 (root)
pub fn parse(self: *Self, tokenizer: *Tokenizer) !usize {
    const allocator = tokenizer.allocator;
    var it = tokenizer.iterator();

    const root = try self.addEmpty(allocator);
    var token = it.next() orelse return root;
    var query: std.BoundedArray(usize, self.options.max_depth) = try .init(0);

    flag: switch (token.type) {
        .eof => {
            if (root != 0) {
                return error.InvalidSyntax;
            }
            if (query.slice().len != 0) {
                return error.InvalidSyntax;
            }
            return root;
        },
        .property => {
            defer tokenizer.skipWhitespace();

            const scope_idx = query.get(query.len - 1);
            switch (self.index.get(scope_idx)) {
                .object => |scope| {
                    //std.debug.print("prop: {s} \n", .{token.value.?.string});
                    const pidx = try self.addProperty(allocator, token.value.?.string);
                    // key the entry by the slot the upcoming value will occupy,
                    // i.e. the next index to be appended
                    self.property_map.putAssumeCapacity(self.index.len, .{ .tip = @enumFromInt(pidx) });
                    allocator.free(token.value.?.string);
                    self.index.set(scope_idx, .{ .object = ObjectEntry{
                        .len = scope.len + 1,
                        .tip = scope.tip,
                    } });
                },
                .array => {
                    return error.InvalidSyntax;
                },
                else => return error.InvalidSyntax,
            }

            const next = it.next() orelse return error.InvalidSyntax;
            token = next;
            switch (next.type) {
                .colon => {
                    token = it.next() orelse return error.InvalidSyntax;
                    continue :flag token.type;
                },
                else => continue :flag next.type,
                // else => return error.InvalidSyntax,
            }
        },
        .object_begin => {
            defer tokenizer.skipWhitespace();

            if (query.slice().len < 1) {
                const ptr = try query.addOne();
                ptr.* = root;
                self.index.set(root, .{ .object = ObjectEntry{
                    .len = 0,
                    .tip = 1,
                } });
            } else {
                //order
                const parent_idx = query.get(query.len - 1);
                const idx_ptr = try query.addOne();
                idx_ptr.* = try self.addEmpty(allocator);
                self.index.set(idx_ptr.*, .{ .object = ObjectEntry{
                    .len = 0,
                    .tip = self.index.len,
                } });
                switch (self.index.get(parent_idx)) {
                    .array => |slice| {
                        self.index.set(parent_idx, .{ .array = ArraySlice{
                            .len = slice.len + 1,
                            .start = if (slice.len == 0) idx_ptr.* else slice.start,
                        } });
                    },
                    else => {},
                }
            }

            const next = it.next() orelse return error.InvalidSyntax;
            token = next;
            switch (next.type) {
                .string => continue :flag .property,
                .object_end => continue :flag .object_end,
                else => return error.InvalidSyntax,
            }
        },
        .object_end, .array_end => {
            tokenizer.skipWhitespace();
            if (query.pop() == null) return error.InvalidSyntax; // double close
            if (query.slice().len == 0) return root;

            const next = it.next() orelse return root;
            token = next;
            switch (next.type) {
                .comma => continue :flag .comma,
                .object_end, .array_end => continue :flag next.type,
                else => return error.InvalidSyntax,
            }
        },
        .array_begin => {
            defer tokenizer.skipWhitespace();

            if (query.slice().len < 1) {
                const ptr = try query.addOne();
                ptr.* = root;
                self.index.set(root, .{ .array = ArraySlice{
                    .len = 0,
                    .start = 1,
                } });
            } else {
                // order matters
                const parent_idx = query.get(query.len - 1);
                const idx_ptr = try query.addOne();
                idx_ptr.* = try self.addEmpty(allocator);
                self.index.set(idx_ptr.*, .{ .array = ArraySlice{
                    .len = 0,
                    .start = idx_ptr.* + 1,
                } });
                switch (self.index.get(parent_idx)) {
                    .array => |slice| {
                        self.index.set(parent_idx, .{ .array = ArraySlice{
                            .len = slice.len + 1,
                            .start = if (slice.len == 0) idx_ptr.* else slice.start,
                        } });
                    },
                    else => {},
                }
            }

            const next = it.next() orelse return error.InvalidSyntax;
            token = next;
            switch (next.type) {
                .property => return error.InvalidSyntax,
                else => continue :flag next.type,
            }
        },
        .true, .false => {
            defer tokenizer.skipWhitespace();

            const idx = try self.addBool(allocator, if (token.type == .true) true else false);
            if (query.len == 0) {
                // root
                self.index.set(root, .{ .bool = if (token.type == .true) true else false });
                return root;
            }

            const parent_idx = query.get(query.len - 1);
            switch (self.index.get(parent_idx)) {
                .array => |slice| {
                    self.index.set(parent_idx, .{ .array = ArraySlice{
                        .len = slice.len + 1,
                        .start = if (slice.len == 0) idx else slice.start,
                    } });
                },
                else => {},
            }

            const next = it.next() orelse return error.InvalidSyntax;
            token = next;
            switch (next.type) {
                .comma => continue :flag .comma,
                .object_end, .array_end => continue :flag next.type,
                else => return error.InvalidSyntax,
            }
        },
        .string => {
            defer tokenizer.skipWhitespace();
            errdefer allocator.free(token.value.?.string);

            if (query.len == 0) {
                // root: store the string, then mirror its entry into slot 0
                const idx = try self.addString(allocator, token.value.?.string);
                allocator.free(token.value.?.string);
                self.index.set(root, self.index.get(idx));
                return root;
            }

            const parent_idx = query.get(query.len - 1);
            const next = it.next() orelse return error.InvalidSyntax;
            switch (next.type) {
                .colon => {
                    continue :flag .property;
                },
                else => |t| {
                    const idx = try self.addString(allocator, token.value.?.string);
                    allocator.free(token.value.?.string);
                    switch (self.index.get(parent_idx)) {
                        .array => |slice| {
                            self.index.set(parent_idx, .{ .array = ArraySlice{
                                .len = slice.len + 1,
                                .start = if (slice.len == 0) idx else slice.start,
                            } });
                        },
                        else => {},
                    }
                    token = next;
                    continue :flag t;
                },
            }
        },
        .number => {
            defer tokenizer.skipWhitespace();

            if (query.len == 0) {
                // root
                _ = try self.addNumber(allocator, token.value.?.number);
                self.index.set(root, .{ .number = token.value.?.number });
                return root;
            }

            const parent_idx = query.get(query.len - 1);
            const idx = try self.addNumber(allocator, token.value.?.number);
            switch (self.index.get(parent_idx)) {
                .array => |slice| {
                    self.index.set(parent_idx, .{ .array = ArraySlice{
                        .len = slice.len + 1,
                        .start = if (slice.len == 0) idx else slice.start,
                    } });
                },
                else => {},
            }

            const next = it.next() orelse return error.InvalidSyntax;
            token = next;
            switch (next.type) {
                .comma => continue :flag .comma,
                .object_end, .array_end => continue :flag next.type,
                else => return error.InvalidSyntax,
            }
        },
        .comma => {
            if (!self.options.flags.allow_trailing_comma) {
                const next = it.next() orelse return error.InvalidSyntax;
                token = next;
                switch (next.type) {
                    .object_end, .array_end => return error.TrailingComma,
                    .comma => return error.InvalidSyntax,
                    else => continue :flag token.type,
                }
            }
        },
        .null => {
            defer tokenizer.skipWhitespace();

            const idx = try self.addNull(allocator);
            if (query.len == 0) {
                // root
                self.index.set(root, .{ .null = {} });
                return root;
            }

            const parent_idx = query.get(query.len - 1);
            switch (self.index.get(parent_idx)) {
                .array => |slice| {
                    self.index.set(parent_idx, .{ .array = ArraySlice{
                        .len = slice.len + 1,
                        .start = if (slice.len == 0) idx else slice.start,
                    } });
                },
                else => {},
            }

            const next = it.next() orelse return error.InvalidSyntax;
            token = next;
            switch (next.type) {
                .comma => continue :flag .comma,
                .object_end, .array_end => continue :flag next.type,
                else => return error.InvalidSyntax,
            }
        },
        else => return error.InvalidSyntax,
    }

    return root;
}

test getValue {
    const allocator = std.testing.allocator;

    const text =
        \\{
        \\ "a":"b",
        \\ "c":"d",
        \\ "f": {
        \\ "g": "h"
        \\ },
        \\ "i": "j"
        \\}
    ;
    // eg 1: b, 2: c, 3: f, 4: g, 5: i

    var tokenizer: Tokenizer = try .init(allocator, text);
    defer tokenizer.deinit();

    const self = try allocator.create(Self);
    self.* = Self.init;
    defer allocator.destroy(self);
    defer self.deinit(allocator);

    const idx: usize = try self.parse(&tokenizer);

    const root = try self.getValue(allocator, idx);
    defer root.deinit(allocator);

    try std.testing.expect(root == .object);
    std.debug.print("{}\n", .{root});
}
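
// Illustrative extra coverage (an addition, not part of the original test):
// exercises array parsing and the flat slot layout end to end. Assumes the
// tokenizer emits `.number` tokens with `token.value.?.number` set for integer
// literals, as `parse` above already relies on.
test "getValue array" {
    const allocator = std.testing.allocator;

    const text = "[1,2,3]";

    var tokenizer: Tokenizer = try .init(allocator, text);
    defer tokenizer.deinit();

    const self = try allocator.create(Self);
    self.* = Self.init;
    defer allocator.destroy(self);
    defer self.deinit(allocator);

    const idx: usize = try self.parse(&tokenizer);

    const root = try self.getValue(allocator, idx);
    defer root.deinit(allocator);

    // root is slot 0; the three numbers occupy slots 1..3
    try std.testing.expect(root == .array);
    try std.testing.expectEqual(@as(usize, 3), root.array.len);
    try std.testing.expectEqual(@as(f64, 2), root.array[1].number);
}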