From 836a4b6e63ac4bd7beb406cb20edf23f0bd342a9 Mon Sep 17 00:00:00 2001
From: Jose Colon Rodriguez
Date: Mon, 26 Feb 2024 12:24:42 -0400
Subject: Using separate data struct model.

---
 codegen/ccc.zig   | 125 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 codegen/dwp.zig   |  23 +++++-----
 codegen/gbp.zig   |  68 ++++++++---------
 codegen/normp.zig | 128 ------------------------------------------------------
 4 files changed, 154 insertions(+), 190 deletions(-)
 create mode 100644 codegen/ccc.zig
 delete mode 100644 codegen/normp.zig

diff --git a/codegen/ccc.zig b/codegen/ccc.zig
new file mode 100644
index 0000000..93da6a0
--- /dev/null
+++ b/codegen/ccc.zig
@@ -0,0 +1,125 @@
+const std = @import("std");
+const builtin = @import("builtin");
+
+const block_size = 256;
+const Block = [block_size]u8;
+
+const BlockMap = std.HashMap(
+    Block,
+    u16,
+    struct {
+        pub fn hash(_: @This(), k: Block) u64 {
+            var hasher = std.hash.Wyhash.init(0);
+            std.hash.autoHashStrat(&hasher, k, .DeepRecursive);
+            return hasher.final();
+        }
+
+        pub fn eql(_: @This(), a: Block, b: Block) bool {
+            return std.mem.eql(u8, &a, &b);
+        }
+    },
+    std.hash_map.default_max_load_percentage,
+);
+
+pub fn main() !void {
+    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    var flat_map = std.AutoHashMap(u21, u8).init(allocator);
+    defer flat_map.deinit();
+
+    var line_buf: [4096]u8 = undefined;
+
+    // Process DerivedCombiningClass.txt
+    var cc_file = try std.fs.cwd().openFile("data/unicode/extracted/DerivedCombiningClass.txt", .{});
+    defer cc_file.close();
+    var cc_buf = std.io.bufferedReader(cc_file.reader());
+    const cc_reader = cc_buf.reader();
+
+    while (try cc_reader.readUntilDelimiterOrEof(&line_buf, '\n')) |line| {
+        if (line.len == 0 or line[0] == '#') continue;
+        const no_comment = if (std.mem.indexOfScalar(u8, line, '#')) |octo| line[0..octo] else line;
+
+        var field_iter = std.mem.tokenizeAny(u8, no_comment, "; ");
+        var current_code: [2]u21 = undefined;
+
+        var i: usize = 0;
+        while (field_iter.next()) |field| : (i += 1) {
+            switch (i) {
+                0 => {
+                    // Code point(s)
+                    if (std.mem.indexOf(u8, field, "..")) |dots| {
+                        current_code = .{
+                            try std.fmt.parseInt(u21, field[0..dots], 16),
+                            try std.fmt.parseInt(u21, field[dots + 2 ..], 16),
+                        };
+                    } else {
+                        const code = try std.fmt.parseInt(u21, field, 16);
+                        current_code = .{ code, code };
+                    }
+                },
+                1 => {
+                    // Combining Class
+                    if (std.mem.eql(u8, field, "0")) continue;
+                    const cc = try std.fmt.parseInt(u8, field, 10);
+                    for (current_code[0]..current_code[1] + 1) |cp| try flat_map.put(@intCast(cp), cc);
+                },
+                else => {},
+            }
+        }
+    }
+
+    var blocks_map = BlockMap.init(allocator);
+    defer blocks_map.deinit();
+
+    var stage1 = std.ArrayList(u16).init(allocator);
+    defer stage1.deinit();
+
+    var stage2 = std.ArrayList(u8).init(allocator);
+    defer stage2.deinit();
+
+    var block: Block = [_]u8{0} ** block_size;
+    var block_len: u16 = 0;
+
+    for (0..0x110000) |i| {
+        const cp: u21 = @intCast(i);
+        const cc = flat_map.get(cp) orelse 0;
+
+        // Process block
+        block[block_len] = cc;
+        block_len += 1;
+
+        if (block_len < block_size and cp != 0x10ffff) continue;
+
+        const gop = try blocks_map.getOrPut(block);
+        if (!gop.found_existing) {
+            gop.value_ptr.* = @intCast(stage2.items.len);
+            try stage2.appendSlice(&block);
+        }
+
+        try stage1.append(gop.value_ptr.*);
+        block_len = 0;
+    }
+
+    var args_iter = try std.process.argsWithAllocator(allocator);
+    defer args_iter.deinit();
+    _ = args_iter.skip();
+    const output_path = args_iter.next() orelse @panic("No output file arg!");
+
+    const compressor = std.compress.deflate.compressor;
+    var out_file = try std.fs.cwd().createFile(output_path, .{});
+    defer out_file.close();
+    var out_comp = try compressor(allocator, out_file.writer(), .{ .level = .best_compression });
+    defer out_comp.deinit();
+    const writer = out_comp.writer();
+
+    const endian = builtin.cpu.arch.endian();
+    try writer.writeInt(u16, @intCast(stage1.items.len), endian);
+    for (stage1.items) |i| try writer.writeInt(u16, i, endian);
+
+    try writer.writeInt(u16, @intCast(stage2.items.len), endian);
+    try writer.writeAll(stage2.items);
+
+    try out_comp.flush();
+}
diff --git a/codegen/dwp.zig b/codegen/dwp.zig
index 9e387c6..76a14d3 100644
--- a/codegen/dwp.zig
+++ b/codegen/dwp.zig
@@ -1,4 +1,5 @@
 const std = @import("std");
+const builtin = @import("builtin");
 
 const options = @import("options");
 
@@ -229,21 +230,19 @@ pub fn main() !void {
     _ = args_iter.skip();
     const output_path = args_iter.next() orelse @panic("No output file arg!");
 
+    const compressor = std.compress.deflate.compressor;
     var out_file = try std.fs.cwd().createFile(output_path, .{});
     defer out_file.close();
-    var out_buf = std.io.bufferedWriter(out_file.writer());
-    const writer = out_buf.writer();
+    var out_comp = try compressor(allocator, out_file.writer(), .{ .level = .best_compression });
+    defer out_comp.deinit();
+    const writer = out_comp.writer();
 
-    try writer.writeAll("const std = @import(\"std\");\n");
+    const endian = builtin.cpu.arch.endian();
+    try writer.writeInt(u16, @intCast(stage1.items.len), endian);
+    for (stage1.items) |i| try writer.writeInt(u16, i, endian);
 
-    try writer.print("const Stage2Int = std.math.IntFittingRange(0, {});\n", .{stage2.items.len});
-    try writer.print("pub const stage_1 = [{}]Stage2Int{{", .{stage1.items.len});
-    for (stage1.items) |v| try writer.print("{},", .{v});
-    try writer.writeAll("};\n");
+    try writer.writeInt(u16, @intCast(stage2.items.len), endian);
+    for (stage2.items) |i| try writer.writeInt(i8, i, endian);
 
-    try writer.print("pub const stage_2 = [{}]i3{{", .{stage2.items.len});
-    for (stage2.items) |v| try writer.print("{},", .{v});
-    try writer.writeAll("};\n");
-
-    try out_buf.flush();
+    try out_comp.flush();
 }
diff --git a/codegen/gbp.zig b/codegen/gbp.zig
index 3bd9a4d..39e0da3 100644
--- a/codegen/gbp.zig
+++ b/codegen/gbp.zig
@@ -1,4 +1,5 @@
 const std = @import("std");
+const builtin = @import("builtin");
 
 const Indic = enum {
     none,
@@ -226,56 +227,23 @@ pub fn main() !void {
     _ = args_iter.skip();
     const output_path = args_iter.next() orelse @panic("No output file arg!");
 
+    const compressor = std.compress.deflate.compressor;
     var out_file = try std.fs.cwd().createFile(output_path, .{});
     defer out_file.close();
-    var out_buf = std.io.bufferedWriter(out_file.writer());
-    const writer = out_buf.writer();
-
-    const prop_code =
-        \\const std = @import("std");
-        \\
-        \\pub const Indic = enum {
-        \\    none,
-        \\
-        \\    Consonant,
-        \\    Extend,
-        \\    Linker,
-        \\};
-        \\
-        \\pub const Gbp = enum {
-        \\    none,
-        \\    Control,
-        \\    CR,
-        \\    Extend,
-        \\    L,
-        \\    LF,
-        \\    LV,
-        \\    LVT,
-        \\    Prepend,
-        \\    Regional_Indicator,
-        \\    SpacingMark,
-        \\    T,
-        \\    V,
-        \\    ZWJ,
-        \\};
-        \\
-    ;
-
-    try writer.writeAll(prop_code);
-
-    try writer.print("const Stage2Int = std.math.IntFittingRange(0, {});\n", .{stage2.items.len});
-    try writer.print("pub const stage_1 = [{}]Stage2Int{{", .{stage1.items.len});
-    for (stage1.items) |v| try writer.print("{},", .{v});
-    try writer.writeAll("};\n");
-
-    try writer.print("const Stage3Int = std.math.IntFittingRange(0, {});\n", .{stage3_len});
-    try writer.print("pub const stage_2 = [{}]Stage3Int{{", .{stage2.items.len});
-    for (stage2.items) |v| try writer.print("{},", .{v});
-    try writer.writeAll("};\n");
-
-    try writer.print("pub const stage_3 = [{}]u8{{", .{stage3_len});
-    for (stage3.keys()) |v| try writer.print("{},", .{v});
-    try writer.writeAll("};\n");
-
-    try out_buf.flush();
+    var out_comp = try compressor(allocator, out_file.writer(), .{ .level = .best_compression });
+    defer out_comp.deinit();
+    const writer = out_comp.writer();
+
+    const endian = builtin.cpu.arch.endian();
+    try writer.writeInt(u16, @intCast(stage1.items.len), endian);
+    for (stage1.items) |i| try writer.writeInt(u16, i, endian);
+
+    try writer.writeInt(u16, @intCast(stage2.items.len), endian);
+    for (stage2.items) |i| try writer.writeInt(u16, i, endian);
+
+    const props_bytes = stage3.keys();
+    try writer.writeInt(u16, @intCast(props_bytes.len), endian);
+    try writer.writeAll(props_bytes);
+
+    try out_comp.flush();
 }
diff --git a/codegen/normp.zig b/codegen/normp.zig
deleted file mode 100644
index 25af65c..0000000
--- a/codegen/normp.zig
+++ /dev/null
@@ -1,128 +0,0 @@
-const std = @import("std");
-
-const options = @import("options");
-
-const block_size = 256;
-const Block = [block_size]u8;
-
-const BlockMap = std.HashMap(
-    Block,
-    u16,
-    struct {
-        pub fn hash(_: @This(), k: Block) u64 {
-            var hasher = std.hash.Wyhash.init(0);
-            std.hash.autoHashStrat(&hasher, k, .DeepRecursive);
-            return hasher.final();
-        }
-
-        pub fn eql(_: @This(), a: Block, b: Block) bool {
-            return std.mem.eql(u8, &a, &b);
-        }
-    },
-    std.hash_map.default_max_load_percentage,
-);
-
-pub fn main() !void {
-    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
-    defer arena.deinit();
-    const allocator = arena.allocator();
-
-    var flat_map = std.AutoHashMap(u21, u8).init(allocator);
-    defer flat_map.deinit();
-
-    var line_buf: [4096]u8 = undefined;
-
-    // Process DerivedEastAsianWidth.txt
-    var cc_file = try std.fs.cwd().openFile("data/unicode/extracted/DerivedCombiningClass.txt", .{});
-    defer cc_file.close();
-    var cc_buf = std.io.bufferedReader(cc_file.reader());
-    const cc_reader = cc_buf.reader();
-
-    while (try cc_reader.readUntilDelimiterOrEof(&line_buf, '\n')) |line| {
-        if (line.len == 0 or line[0] == '#') continue;
-        const no_comment = if (std.mem.indexOfScalar(u8, line, '#')) |octo| line[0..octo] else line;
-
-        var field_iter = std.mem.tokenizeAny(u8, no_comment, "; ");
-        var current_code: [2]u21 = undefined;
-
-        var i: usize = 0;
-        while (field_iter.next()) |field| : (i += 1) {
-            switch (i) {
-                0 => {
-                    // Code point(s)
-                    if (std.mem.indexOf(u8, field, "..")) |dots| {
-                        current_code = .{
-                            try std.fmt.parseInt(u21, field[0..dots], 16),
-                            try std.fmt.parseInt(u21, field[dots + 2 ..], 16),
-                        };
-                    } else {
-                        const code = try std.fmt.parseInt(u21, field, 16);
-                        current_code = .{ code, code };
-                    }
-                },
-                1 => {
-                    // Combining Class
-                    if (std.mem.eql(u8, field, "0")) continue;
-                    const cc = try std.fmt.parseInt(u8, field, 10);
-                    for (current_code[0]..current_code[1] + 1) |cp| try flat_map.put(@intCast(cp), cc);
-                },
-                else => {},
-            }
-        }
-    }
-
-    var blocks_map = BlockMap.init(allocator);
-    defer blocks_map.deinit();
-
-    var stage1 = std.ArrayList(u16).init(allocator);
-    defer stage1.deinit();
-
-    var stage2 = std.ArrayList(u8).init(allocator);
-    defer stage2.deinit();
-
-    var block: Block = [_]u8{0} ** block_size;
-    var block_len: u16 = 0;
-
-    for (0..0x110000) |i| {
-        const cp: u21 = @intCast(i);
-        const cc = flat_map.get(cp) orelse 0;
-
-        // Process block
-        block[block_len] = cc;
-        block_len += 1;
-
-        if (block_len < block_size and cp != 0x10ffff) continue;
-
-        const gop = try blocks_map.getOrPut(block);
-        if (!gop.found_existing) {
-            gop.value_ptr.* = @intCast(stage2.items.len);
-            try stage2.appendSlice(&block);
-        }
-
-        try stage1.append(gop.value_ptr.*);
-        block_len = 0;
-    }
-
-    var args_iter = try std.process.argsWithAllocator(allocator);
-    defer args_iter.deinit();
-    _ = args_iter.skip();
-    const output_path = args_iter.next() orelse @panic("No output file arg!");
-
-    var out_file = try std.fs.cwd().createFile(output_path, .{});
-    defer out_file.close();
-    var out_buf = std.io.bufferedWriter(out_file.writer());
-    const writer = out_buf.writer();
-
-    try writer.writeAll("const std = @import(\"std\");\n");
-
-    try writer.print("const Stage2Int = std.math.IntFittingRange(0, {});\n", .{stage2.items.len});
-    try writer.print("pub const stage_1 = [{}]Stage2Int{{", .{stage1.items.len});
-    for (stage1.items) |v| try writer.print("{},", .{v});
-    try writer.writeAll("};\n");
-
-    try writer.print("pub const stage_2 = [{}]u8{{", .{stage2.items.len});
-    for (stage2.items) |v| try writer.print("{},", .{v});
-    try writer.writeAll("};\n");
-
-    try out_buf.flush();
-}
--
cgit v1.2.3