Diffstat (limited to 'codegen/normp.zig')
-rw-r--r--  codegen/normp.zig  135
1 files changed, 135 insertions, 0 deletions
diff --git a/codegen/normp.zig b/codegen/normp.zig
new file mode 100644
index 0000000..a332e73
--- /dev/null
+++ b/codegen/normp.zig
@@ -0,0 +1,135 @@
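//! Generates a compressed, two-stage lookup table of Unicode normalization
//! properties from DerivedNormalizationProps.txt.
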
const std = @import("std");
const builtin = @import("builtin");

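// Each block covers 256 consecutive code points; the u3 per code point packs the
// three property flags collected below (NFD_QC, NFKD_QC, Full_Composition_Exclusion).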
const block_size = 256;
const Block = [block_size]u3;

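// Maps a block's contents to its starting offset in stage2, so identical blocks
// are stored only once when building the two-stage lookup table.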
const BlockMap = std.HashMap(
    Block,
    u16,
    struct {
        pub fn hash(_: @This(), k: Block) u64 {
            var hasher = std.hash.Wyhash.init(0);
            std.hash.autoHashStrat(&hasher, k, .DeepRecursive);
            return hasher.final();
        }

        pub fn eql(_: @This(), a: Block, b: Block) bool {
            return std.mem.eql(u3, &a, &b);
        }
    },
    std.hash_map.default_max_load_percentage,
);

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

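    // flat_map: code point -> packed property bits (1 = NFD_QC, 2 = NFKD_QC,
    // 4 = Full_Composition_Exclusion).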
    var flat_map = std.AutoHashMap(u21, u3).init(allocator);
    defer flat_map.deinit();

    var line_buf: [4096]u8 = undefined;

    // Parse DerivedNormalizationProps.txt.
    var in_file = try std.fs.cwd().openFile("data/unicode/DerivedNormalizationProps.txt", .{});
    defer in_file.close();
    var in_buf = std.io.bufferedReader(in_file.reader());
    const in_reader = in_buf.reader();

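    // Each data line has the form "<code point or range> ; <property> [; <value>] # comment";
    // blank lines and full-line comments are skipped, trailing comments stripped.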
    while (try in_reader.readUntilDelimiterOrEof(&line_buf, '\n')) |line| {
        if (line.len == 0 or line[0] == '#') continue;

        const no_comment = if (std.mem.indexOfScalar(u8, line, '#')) |octo| line[0..octo] else line;

        var field_iter = std.mem.tokenizeAny(u8, no_comment, "; ");
        var current_code: [2]u21 = undefined;

        var i: usize = 0;
        while (field_iter.next()) |field| : (i += 1) {
            switch (i) {
                0 => {
                    // Code point(s)
                    if (std.mem.indexOf(u8, field, "..")) |dots| {
                        current_code = .{
                            try std.fmt.parseInt(u21, field[0..dots], 16),
                            try std.fmt.parseInt(u21, field[dots + 2 ..], 16),
                        };
                    } else {
                        const code = try std.fmt.parseInt(u21, field, 16);
                        current_code = .{ code, code };
                    }
                },
                1 => {
                    // Norm props
                    for (current_code[0]..current_code[1] + 1) |cp| {
                        const gop = try flat_map.getOrPut(@intCast(cp));
                        if (!gop.found_existing) gop.value_ptr.* = 0;

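                        // Only the property name is checked; the value field that
                        // follows it on the line is ignored.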
                        if (std.mem.eql(u8, field, "NFD_QC")) {
                            gop.value_ptr.* |= 1;
                        } else if (std.mem.eql(u8, field, "NFKD_QC")) {
                            gop.value_ptr.* |= 2;
                        } else if (std.mem.eql(u8, field, "Full_Composition_Exclusion")) {
                            gop.value_ptr.* |= 4;
                        }
                    }
                },
                else => {},
            }
        }
    }

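    // Build the two-stage table: stage1 holds one offset per 256-code-point block,
    // and stage2 holds the deduplicated blocks of property bits.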
    var blocks_map = BlockMap.init(allocator);
    defer blocks_map.deinit();

    var stage1 = std.ArrayList(u16).init(allocator);
    defer stage1.deinit();

    var stage2 = std.ArrayList(u3).init(allocator);
    defer stage2.deinit();

    var block: Block = [_]u3{0} ** block_size;
    var block_len: u16 = 0;

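    // Walk every code point, filling 256-entry blocks; when a block is full (or the
    // last code point is reached), deduplicate it and record its stage2 offset in stage1.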
    for (0..0x110000) |i| {
        const cp: u21 = @intCast(i);
        const props = flat_map.get(cp) orelse 0;

        // Process block
        block[block_len] = props;
        block_len += 1;

        if (block_len < block_size and cp != 0x10ffff) continue;

        const gop = try blocks_map.getOrPut(block);
        if (!gop.found_existing) {
            gop.value_ptr.* = @intCast(stage2.items.len);
            try stage2.appendSlice(&block);
        }

        try stage1.append(gop.value_ptr.*);
        block_len = 0;
    }

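    // The output path is taken from the first command line argument.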
    var args_iter = try std.process.argsWithAllocator(allocator);
    defer args_iter.deinit();
    _ = args_iter.skip();
    const output_path = args_iter.next() orelse @panic("No output file arg!");

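    // Serialize both stages, deflate-compressed: each stage is written as a u16
    // length followed by its entries (u16 per stage1 offset, u8 per stage2 value).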
    const compressor = std.compress.deflate.compressor;
    var out_file = try std.fs.cwd().createFile(output_path, .{});
    defer out_file.close();
    var out_comp = try compressor(allocator, out_file.writer(), .{ .level = .best_compression });
    defer out_comp.deinit();
    const writer = out_comp.writer();

    const endian = builtin.cpu.arch.endian();
    try writer.writeInt(u16, @intCast(stage1.items.len), endian);
    for (stage1.items) |i| try writer.writeInt(u16, i, endian);

    try writer.writeInt(u16, @intCast(stage2.items.len), endian);
    for (stage2.items) |i| try writer.writeInt(u8, i, endian);

    try out_comp.flush();
}