path: root/codegen/ccc.zig
Diffstat (limited to 'codegen/ccc.zig')
-rw-r--r--  codegen/ccc.zig  125
1 file changed, 125 insertions(+), 0 deletions(-)
diff --git a/codegen/ccc.zig b/codegen/ccc.zig
new file mode 100644
index 0000000..93da6a0
--- /dev/null
+++ b/codegen/ccc.zig
@@ -0,0 +1,125 @@
const std = @import("std");
const builtin = @import("builtin");

const block_size = 256;
const Block = [block_size]u8;

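// Maps a 256-byte block of combining class values to its byte offset in stage 2,
// so that identical blocks are stored only once in the two-stage lookup table.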
const BlockMap = std.HashMap(
    Block,
    u16,
    struct {
        pub fn hash(_: @This(), k: Block) u64 {
            var hasher = std.hash.Wyhash.init(0);
            std.hash.autoHashStrat(&hasher, k, .DeepRecursive);
            return hasher.final();
        }

        pub fn eql(_: @This(), a: Block, b: Block) bool {
            return std.mem.eql(u8, &a, &b);
        }
    },
    std.hash_map.default_max_load_percentage,
);

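// Parses data/unicode/extracted/DerivedCombiningClass.txt, builds a two-stage
// lookup table of canonical combining class values, and writes it
// deflate-compressed to the output path given as the first command-line argument.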
pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

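    // Combining class per code point; code points with the default class 0 are omitted.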
    var flat_map = std.AutoHashMap(u21, u8).init(allocator);
    defer flat_map.deinit();

    var line_buf: [4096]u8 = undefined;

    // Process DerivedCombiningClass.txt
    var cc_file = try std.fs.cwd().openFile("data/unicode/extracted/DerivedCombiningClass.txt", .{});
    defer cc_file.close();
    var cc_buf = std.io.bufferedReader(cc_file.reader());
    const cc_reader = cc_buf.reader();

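    // Each data line has the form "XXXX[..YYYY] ; ccc # comment": a code point
    // or range followed by its canonical combining class.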
    while (try cc_reader.readUntilDelimiterOrEof(&line_buf, '\n')) |line| {
        if (line.len == 0 or line[0] == '#') continue;
        const no_comment = if (std.mem.indexOfScalar(u8, line, '#')) |octo| line[0..octo] else line;

        var field_iter = std.mem.tokenizeAny(u8, no_comment, "; ");
        var current_code: [2]u21 = undefined;

        var i: usize = 0;
        while (field_iter.next()) |field| : (i += 1) {
            switch (i) {
                0 => {
                    // Code point(s)
                    if (std.mem.indexOf(u8, field, "..")) |dots| {
                        current_code = .{
                            try std.fmt.parseInt(u21, field[0..dots], 16),
                            try std.fmt.parseInt(u21, field[dots + 2 ..], 16),
                        };
                    } else {
                        const code = try std.fmt.parseInt(u21, field, 16);
                        current_code = .{ code, code };
                    }
                },
                1 => {
                    // Combining Class
                    if (std.mem.eql(u8, field, "0")) continue;
                    const cc = try std.fmt.parseInt(u8, field, 10);
                    for (current_code[0]..current_code[1] + 1) |cp| try flat_map.put(@intCast(cp), cc);
                },
                else => {},
            }
        }
    }

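    // Build the two-stage table: stage 1 maps (cp >> 8) to a byte offset into
    // stage 2, and stage 2 stores the 256 combining class values of each unique
    // block. A consumer can then look up ccc = stage2[stage1[cp >> 8] + (cp & 0xFF)].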
    var blocks_map = BlockMap.init(allocator);
    defer blocks_map.deinit();

    var stage1 = std.ArrayList(u16).init(allocator);
    defer stage1.deinit();

    var stage2 = std.ArrayList(u8).init(allocator);
    defer stage2.deinit();

    var block: Block = [_]u8{0} ** block_size;
    var block_len: u16 = 0;

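    // Walk every code point, filling 256-entry blocks and deduplicating them
    // through blocks_map.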
    for (0..0x110000) |i| {
        const cp: u21 = @intCast(i);
        const cc = flat_map.get(cp) orelse 0;

        // Add this code point's class to the current block; flush the block once
        // it's full or we've reached the last code point.
        block[block_len] = cc;
        block_len += 1;

        if (block_len < block_size and cp != 0x10ffff) continue;

        const gop = try blocks_map.getOrPut(block);
        if (!gop.found_existing) {
            gop.value_ptr.* = @intCast(stage2.items.len);
            try stage2.appendSlice(&block);
        }

        try stage1.append(gop.value_ptr.*);
        block_len = 0;
    }

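    // The output path is the first (and only expected) command-line argument.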
    var args_iter = try std.process.argsWithAllocator(allocator);
    defer args_iter.deinit();
    _ = args_iter.skip();
    const output_path = args_iter.next() orelse @panic("No output file arg!");

    const compressor = std.compress.deflate.compressor;
    var out_file = try std.fs.cwd().createFile(output_path, .{});
    defer out_file.close();
    var out_comp = try compressor(allocator, out_file.writer(), .{ .level = .best_compression });
    defer out_comp.deinit();
    const writer = out_comp.writer();

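    // Output layout (deflate-compressed, native endianness): stage-1 length as
    // u16, the stage-1 entries as u16s, then stage-2 length as u16 followed by
    // the raw stage-2 bytes.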
    const endian = builtin.cpu.arch.endian();
    try writer.writeInt(u16, @intCast(stage1.items.len), endian);
    for (stage1.items) |i| try writer.writeInt(u16, i, endian);

    try writer.writeInt(u16, @intCast(stage2.items.len), endian);
    try writer.writeAll(stage2.items);

    try out_comp.flush();
}