Diffstat (limited to 'codegen/compat.zig')
-rw-r--r--	codegen/compat.zig	121
1 file changed, 101 insertions, 20 deletions
diff --git a/codegen/compat.zig b/codegen/compat.zig
index debb83d..a9d1f92 100644
--- a/codegen/compat.zig
+++ b/codegen/compat.zig
@@ -1,58 +1,82 @@
 const std = @import("std");
 const builtin = @import("builtin");
 
+const block_size = 256;
+const Block = [block_size][]const u21;
+
+const BlockMap = std.HashMap(
+    Block,
+    u16,
+    struct {
+        pub fn hash(_: @This(), k: Block) u64 {
+            var hasher = std.hash.Wyhash.init(0);
+            std.hash.autoHashStrat(&hasher, k, .DeepRecursive);
+            return hasher.final();
+        }
+
+        pub fn eql(_: @This(), aBlock: Block, bBlock: Block) bool {
+            return for (aBlock, bBlock) |a, b| {
+                if (a.len != b.len) return false;
+                for (a, b) |a_cp, b_cp| {
+                    if (a_cp != b_cp) return false;
+                }
+            } else true;
+        }
+    },
+    std.hash_map.default_max_load_percentage,
+);
+
 pub fn main() anyerror!void {
     var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     defer arena.deinit();
     const allocator = arena.allocator();
 
     // Process UnicodeData.txt
-    var write_buf: [4096]u8 = undefined;
-
     var in_reader = std.io.Reader.fixed(@embedFile("UnicodeData.txt"));
     var args_iter = try std.process.argsWithAllocator(allocator);
     defer args_iter.deinit();
     _ = args_iter.skip();
     const output_path = args_iter.next() orelse @panic("No output file arg!");
 
-    var out_file = try std.fs.cwd().createFile(output_path, .{});
-    defer out_file.close();
-    var writer = out_file.writer(&write_buf);
+    var compat_map = std.AutoHashMap(u21, []u21).init(allocator);
+    defer compat_map.deinit();
 
-    const endian = builtin.cpu.arch.endian();
-
-    lines: while (in_reader.takeDelimiterInclusive('\n')) |took| {
-        const line = std.mem.trimRight(u8, took, "\n");
+    while (in_reader.takeDelimiterInclusive('\n')) |line| {
         if (line.len == 0) continue;
 
         var field_iter = std.mem.splitScalar(u8, line, ';');
-        var cps: [19]u24 = undefined;
-        var len: u8 = 1;
+        var cp: u21 = undefined;
 
         var i: usize = 0;
         while (field_iter.next()) |field| : (i += 1) {
+            if (field.len == 0) continue;
+
             switch (i) {
-                0 => cps[0] = try std.fmt.parseInt(u24, field, 16),
+                0 => {
+                    cp = try std.fmt.parseInt(u21, field, 16);
+                },
 
                 5 => {
                     // Not compatibility.
-                    if (field.len == 0 or field[0] != '<') continue :lines;
+                    if (field[0] != '<') continue;
+
                     var cp_iter = std.mem.tokenizeScalar(u8, field, ' ');
                     _ = cp_iter.next(); // <compat type>
 
+                    var cps: [18]u21 = undefined;
+                    var len: u8 = 0;
+
                     while (cp_iter.next()) |cp_str| : (len += 1) {
-                        cps[len] = try std.fmt.parseInt(u24, cp_str, 16);
+                        cps[len] = try std.fmt.parseInt(u21, cp_str, 16);
                     }
-                },
 
-                2 => if (line[0] == '<') continue :lines,
+                    const slice = try allocator.dupe(u21, cps[0..len]);
+                    try compat_map.put(cp, slice);
+                },
 
                 else => {},
             }
         }
-
-        try writer.interface.writeInt(u8, @intCast(len), endian);
-        for (cps[0..len]) |cp| try writer.interface.writeInt(u24, cp, endian);
     } else |err| switch (err) {
         error.EndOfStream => {},
         else => {
@@ -60,6 +84,63 @@ pub fn main() anyerror!void {
         },
     }
 
-    try writer.interface.writeInt(u16, 0, endian);
+    // Build multi-tiered lookup tables for compatibility decompositions
+    var blocks_map = BlockMap.init(allocator);
+    defer blocks_map.deinit();
+
+    var stage1 = std.array_list.Managed(u16).init(allocator);
+    defer stage1.deinit();
+
+    var stage2 = std.array_list.Managed([]const u21).init(allocator);
+    defer stage2.deinit();
+
+    var block: Block = [_][]const u21{&[_]u21{}} ** block_size;
+    var block_len: u16 = 0;
+
+    for (0..0x110000) |i| {
+        const cp: u21 = @intCast(i);
+        const compat: []const u21 = compat_map.get(cp) orelse &[_]u21{};
+
+        block[block_len] = compat;
+        block_len += 1;
+
+        if (block_len < block_size and cp != 0x10ffff) continue;
+
+        const gop = try blocks_map.getOrPut(block);
+        if (!gop.found_existing) {
+            gop.value_ptr.* = @intCast(stage2.items.len);
+            try stage2.appendSlice(&block);
+        }
+
+        try stage1.append(gop.value_ptr.*);
+        block_len = 0;
+    }
+    // Write out
+    var write_buf: [4096]u8 = undefined;
+    var out_file = try std.fs.cwd().createFile(output_path, .{});
+    defer out_file.close();
+    var writer = out_file.writer(&write_buf);
+
+    try writer.interface.print(
+        \\//! This file is auto-generated. Do not edit.
+        \\
+        \\pub const s1: [{}]u16 = .{{
+    , .{stage1.items.len});
+    for (stage1.items) |entry| try writer.interface.print("{}, ", .{entry});
+
+    try writer.interface.print(
+        \\
+        \\}};
+        \\
+        \\pub const s2: [{}][]const u21 = .{{
+    , .{stage2.items.len});
+    for (stage2.items) |entry| {
+        try writer.interface.print("&.{any}, ", .{entry});
+    }
+
+    try writer.interface.writeAll(
+        \\};
+    );
+
     try writer.interface.flush();
 }
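
The generated s1/s2 arrays form a two-stage lookup table: s1 maps each 256-code-point block to its offset in s2, and s2 holds one decomposition slice per code point (empty when there is none). A minimal consumer sketch, assuming the emitted file is imported under the hypothetical name "compat"; the decompose helper is illustrative and not part of this commit:

    const compat = @import("compat"); // hypothetical import of the generated file

    /// Returns the compatibility decomposition of `cp`, or an empty slice if none.
    /// Sketch only: the two-stage lookup mirrors how the tables above are built.
    pub fn decompose(cp: u21) []const u21 {
        const block_start: usize = compat.s1[cp >> 8]; // stage 1: offset of the 256-entry block
        return compat.s2[block_start + (cp & 0xff)]; // stage 2: per-code-point slice
    }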