diff options
| author | 2024-02-27 09:26:40 -0400 | |
|---|---|---|
| committer | 2024-02-27 09:26:40 -0400 | |
| commit | 32c68059a05dde8a57a330db6d14a32506081516 (patch) | |
| tree | c2b3b9bbbf48330db3570135d371cb92b552f1cb | |
| parent | Using NormData nfkd (diff) | |
| download | zg-32c68059a05dde8a57a330db6d14a32506081516.tar.gz zg-32c68059a05dde8a57a330db6d14a32506081516.tar.xz zg-32c68059a05dde8a57a330db6d14a32506081516.zip | |
Using HangulData in NormData
| -rw-r--r-- | build.zig | 19 | ||||
| -rw-r--r-- | codegen/hangul.zig | 134 | ||||
| -rw-r--r-- | src/HangulData.zig | 52 | ||||
| -rw-r--r-- | src/NormData.zig | 4 | ||||
| -rw-r--r-- | src/Normalizer.zig | 23 |
5 files changed, 219 insertions, 13 deletions
| @@ -52,6 +52,15 @@ pub fn build(b: *std.Build) void { | |||
| 52 | const run_compat_gen_exe = b.addRunArtifact(compat_gen_exe); | 52 | const run_compat_gen_exe = b.addRunArtifact(compat_gen_exe); |
| 53 | const compat_gen_out = run_compat_gen_exe.addOutputFileArg("compat.bin.z"); | 53 | const compat_gen_out = run_compat_gen_exe.addOutputFileArg("compat.bin.z"); |
| 54 | 54 | ||
| 55 | const hangul_gen_exe = b.addExecutable(.{ | ||
| 56 | .name = "hangul", | ||
| 57 | .root_source_file = .{ .path = "codegen/hangul.zig" }, | ||
| 58 | .target = b.host, | ||
| 59 | .optimize = .Debug, | ||
| 60 | }); | ||
| 61 | const run_hangul_gen_exe = b.addRunArtifact(hangul_gen_exe); | ||
| 62 | const hangul_gen_out = run_hangul_gen_exe.addOutputFileArg("hangul.bin.z"); | ||
| 63 | |||
| 55 | const ccc_gen_exe = b.addExecutable(.{ | 64 | const ccc_gen_exe = b.addExecutable(.{ |
| 56 | .name = "ccc", | 65 | .name = "ccc", |
| 57 | .root_source_file = .{ .path = "codegen/ccc.zig" }, | 66 | .root_source_file = .{ .path = "codegen/ccc.zig" }, |
| @@ -133,14 +142,22 @@ pub fn build(b: *std.Build) void { | |||
| 133 | }); | 142 | }); |
| 134 | compat_data.addAnonymousImport("compat", .{ .root_source_file = compat_gen_out }); | 143 | compat_data.addAnonymousImport("compat", .{ .root_source_file = compat_gen_out }); |
| 135 | 144 | ||
| 145 | const hangul_data = b.createModule(.{ | ||
| 146 | .root_source_file = .{ .path = "src/HangulData.zig" }, | ||
| 147 | .target = target, | ||
| 148 | .optimize = optimize, | ||
| 149 | }); | ||
| 150 | hangul_data.addAnonymousImport("hangul", .{ .root_source_file = hangul_gen_out }); | ||
| 151 | |||
| 136 | const norm_data = b.createModule(.{ | 152 | const norm_data = b.createModule(.{ |
| 137 | .root_source_file = .{ .path = "src/NormData.zig" }, | 153 | .root_source_file = .{ .path = "src/NormData.zig" }, |
| 138 | .target = target, | 154 | .target = target, |
| 139 | .optimize = optimize, | 155 | .optimize = optimize, |
| 140 | }); | 156 | }); |
| 141 | norm_data.addImport("CanonData", canon_data); | 157 | norm_data.addImport("CanonData", canon_data); |
| 142 | norm_data.addImport("CompatData", compat_data); | ||
| 143 | norm_data.addImport("CombiningData", ccc_data); | 158 | norm_data.addImport("CombiningData", ccc_data); |
| 159 | norm_data.addImport("CompatData", compat_data); | ||
| 160 | norm_data.addImport("HangulData", hangul_data); | ||
| 144 | 161 | ||
| 145 | const norm = b.addModule("Normalizer", .{ | 162 | const norm = b.addModule("Normalizer", .{ |
| 146 | .root_source_file = .{ .path = "src/Normalizer.zig" }, | 163 | .root_source_file = .{ .path = "src/Normalizer.zig" }, |
diff --git a/codegen/hangul.zig b/codegen/hangul.zig new file mode 100644 index 0000000..ab1a861 --- /dev/null +++ b/codegen/hangul.zig | |||
| @@ -0,0 +1,134 @@ | |||
| 1 | const std = @import("std"); | ||
| 2 | const builtin = @import("builtin"); | ||
| 3 | |||
| 4 | const Syllable = enum { | ||
| 5 | none, | ||
| 6 | L, | ||
| 7 | LV, | ||
| 8 | LVT, | ||
| 9 | V, | ||
| 10 | T, | ||
| 11 | }; | ||
| 12 | |||
| 13 | const block_size = 256; | ||
| 14 | const Block = [block_size]u3; | ||
| 15 | |||
| 16 | const BlockMap = std.HashMap( | ||
| 17 | Block, | ||
| 18 | u16, | ||
| 19 | struct { | ||
| 20 | pub fn hash(_: @This(), k: Block) u64 { | ||
| 21 | var hasher = std.hash.Wyhash.init(0); | ||
| 22 | std.hash.autoHashStrat(&hasher, k, .DeepRecursive); | ||
| 23 | return hasher.final(); | ||
| 24 | } | ||
| 25 | |||
| 26 | pub fn eql(_: @This(), a: Block, b: Block) bool { | ||
| 27 | return std.mem.eql(u3, &a, &b); | ||
| 28 | } | ||
| 29 | }, | ||
| 30 | std.hash_map.default_max_load_percentage, | ||
| 31 | ); | ||
| 32 | |||
| 33 | pub fn main() !void { | ||
| 34 | var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator); | ||
| 35 | defer arena.deinit(); | ||
| 36 | const allocator = arena.allocator(); | ||
| 37 | |||
| 38 | var flat_map = std.AutoHashMap(u21, u3).init(allocator); | ||
| 39 | defer flat_map.deinit(); | ||
| 40 | |||
| 41 | var line_buf: [4096]u8 = undefined; | ||
| 42 | |||
| 43 | // Process HangulSyllableType.txt | ||
| 44 | var in_file = try std.fs.cwd().openFile("data/unicode/HangulSyllableType.txt", .{}); | ||
| 45 | defer in_file.close(); | ||
| 46 | var in_buf = std.io.bufferedReader(in_file.reader()); | ||
| 47 | const in_reader = in_buf.reader(); | ||
| 48 | |||
| 49 | while (try in_reader.readUntilDelimiterOrEof(&line_buf, '\n')) |line| { | ||
| 50 | if (line.len == 0 or line[0] == '#') continue; | ||
| 51 | |||
| 52 | const no_comment = if (std.mem.indexOfScalar(u8, line, '#')) |octo| line[0..octo] else line; | ||
| 53 | |||
| 54 | var field_iter = std.mem.tokenizeAny(u8, no_comment, "; "); | ||
| 55 | var current_code: [2]u21 = undefined; | ||
| 56 | |||
| 57 | var i: usize = 0; | ||
| 58 | while (field_iter.next()) |field| : (i += 1) { | ||
| 59 | switch (i) { | ||
| 60 | 0 => { | ||
| 61 | // Code point(s) | ||
| 62 | if (std.mem.indexOf(u8, field, "..")) |dots| { | ||
| 63 | current_code = .{ | ||
| 64 | try std.fmt.parseInt(u21, field[0..dots], 16), | ||
| 65 | try std.fmt.parseInt(u21, field[dots + 2 ..], 16), | ||
| 66 | }; | ||
| 67 | } else { | ||
| 68 | const code = try std.fmt.parseInt(u21, field, 16); | ||
| 69 | current_code = .{ code, code }; | ||
| 70 | } | ||
| 71 | }, | ||
| 72 | 1 => { | ||
| 73 | // Syllable type | ||
| 74 | const st: Syllable = std.meta.stringToEnum(Syllable, field) orelse .none; | ||
| 75 | for (current_code[0]..current_code[1] + 1) |cp| try flat_map.put(@intCast(cp), @intFromEnum(st)); | ||
| 76 | }, | ||
| 77 | else => {}, | ||
| 78 | } | ||
| 79 | } | ||
| 80 | } | ||
| 81 | |||
| 82 | var blocks_map = BlockMap.init(allocator); | ||
| 83 | defer blocks_map.deinit(); | ||
| 84 | |||
| 85 | var stage1 = std.ArrayList(u16).init(allocator); | ||
| 86 | defer stage1.deinit(); | ||
| 87 | |||
| 88 | var stage2 = std.ArrayList(u3).init(allocator); | ||
| 89 | defer stage2.deinit(); | ||
| 90 | |||
| 91 | var block: Block = [_]u3{0} ** block_size; | ||
| 92 | var block_len: u16 = 0; | ||
| 93 | |||
| 94 | for (0..0x110000) |i| { | ||
| 95 | const cp: u21 = @intCast(i); | ||
| 96 | const st = flat_map.get(cp) orelse 0; | ||
| 97 | |||
| 98 | // Process block | ||
| 99 | block[block_len] = st; | ||
| 100 | block_len += 1; | ||
| 101 | |||
| 102 | if (block_len < block_size and cp != 0x10ffff) continue; | ||
| 103 | |||
| 104 | const gop = try blocks_map.getOrPut(block); | ||
| 105 | if (!gop.found_existing) { | ||
| 106 | gop.value_ptr.* = @intCast(stage2.items.len); | ||
| 107 | try stage2.appendSlice(&block); | ||
| 108 | } | ||
| 109 | |||
| 110 | try stage1.append(gop.value_ptr.*); | ||
| 111 | block_len = 0; | ||
| 112 | } | ||
| 113 | |||
| 114 | var args_iter = try std.process.argsWithAllocator(allocator); | ||
| 115 | defer args_iter.deinit(); | ||
| 116 | _ = args_iter.skip(); | ||
| 117 | const output_path = args_iter.next() orelse @panic("No output file arg!"); | ||
| 118 | |||
| 119 | const compressor = std.compress.deflate.compressor; | ||
| 120 | var out_file = try std.fs.cwd().createFile(output_path, .{}); | ||
| 121 | defer out_file.close(); | ||
| 122 | var out_comp = try compressor(allocator, out_file.writer(), .{ .level = .best_compression }); | ||
| 123 | defer out_comp.deinit(); | ||
| 124 | const writer = out_comp.writer(); | ||
| 125 | |||
| 126 | const endian = builtin.cpu.arch.endian(); | ||
| 127 | try writer.writeInt(u16, @intCast(stage1.items.len), endian); | ||
| 128 | for (stage1.items) |i| try writer.writeInt(u16, i, endian); | ||
| 129 | |||
| 130 | try writer.writeInt(u16, @intCast(stage2.items.len), endian); | ||
| 131 | for (stage2.items) |i| try writer.writeInt(u8, i, endian); | ||
| 132 | |||
| 133 | try out_comp.flush(); | ||
| 134 | } | ||
diff --git a/src/HangulData.zig b/src/HangulData.zig new file mode 100644 index 0000000..4d80c99 --- /dev/null +++ b/src/HangulData.zig | |||
| @@ -0,0 +1,52 @@ | |||
| 1 | const std = @import("std"); | ||
| 2 | const builtin = @import("builtin"); | ||
| 3 | const compress = std.compress; | ||
| 4 | const mem = std.mem; | ||
| 5 | const testing = std.testing; | ||
| 6 | |||
| 7 | pub const Syllable = enum { | ||
| 8 | none, | ||
| 9 | L, | ||
| 10 | LV, | ||
| 11 | LVT, | ||
| 12 | V, | ||
| 13 | T, | ||
| 14 | }; | ||
| 15 | |||
| 16 | allocator: mem.Allocator, | ||
| 17 | s1: []u16 = undefined, | ||
| 18 | s2: []Syllable = undefined, | ||
| 19 | |||
| 20 | const Self = @This(); | ||
| 21 | |||
| 22 | pub fn init(allocator: mem.Allocator) !Self { | ||
| 23 | const decompressor = compress.deflate.decompressor; | ||
| 24 | const in_bytes = @embedFile("hangul"); | ||
| 25 | var in_fbs = std.io.fixedBufferStream(in_bytes); | ||
| 26 | var in_decomp = try decompressor(allocator, in_fbs.reader(), null); | ||
| 27 | defer in_decomp.deinit(); | ||
| 28 | var reader = in_decomp.reader(); | ||
| 29 | |||
| 30 | const endian = builtin.cpu.arch.endian(); | ||
| 31 | var self = Self{ .allocator = allocator }; | ||
| 32 | |||
| 33 | const stage_1_len: u16 = try reader.readInt(u16, endian); | ||
| 34 | self.s1 = try allocator.alloc(u16, stage_1_len); | ||
| 35 | for (0..stage_1_len) |i| self.s1[i] = try reader.readInt(u16, endian); | ||
| 36 | |||
| 37 | const stage_2_len: u16 = try reader.readInt(u16, endian); | ||
| 38 | self.s2 = try allocator.alloc(Syllable, stage_2_len); | ||
| 39 | for (0..stage_2_len) |i| self.s2[i] = @enumFromInt(try reader.readInt(u8, endian)); | ||
| 40 | |||
| 41 | return self; | ||
| 42 | } | ||
| 43 | |||
| 44 | pub fn deinit(self: *Self) void { | ||
| 45 | self.allocator.free(self.s1); | ||
| 46 | self.allocator.free(self.s2); | ||
| 47 | } | ||
| 48 | |||
| 49 | /// Returns the Hangul syllable type for `cp`. | ||
| 50 | pub inline fn syllable(self: Self, cp: u21) Syllable { | ||
| 51 | return self.s2[self.s1[cp >> 8] + (cp & 0xff)]; | ||
| 52 | } | ||
diff --git a/src/NormData.zig b/src/NormData.zig index 83110f0..8923382 100644 --- a/src/NormData.zig +++ b/src/NormData.zig | |||
| @@ -4,10 +4,12 @@ const mem = std.mem; | |||
| 4 | const CanonData = @import("CanonData"); | 4 | const CanonData = @import("CanonData"); |
| 5 | const CccData = @import("CombiningData"); | 5 | const CccData = @import("CombiningData"); |
| 6 | const CompatData = @import("CompatData"); | 6 | const CompatData = @import("CompatData"); |
| 7 | const HangulData = @import("HangulData"); | ||
| 7 | 8 | ||
| 8 | canon_data: CanonData, | 9 | canon_data: CanonData, |
| 9 | ccc_data: CccData, | 10 | ccc_data: CccData, |
| 10 | compat_data: CompatData, | 11 | compat_data: CompatData, |
| 12 | hangul_data: HangulData, | ||
| 11 | 13 | ||
| 12 | const Self = @This(); | 14 | const Self = @This(); |
| 13 | 15 | ||
| @@ -16,6 +18,7 @@ pub fn init(allocator: std.mem.Allocator) !Self { | |||
| 16 | .canon_data = try CanonData.init(allocator), | 18 | .canon_data = try CanonData.init(allocator), |
| 17 | .ccc_data = try CccData.init(allocator), | 19 | .ccc_data = try CccData.init(allocator), |
| 18 | .compat_data = try CompatData.init(allocator), | 20 | .compat_data = try CompatData.init(allocator), |
| 21 | .hangul_data = try HangulData.init(allocator), | ||
| 19 | }; | 22 | }; |
| 20 | } | 23 | } |
| 21 | 24 | ||
| @@ -23,4 +26,5 @@ pub fn deinit(self: *Self) void { | |||
| 23 | self.canon_data.deinit(); | 26 | self.canon_data.deinit(); |
| 24 | self.ccc_data.deinit(); | 27 | self.ccc_data.deinit(); |
| 25 | self.compat_data.deinit(); | 28 | self.compat_data.deinit(); |
| 29 | self.hangul_data.deinit(); | ||
| 26 | } | 30 | } |
diff --git a/src/Normalizer.zig b/src/Normalizer.zig index 1434043..0670cae 100644 --- a/src/Normalizer.zig +++ b/src/Normalizer.zig | |||
| @@ -7,7 +7,6 @@ const testing = std.testing; | |||
| 7 | 7 | ||
| 8 | const CodePointIterator = @import("code_point").Iterator; | 8 | const CodePointIterator = @import("code_point").Iterator; |
| 9 | const case_fold_map = @import("ziglyph").case_folding; | 9 | const case_fold_map = @import("ziglyph").case_folding; |
| 10 | const hangul_map = @import("ziglyph").hangul; | ||
| 11 | const norm_props = @import("ziglyph").normalization_props; | 10 | const norm_props = @import("ziglyph").normalization_props; |
| 12 | 11 | ||
| 13 | pub const NormData = @import("NormData"); | 12 | pub const NormData = @import("NormData"); |
| @@ -17,9 +16,9 @@ norm_data: *NormData, | |||
| 17 | const Self = @This(); | 16 | const Self = @This(); |
| 18 | 17 | ||
| 19 | // Hangul processing utilities. | 18 | // Hangul processing utilities. |
| 20 | fn isHangulPrecomposed(cp: u21) bool { | 19 | fn isHangulPrecomposed(self: Self, cp: u21) bool { |
| 21 | if (hangul_map.syllableType(cp)) |kind| return kind == .LV or kind == .LVT; | 20 | const kind = self.norm_data.hangul_data.syllable(cp); |
| 22 | return false; | 21 | return kind == .LV or kind == .LVT; |
| 23 | } | 22 | } |
| 24 | 23 | ||
| 25 | const SBase: u21 = 0xAC00; | 24 | const SBase: u21 = 0xAC00; |
| @@ -117,7 +116,7 @@ pub fn decompose(self: Self, cp: u21, form: Form) Decomp { | |||
| 117 | } | 116 | } |
| 118 | 117 | ||
| 119 | // Hangul precomposed syllable full decomposition. | 118 | // Hangul precomposed syllable full decomposition. |
| 120 | if (isHangulPrecomposed(cp)) { | 119 | if (self.isHangulPrecomposed(cp)) { |
| 121 | const cps = decomposeHangul(cp); | 120 | const cps = decomposeHangul(cp); |
| 122 | @memcpy(dc.cps[0..cps.len], &cps); | 121 | @memcpy(dc.cps[0..cps.len], &cps); |
| 123 | return dc; | 122 | return dc; |
| @@ -335,12 +334,12 @@ test "nfkd !ASCII / alloc" { | |||
| 335 | 334 | ||
| 336 | // Composition utilities. | 335 | // Composition utilities. |
| 337 | 336 | ||
| 338 | fn isHangul(cp: u21) bool { | 337 | fn isHangul(self: Self, cp: u21) bool { |
| 339 | return cp >= 0x1100 and hangul_map.syllableType(cp) != null; | 338 | return cp >= 0x1100 and self.norm_data.hangul_data.syllable(cp) != .none; |
| 340 | } | 339 | } |
| 341 | 340 | ||
| 342 | fn isNonHangulStarter(self: Self, cp: u21) bool { | 341 | fn isNonHangulStarter(self: Self, cp: u21) bool { |
| 343 | return !isHangul(cp) and self.norm_data.ccc_data.isStarter(cp); | 342 | return !self.isHangul(cp) and self.norm_data.ccc_data.isStarter(cp); |
| 344 | } | 343 | } |
| 345 | 344 | ||
| 346 | /// Normalizes `str` to NFC. | 345 | /// Normalizes `str` to NFC. |
| @@ -395,7 +394,7 @@ fn nfxc(self: Self, allocator: std.mem.Allocator, str: []const u8, form: Form) ! | |||
| 395 | for (d_list.items[(j + 1)..i]) |B| { | 394 | for (d_list.items[(j + 1)..i]) |B| { |
| 396 | const cc_B = self.norm_data.ccc_data.ccc(B); | 395 | const cc_B = self.norm_data.ccc_data.ccc(B); |
| 397 | // Check for blocking conditions. | 396 | // Check for blocking conditions. |
| 398 | if (isHangul(C)) { | 397 | if (self.isHangul(C)) { |
| 399 | if (cc_B != 0 or self.isNonHangulStarter(B)) continue :block_check; | 398 | if (cc_B != 0 or self.isNonHangulStarter(B)) continue :block_check; |
| 400 | } | 399 | } |
| 401 | if (cc_B >= cc_C) continue :block_check; | 400 | if (cc_B >= cc_C) continue :block_check; |
| @@ -414,9 +413,9 @@ fn nfxc(self: Self, allocator: std.mem.Allocator, str: []const u8, form: Form) ! | |||
| 414 | const L = d_list.items[sidx]; | 413 | const L = d_list.items[sidx]; |
| 415 | var processed_hangul = false; | 414 | var processed_hangul = false; |
| 416 | 415 | ||
| 417 | if (isHangul(L) and isHangul(C)) { | 416 | if (self.isHangul(L) and self.isHangul(C)) { |
| 418 | const l_stype = hangul_map.syllableType(L).?; | 417 | const l_stype = self.norm_data.hangul_data.syllable(L); |
| 419 | const c_stype = hangul_map.syllableType(C).?; | 418 | const c_stype = self.norm_data.hangul_data.syllable(C); |
| 420 | 419 | ||
| 421 | if (l_stype == .LV and c_stype == .T) { | 420 | if (l_stype == .LV and c_stype == .T) { |
| 422 | // LV, T | 421 | // LV, T |