author     Sam Atman  2025-04-30 12:58:26 -0400
committer  Sam Atman  2025-04-30 13:01:37 -0400
commit     3c2c30bfbe861c6c48acd8d7507886787197a788 (patch)
tree       875ba35c1954b201207452b18a189ebd70c0b596 /src/FoldData.zig
parent     grapheme now Graphemes, Data files gone (diff)
Merge NormData with Normalize
Diffstat (limited to 'src/FoldData.zig')
-rw-r--r--  src/FoldData.zig | 78
1 file changed, 39 insertions(+), 39 deletions(-)
diff --git a/src/FoldData.zig b/src/FoldData.zig
index e44e714..b7fdceb 100644
--- a/src/FoldData.zig
+++ b/src/FoldData.zig
@@ -12,9 +12,9 @@ stage1: []u8 = undefined,
 stage2: []u8 = undefined,
 stage3: []i24 = undefined,
 
-const Self = @This();
+const FoldData = @This();
 
-pub fn init(allocator: mem.Allocator) !Self {
+pub fn init(allocator: mem.Allocator) !FoldData {
     const decompressor = compress.flate.inflate.decompressor;
     const in_bytes = @embedFile("fold");
     var in_fbs = std.io.fixedBufferStream(in_bytes);
@@ -23,61 +23,61 @@ pub fn init(allocator: mem.Allocator) !Self {
 
     const endian = builtin.cpu.arch.endian();
 
-    var self = Self{};
-    self.cutoff = @intCast(try reader.readInt(u24, endian));
-    self.multiple_start = @intCast(try reader.readInt(u24, endian));
+    var fdata = FoldData{};
+    fdata.cutoff = @intCast(try reader.readInt(u24, endian));
+    fdata.multiple_start = @intCast(try reader.readInt(u24, endian));
 
     var len = try reader.readInt(u16, endian);
-    self.stage1 = try allocator.alloc(u8, len);
-    errdefer allocator.free(self.stage1);
-    for (0..len) |i| self.stage1[i] = try reader.readInt(u8, endian);
+    fdata.stage1 = try allocator.alloc(u8, len);
+    errdefer allocator.free(fdata.stage1);
+    for (0..len) |i| fdata.stage1[i] = try reader.readInt(u8, endian);
 
     len = try reader.readInt(u16, endian);
-    self.stage2 = try allocator.alloc(u8, len);
-    errdefer allocator.free(self.stage2);
-    for (0..len) |i| self.stage2[i] = try reader.readInt(u8, endian);
+    fdata.stage2 = try allocator.alloc(u8, len);
+    errdefer allocator.free(fdata.stage2);
+    for (0..len) |i| fdata.stage2[i] = try reader.readInt(u8, endian);
 
     len = try reader.readInt(u16, endian);
-    self.stage3 = try allocator.alloc(i24, len);
-    errdefer allocator.free(self.stage3);
-    for (0..len) |i| self.stage3[i] = try reader.readInt(i24, endian);
+    fdata.stage3 = try allocator.alloc(i24, len);
+    errdefer allocator.free(fdata.stage3);
+    for (0..len) |i| fdata.stage3[i] = try reader.readInt(i24, endian);
 
-    self.cwcf_exceptions_min = @intCast(try reader.readInt(u24, endian));
-    self.cwcf_exceptions_max = @intCast(try reader.readInt(u24, endian));
+    fdata.cwcf_exceptions_min = @intCast(try reader.readInt(u24, endian));
+    fdata.cwcf_exceptions_max = @intCast(try reader.readInt(u24, endian));
     len = try reader.readInt(u16, endian);
-    self.cwcf_exceptions = try allocator.alloc(u21, len);
-    errdefer allocator.free(self.cwcf_exceptions);
-    for (0..len) |i| self.cwcf_exceptions[i] = @intCast(try reader.readInt(u24, endian));
+    fdata.cwcf_exceptions = try allocator.alloc(u21, len);
+    errdefer allocator.free(fdata.cwcf_exceptions);
+    for (0..len) |i| fdata.cwcf_exceptions[i] = @intCast(try reader.readInt(u24, endian));
 
-    return self;
+    return fdata;
 }
 
-pub fn deinit(self: *const Self, allocator: mem.Allocator) void {
-    allocator.free(self.stage1);
-    allocator.free(self.stage2);
-    allocator.free(self.stage3);
-    allocator.free(self.cwcf_exceptions);
+pub fn deinit(fdata: *const FoldData, allocator: mem.Allocator) void {
+    allocator.free(fdata.stage1);
+    allocator.free(fdata.stage2);
+    allocator.free(fdata.stage3);
+    allocator.free(fdata.cwcf_exceptions);
 }
 
 /// Returns the case fold for `cp`.
-pub fn caseFold(self: Self, cp: u21, buf: []u21) []const u21 {
-    if (cp >= self.cutoff) return &.{};
+pub fn caseFold(fdata: *const FoldData, cp: u21, buf: []u21) []const u21 {
+    if (cp >= fdata.cutoff) return &.{};
 
-    const stage1_val = self.stage1[cp >> 8];
+    const stage1_val = fdata.stage1[cp >> 8];
     if (stage1_val == 0) return &.{};
 
     const stage2_index = @as(usize, stage1_val) * 256 + (cp & 0xFF);
-    const stage3_index = self.stage2[stage2_index];
+    const stage3_index = fdata.stage2[stage2_index];
 
     if (stage3_index & 0x80 != 0) {
-        const real_index = @as(usize, self.multiple_start) + (stage3_index ^ 0x80) * 3;
-        const mapping = mem.sliceTo(self.stage3[real_index..][0..3], 0);
+        const real_index = @as(usize, fdata.multiple_start) + (stage3_index ^ 0x80) * 3;
+        const mapping = mem.sliceTo(fdata.stage3[real_index..][0..3], 0);
         for (mapping, 0..) |c, i| buf[i] = @intCast(c);
 
         return buf[0..mapping.len];
     }
 
-    const offset = self.stage3[stage3_index];
+    const offset = fdata.stage3[stage3_index];
     if (offset == 0) return &.{};
 
     buf[0] = @intCast(@as(i32, cp) + offset);
@@ -86,14 +86,14 @@ pub fn caseFold(self: Self, cp: u21, buf: []u21) []const u21 {
 }
 
 /// Returns true when caseFold(NFD(`cp`)) != NFD(`cp`).
-pub fn changesWhenCaseFolded(self: Self, cp: u21) bool {
+pub fn changesWhenCaseFolded(fdata: *const FoldData, cp: u21) bool {
     var buf: [3]u21 = undefined;
-    const has_mapping = self.caseFold(cp, &buf).len != 0;
-    return has_mapping and !self.isCwcfException(cp);
+    const has_mapping = fdata.caseFold(cp, &buf).len != 0;
+    return has_mapping and !fdata.isCwcfException(cp);
 }
 
-fn isCwcfException(self: Self, cp: u21) bool {
-    return cp >= self.cwcf_exceptions_min and
-        cp <= self.cwcf_exceptions_max and
-        std.mem.indexOfScalar(u21, self.cwcf_exceptions, cp) != null;
+fn isCwcfException(fdata: *const FoldData, cp: u21) bool {
+    return cp >= fdata.cwcf_exceptions_min and
+        cp <= fdata.cwcf_exceptions_max and
+        std.mem.indexOfScalar(u21, fdata.cwcf_exceptions, cp) != null;
 }
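
For context, a minimal usage sketch of the API as it stands after this commit, based only on the signatures visible in the diff above. The import path, allocator choice, and sample code points are illustrative assumptions, and the program assumes the build embeds the compressed "fold" table that init decompresses.

const std = @import("std");
// Illustrative import path; in zg the module is normally wired up through the build system.
const FoldData = @import("FoldData.zig");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // init reads the embedded, compressed "fold" data into the three lookup stages.
    const fold_data = try FoldData.init(allocator);
    defer fold_data.deinit(allocator);

    // A full case fold is at most three code points, matching the [3]u21
    // buffer changesWhenCaseFolded uses internally.
    var buf: [3]u21 = undefined;
    const folded = fold_data.caseFold(0x0130, &buf); // U+0130 folds to U+0069 U+0307
    std.debug.print("U+0130 folds to {d} code point(s)\n", .{folded.len});

    std.debug.print("'A' changes when case folded: {}\n", .{fold_data.changesWhenCaseFolded('A')});
}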