1use super::internal::{at::*, *};
2use super::{buffer::*, feature::*, Direction};
3use crate::text::Script;
4
5use alloc::vec::Vec;
6use core::ops::Range;
7
/// Index of a feature within a shaping stage; used as a bit position in a
/// `FeatureMask`.
pub type FeatureBit = u16;
9
/// Fixed-size 256-bit set keyed by `FeatureBit`.
///
/// NOTE(review): bit indices must stay below 256; larger values index past
/// the four 64-bit words and panic — confirm producers respect this bound.
#[derive(Copy, Clone, Default)]
pub struct FeatureMask {
    bits: [u64; 4],
}

impl FeatureMask {
    /// Splits a bit index into a (word index, single-bit mask) pair.
    #[inline]
    fn locate(bit: u16) -> (usize, u64) {
        (bit as usize / 64, 1u64 << (bit & 63))
    }

    /// Returns true when no bit is set.
    pub fn is_empty(&self) -> bool {
        self.bits == [0u64; 4]
    }

    /// Sets the given bit.
    pub fn set(&mut self, bit: u16) {
        let (word, mask) = Self::locate(bit);
        self.bits[word] |= mask;
    }

    /// Clears the given bit.
    pub fn clear(&mut self, bit: u16) {
        let (word, mask) = Self::locate(bit);
        self.bits[word] &= !mask;
    }

    /// Returns true when the given bit is set.
    pub fn test(&self, bit: u16) -> bool {
        let (word, mask) = Self::locate(bit);
        self.bits[word] & mask != 0
    }
}

impl core::ops::BitOr for FeatureMask {
    type Output = Self;

    /// Returns the union of the two masks.
    fn bitor(self, other: Self) -> Self {
        let mut result = self;
        result |= other;
        result
    }
}

impl core::ops::BitOrAssign for FeatureMask {
    /// Merges `other` into `self`, word by word.
    fn bitor_assign(&mut self, other: Self) {
        for (dst, src) in self.bits.iter_mut().zip(other.bits.iter()) {
            *dst |= *src;
        }
    }
}
58
59#[derive(Copy, Clone, Default)]
61pub struct FeatureGroups {
62 pub default: FeatureMask,
63 pub reph: Option<FeatureBit>,
64 pub pref: Option<FeatureBit>,
65 pub stage1: FeatureMask,
66 pub stage2: FeatureMask,
67 pub basic: FeatureMask,
68 pub position: FeatureMask,
69 pub vert: FeatureMask,
70 pub rtl: FeatureMask,
71}
72
73impl From<Option<FeatureBit>> for FeatureMask {
74 fn from(bit: Option<u16>) -> Self {
75 bit.map(|bit| {
76 let mut mask = FeatureMask::default();
77 mask.set(bit);
78 mask
79 })
80 .unwrap_or_default()
81 }
82}
83
84#[derive(Copy, Clone, Default)]
86pub struct StageOffsets {
87 pub base: u32,
88 pub lang: u32,
89 pub var: u32,
90}
91
92impl StageOffsets {
93 pub fn new(
94 b: &Bytes,
95 base: u32,
96 script: RawTag,
97 lang: Option<RawTag>,
98 ) -> Option<(Self, [RawTag; 2])> {
99 let (lang, tags) = language_or_default_by_tags(b, base, script, lang)?;
100 let var = feature_var_offset(b, base);
101 Some((Self { base, lang, var }, tags))
102 }
103}
104
/// Maximum number of features cached per stage; longer feature lists are
/// truncated and `FeatureStore::truncated` is set.
const MAX_CACHED_FEATURES: usize = 256;

/// Bound on nested lookup recursion — presumably enforced where nested
/// contextual lookups are applied; not referenced in this chunk.
const MAX_NESTED_LOOKUPS: usize = 4;
/// Maximum matched-sequence length (size of the ids/indices scratch use).
const MAX_SEQUENCE: usize = 32;
110
111#[derive(Clone, Default)]
114pub struct FeatureStore {
115 pub sub_features: Vec<(RawTag, FeatureBit)>,
116 pub pos_features: Vec<(RawTag, FeatureBit)>,
117 pub lookups: Vec<LookupData>,
118 pub subtables: Vec<SubtableData>,
119 pub coverage: Vec<u16>,
120 pub pos_start: usize,
121 pub sub_count: usize,
122 pub truncated: bool,
123 pub groups: FeatureGroups,
124}
125
126impl FeatureStore {
127 pub fn clear(&mut self) {
128 self.sub_features.clear();
129 self.pos_features.clear();
130 self.lookups.clear();
131 self.subtables.clear();
132 self.coverage.clear();
133 self.pos_start = 0;
134 self.sub_count = 0;
135 self.truncated = false;
136 self.groups = FeatureGroups::default();
137 }
138
139 pub fn sub_bit(&self, feature: RawTag) -> Option<FeatureBit> {
140 match self.sub_features.binary_search_by(|x| x.0.cmp(&feature)) {
141 Ok(index) => Some(self.sub_features[index].1),
142 _ => None,
143 }
144 }
145
146 pub fn pos_bit(&self, feature: RawTag) -> Option<FeatureBit> {
147 match self.pos_features.binary_search_by(|x| x.0.cmp(&feature)) {
148 Ok(index) => Some(self.pos_features[index].1),
149 _ => None,
150 }
151 }
152
153 pub fn custom_masks(
156 &self,
157 features: &[(RawTag, u16)],
158 sub_args: &mut Vec<u16>,
159 pos_args: &mut Vec<u16>,
160 dir: Direction,
161 ) -> (FeatureMask, FeatureMask) {
162 let sub_count = self.sub_features.len();
163 sub_args.clear();
164 sub_args.resize(sub_count, 0);
165 let pos_count = self.pos_features.len();
166 pos_args.clear();
167 pos_args.resize(pos_count, 0);
168 let mut sub = self.groups.basic;
169 if dir == Direction::RightToLeft {
170 sub |= self.groups.rtl;
171 }
172 let sub = Self::custom_masks_for_stage(
173 &self.sub_features,
174 features,
175 self.groups.basic,
176 sub_args.as_mut_slice(),
177 );
178 let pos = Self::custom_masks_for_stage(
179 &self.pos_features,
180 features,
181 self.groups.position,
182 pos_args.as_mut_slice(),
183 );
184 (sub, pos)
185 }
186
187 fn custom_masks_for_stage(
188 stage_features: &[(RawTag, FeatureBit)],
189 requested_features: &[(RawTag, u16)],
190 mut mask: FeatureMask,
191 args: &mut [u16],
192 ) -> FeatureMask {
193 for req_feature in requested_features {
194 if let Ok(index) = stage_features.binary_search_by(|x| x.0.cmp(&req_feature.0)) {
195 let stage_feature = stage_features[index];
196 let bit_ix = stage_feature.1;
197 let arg = req_feature.1;
198 args[bit_ix as usize] = arg;
199 if arg != 0 {
200 mask.set(bit_ix);
201 } else {
202 mask.clear(bit_ix);
203 }
204 }
205 }
206 mask
207 }
208
    /// Computes the per-stage feature groups for `script`.
    ///
    /// Vertical (vrt2/vert) and right-to-left (rtlm) masks are collected
    /// first; the remaining grouping depends on whether the script is
    /// complex (staged Indic-style application, with Myanmar special
    /// cased), Hangul, joined (Arabic-style), or simple.
    pub fn groups(&self, script: Script) -> FeatureGroups {
        let mut g = FeatureGroups::default();
        feature_masks(self, Some(&mut g.vert), Some(&mut g.position), &[VRT2]);
        feature_masks(self, Some(&mut g.rtl), Some(&mut g.position), &[RTLM]);
        // Prefer vrt2; fall back to vert only when vrt2 is absent.
        if g.vert.is_empty() {
            feature_masks(self, Some(&mut g.vert), Some(&mut g.position), &[VERT]);
        }
        if script.is_complex() {
            match script {
                Script::Myanmar => {
                    feature_masks(
                        self,
                        Some(&mut g.default),
                        Some(&mut g.position),
                        &[CALT, CCMP, LOCL, RVRN],
                    );
                    g.reph = self.sub_bit(RPHF);
                    g.pref = self.sub_bit(PREF);
                    feature_masks(
                        self,
                        Some(&mut g.stage1),
                        Some(&mut g.position),
                        &[BLWF, PSTF],
                    );
                    feature_masks(
                        self,
                        Some(&mut g.stage2),
                        Some(&mut g.position),
                        &[PRES, ABVS, BLWS, PSTS],
                    );
                    feature_masks(
                        self,
                        Some(&mut g.basic),
                        Some(&mut g.position),
                        &[DIST, KERN, MARK, MKMK],
                    );
                }
                // All other complex scripts share the generic staged set.
                _ => {
                    feature_masks(
                        self,
                        Some(&mut g.default),
                        Some(&mut g.position),
                        &[AKHN, CALT, CCMP, LOCL, NUKT, RVRN],
                    );
                    g.reph = self.sub_bit(RPHF);
                    g.pref = self.sub_bit(PREF);
                    feature_masks(
                        self,
                        Some(&mut g.stage1),
                        Some(&mut g.position),
                        &[ABVF, BLWF, CJCT, HALF, PSTF, RKRF, VATU],
                    );
                    if script.is_joined() {
                        feature_masks(
                            self,
                            Some(&mut g.stage2),
                            Some(&mut g.position),
                            &[FIN2, FIN3, FINA, INIT, ISOL, MED2, MEDI],
                        );
                    }
                    feature_masks(
                        self,
                        Some(&mut g.basic),
                        Some(&mut g.position),
                        &[ABVS, BLWS, CALT, CLIG, HALN, LIGA, PRES, PSTS, RCLT, RLIG],
                    );
                    feature_masks(
                        self,
                        Some(&mut g.basic),
                        Some(&mut g.position),
                        &[ABVM, BLWM, CURS, DIST, KERN, MARK, MKMK],
                    );
                }
            }
        } else {
            match script {
                Script::Hangul => {
                    feature_masks(
                        self,
                        Some(&mut g.basic),
                        Some(&mut g.position),
                        &[CCMP, LJMO, RVRN, TJMO, VJMO],
                    );
                }
                _ => {
                    // Simple scripts: one basic set, extended with the
                    // joining features when the script is cursive.
                    if script.is_joined() {
                        feature_masks(
                            self,
                            Some(&mut g.basic),
                            Some(&mut g.position),
                            &[
                                CALT, CCMP, CLIG, FIN2, FIN3, FINA, INIT, ISOL, LIGA, LOCL, MED2,
                                MEDI, MSET, RLIG, RVRN,
                            ],
                        );
                    } else {
                        feature_masks(
                            self,
                            Some(&mut g.basic),
                            Some(&mut g.position),
                            &[CALT, CCMP, CLIG, LIGA, LOCL, RVRN],
                        );
                    }
                    feature_masks(
                        self,
                        Some(&mut g.basic),
                        Some(&mut g.position),
                        &[CURS, DIST, KERN, MARK, MKMK],
                    );
                }
            }
        }
        g
    }
323
    /// Tests a glyph against a cached coverage bitmap produced by
    /// `CoverageBuilder::finish`.
    ///
    /// `key` indexes into `self.coverage`; the sentinel `!0` means "no
    /// cached coverage" and always matches. The cache layout at `key` is
    /// `[first_glyph, last_glyph, bitmap words (16 glyphs per word)...]`.
    fn test(&self, key: u32, glyph_id: u16) -> bool {
        if key == !0 {
            return true;
        }
        let cache = &self.coverage;
        let base = key as usize;
        let first = cache[base];
        // Inside [first, last]: consult the per-glyph bit.
        if glyph_id >= first && glyph_id <= cache[base + 1] {
            let bit = glyph_id - first;
            let idx = base + 2 + bit as usize / 16;
            cache[idx] & (1 << (bit & 15)) != 0
        } else {
            false
        }
    }
339
340 }
345
346fn feature_masks(
347 store: &FeatureStore,
348 sub_mask: Option<&mut FeatureMask>,
349 pos_mask: Option<&mut FeatureMask>,
350 features: &[RawTag],
351) {
352 if let Some(sub_mask) = sub_mask {
353 for feature in features {
354 if let Some(bit) = store.sub_bit(*feature) {
355 sub_mask.set(bit);
356 }
357 }
358 }
359 if let Some(pos_mask) = pos_mask {
360 for feature in features {
361 if let Some(bit) = store.pos_bit(*feature) {
362 pos_mask.set(bit);
363 }
364 }
365 }
366}
367
/// Reusable scratch state for building a `FeatureStore`.
#[derive(Default)]
pub struct FeatureStoreBuilder {
    // Accumulated `(lookup index, feature bit, shaping mask)` triples.
    indices: Vec<(u16, FeatureBit, u8)>,
    // Coverage accumulator shared across lookups.
    coverage: CoverageBuilder,
    // Next feature bit to assign within the current stage.
    next_bit: FeatureBit,
}
375
376impl FeatureStoreBuilder {
377 pub fn build(
378 &mut self,
379 cache: &mut FeatureStore,
380 data: &[u8],
381 coords: &[i16],
382 gdef: &Gdef,
383 gsub: &StageOffsets,
384 gpos: &StageOffsets,
385 ) {
386 let b = Bytes::new(data);
387 cache.clear();
388 if gsub.base != 0 {
389 self.build_stage(cache, &b, coords, gdef, gsub, 0);
390 cache.sub_features.sort_unstable_by(|a, b| a.0.cmp(&b.0));
391 }
392 cache.sub_count = cache.sub_features.len();
393 cache.pos_start = cache.lookups.len();
394 if gpos.base != 0 {
395 self.build_stage(cache, &b, coords, gdef, gpos, 1);
396 cache.pos_features.sort_unstable_by(|a, b| a.0.cmp(&b.0));
397 }
398 }
399
    /// Caches the features and lookups of one stage (GSUB when
    /// `stage == 0`, GPOS otherwise) into `cache`.
    ///
    /// Walks the language system's feature indices, assigns each feature
    /// a bit, resolves feature variations, then materializes lookup and
    /// subtable data ordered by lookup index. Returns `None` only on
    /// malformed data (short reads).
    fn build_stage(
        &mut self,
        cache: &mut FeatureStore,
        b: &Bytes,
        coords: &[i16],
        gdef: &Gdef,
        offsets: &StageOffsets,
        stage: u8,
    ) -> Option<()> {
        self.next_bit = 0;
        self.indices.clear();
        let gdef = if gdef.ok() { Some(gdef) } else { None };
        let base = offsets.base;
        let lbase = offsets.lang as usize;
        // Header offsets 6/8 hold the feature list and lookup list.
        let list_base = b.read_u16(base as usize + 8)? as u32 + base;
        let vars = FeatureSubsts::new(b, offsets.var, coords);
        let fbase = b.read_u16(base as usize + 6)? as usize + base as usize;
        let count = b.read_u16(lbase + 4)? as usize;
        // Cap the cached feature count; record that truncation happened.
        let actual_count = count.min(MAX_CACHED_FEATURES);
        if actual_count < count {
            cache.truncated = true;
        }
        let features = if stage == 0 {
            &mut cache.sub_features
        } else {
            &mut cache.pos_features
        };
        for i in 0..actual_count {
            let findex = b.read_u16(lbase + 6 + i * 2)? as usize;
            // Feature records are 6 bytes: 4-byte tag + 2-byte offset.
            let rec = fbase + 2 + findex * 6;
            let ftag = b.read_u32(rec)?;
            let fbit = self.next_bit;
            self.next_bit += 1;
            // Joining/jamo features carry a shaping mask matched against
            // per-glyph state during substitution.
            let mask = if stage == 0 {
                match ftag {
                    ISOL => ISOL_MASK,
                    INIT => INIT_MASK,
                    MEDI => MEDI_MASK,
                    FINA => FINA_MASK,
                    MED2 => MED2_MASK,
                    FIN2 => FIN2_MASK,
                    FIN3 => FIN3_MASK,
                    LJMO => LJMO_MASK,
                    VJMO => VJMO_MASK,
                    TJMO => TJMO_MASK,
                    _ => 0,
                }
            } else {
                0
            };
            features.push((ftag, fbit));
            // Feature variations may substitute an alternate feature
            // table for this variation-space position.
            let foffset = if let Some(v) = vars {
                if let Some(offset) = v.apply(b, findex as u16) {
                    offset
                } else {
                    fbase + b.read::<u16>(rec + 4)? as usize
                }
            } else {
                fbase + b.read::<u16>(rec + 4)? as usize
            };
            let lcount = b.read_u16(foffset + 2)? as usize;
            for i in 0..lcount {
                let lookup_index = b.read_u16(foffset + 4 + i * 2)?;
                self.indices.push((lookup_index, fbit, mask));
            }
        }
        // Sort by lookup index so duplicates (the same lookup referenced
        // by several features) become adjacent.
        self.indices.sort_unstable_by(|a, b| a.0.cmp(&b.0));
        let mut last_index = None;
        for (index, feature, mask) in &self.indices {
            if last_index == Some(*index) {
                // Duplicate lookup: clone the previous entry with the new
                // owning feature so each feature can gate it separately.
                let mut lookup = *cache.lookups.last().unwrap();
                lookup.feature = *feature;
                cache.lookups.push(lookup);
                continue;
            }
            if let Some(ref mut lookup) = lookup_data(b, stage, list_base, *index, *mask, gdef) {
                let start = cache.subtables.len();
                self.coverage.begin();
                if Self::collect_subtables(b, cache, &mut self.coverage, lookup) == Some(true) {
                    lookup.coverage = self.coverage.finish(&mut cache.coverage);
                    lookup.feature = *feature;
                    cache.lookups.push(*lookup);
                    last_index = Some(*index);
                } else {
                    // No usable subtables; roll back anything collected.
                    cache.subtables.truncate(start);
                }
            }
        }
        Some(())
    }
493
    /// Collects the subtables of a single lookup into `cache.subtables`,
    /// accumulating each subtable's coverage into `coverage`. Returns
    /// `Some(true)` when at least one subtable was recognized and cached.
    fn collect_subtables(
        b: &Bytes,
        cache: &mut FeatureStore,
        coverage: &mut CoverageBuilder,
        lookup: &mut LookupData,
    ) -> Option<bool> {
        let start = cache.subtables.len();
        // Subtable ranges are stored as u16 pairs; bail rather than wrap.
        if start >= u16::MAX as usize {
            return None;
        }
        lookup.subtables.0 = start as u16;
        let base = lookup.offset as usize;
        // Subtable offsets begin after the 6-byte lookup table header.
        let subtable_base = base + 6;
        let count = lookup.count as usize;
        let ext = lookup.is_ext;
        let kind = lookup.kind;
        for i in 0..count {
            let mut subtable = base + b.read::<u16>(subtable_base + i * 2)? as usize;
            if ext {
                // Extension lookup: follow the 32-bit indirection to the
                // real subtable.
                subtable = subtable + b.read::<u32>(subtable + 4)? as usize;
            }
            let fmt = b.read::<u16>(subtable)?;
            // Unrecognized kind/format pairs are silently dropped.
            if let Some(ref s) = subtable_data(b, subtable as u32, kind, fmt) {
                coverage.add_coverage(b, s.offset as usize + s.coverage as usize)?;
                cache.subtables.push(*s);
            }
        }
        let end = cache.subtables.len();
        if end >= u16::MAX as usize {
            return None;
        }
        lookup.subtables.1 = end as u16;
        Some(lookup.subtables.1 > lookup.subtables.0)
    }
528}
529
/// Accumulates the union of a lookup's coverage tables so it can be
/// packed into the `FeatureStore::coverage` bitmap cache.
#[derive(Default)]
struct CoverageBuilder {
    // Set of covered glyph ids.
    coverage: BitSet,
    // Smallest glyph id seen (u16::MAX when empty).
    min: u16,
    // Largest glyph id seen (0 when empty).
    max: u16,
}
536
impl CoverageBuilder {
    /// Resets the builder for a new lookup's coverage union.
    fn begin(&mut self) {
        self.coverage.clear();
        self.min = u16::MAX;
        self.max = 0;
    }

    /// Parses the OpenType coverage table at `base` and adds every
    /// covered glyph. Supports format 1 (glyph array) and format 2
    /// (inclusive range records); any other format returns `None`,
    /// aborting the current lookup.
    fn add_coverage(&mut self, b: &Bytes, base: usize) -> Option<()> {
        let fmt = b.read::<u16>(base)?;
        let len = b.read::<u16>(base + 2)? as usize;
        let arr = base + 4;
        if fmt == 1 {
            // Format 1: a flat array of glyph ids.
            for g in b.read_array::<u16>(arr, len)?.iter() {
                self.add(g);
            }
        } else if fmt == 2 {
            // Format 2: 6-byte records of inclusive glyph ranges.
            for i in 0..len {
                let rec = arr + i * 6;
                let first = b.read::<u16>(rec)?;
                let last = b.read::<u16>(rec + 2)?;
                for g in first..=last {
                    self.add(g);
                }
            }
        } else {
            return None;
        }
        Some(())
    }

    /// Appends the accumulated set to `coverage` as
    /// `[min, max, bitmap words...]` (16 glyphs per word over the
    /// `min..=max` range) and returns the start index as the cache key.
    ///
    /// NOTE(review): with an empty set `self.max - self.min` underflows
    /// (min is u16::MAX) — presumably callers only invoke this after a
    /// successful, non-empty collection; confirm at call sites.
    fn finish(&self, coverage: &mut Vec<u16>) -> u32 {
        let key = coverage.len() as u32;
        coverage.push(self.min);
        coverage.push(self.max);
        let bit_base = coverage.len();
        let range_len = (self.max - self.min) as usize + 1;
        coverage.resize(coverage.len() + (range_len + 15) / 16, 0);
        for g in &self.coverage.list {
            let bit = g - self.min;
            let idx = bit_base + bit as usize / 16;
            coverage[idx] |= 1 << (bit & 15);
        }
        key
    }

    /// Adds a single glyph, tracking the min/max bounds.
    #[inline]
    fn add(&mut self, glyph_id: u16) {
        if self.coverage.insert(glyph_id) {
            self.min = glyph_id.min(self.min);
            self.max = glyph_id.max(self.max);
        }
    }
}
590
/// A set of `u16` values backed by a bitmap plus an insertion-order list.
///
/// The `bits` side gives O(1) membership checks while `list` allows cheap
/// iteration over just the inserted values.
#[derive(Default)]
pub struct BitSet {
    list: Vec<u16>,
    bits: Vec<u64>,
}

impl BitSet {
    /// Empties the set, keeping both allocations for reuse.
    pub fn clear(&mut self) {
        self.list.clear();
        for word in self.bits.iter_mut() {
            *word = 0;
        }
    }

    /// Inserts `value`; returns `true` if it was not already present.
    pub fn insert(&mut self, value: u16) -> bool {
        let index = value as usize / 64;
        let bit = 1u64 << (value as usize & 63);
        if index >= self.bits.len() {
            // Grow with headroom to amortize future insertions.
            self.bits.resize(index + 8, 0);
        } else if self.bits[index] & bit != 0 {
            return false;
        }
        self.bits[index] |= bit;
        self.list.push(value);
        true
    }
}
627
/// Applies every lookup of one stage (0 = GSUB, 1 = GPOS) whose owning
/// feature is enabled in `feature_mask`, over `buffer_range` (defaulting
/// to the whole buffer).
///
/// Returns `Some(true)` when at least one lookup applied, `Some(false)`
/// when nothing was applicable, and `None` on inconsistent cached data.
pub fn apply(
    stage: u8,
    data: &Bytes,
    gsubgpos: u32,
    coords: &[i16],
    gdef: &Gdef,
    storage: &mut Storage,
    cache: &FeatureStore,
    feature_mask: FeatureMask,
    buffer: &mut Buffer,
    buffer_range: Option<Range<usize>>,
) -> Option<bool> {
    // Missing table or no enabled features: nothing to do.
    if gsubgpos == 0 || feature_mask.is_empty() {
        return Some(false);
    }
    let buffer_range = if let Some(range) = buffer_range {
        range
    } else {
        0..buffer.len()
    };
    let mut acx = ApplyContext::new(
        stage,
        data,
        gsubgpos,
        gdef,
        coords,
        cache,
        storage,
        buffer,
        buffer_range.clone(),
    );
    // Cached lookups are laid out substitution-first; pos_start splits
    // the two stages.
    let lookups = if stage == 0 {
        &cache.lookups[..cache.pos_start]
    } else {
        &cache.lookups[cache.pos_start..]
    };
    let mut applied = false;
    for lookup in lookups {
        // Skip lookups whose owning feature is disabled.
        if !feature_mask.test(lookup.feature) {
            continue;
        }
        let table_range = lookup.subtables.0 as usize..lookup.subtables.1 as usize;
        let tables = cache.subtables.get(table_range)?;
        if let Some(true) = acx.apply(lookup, tables, buffer_range.start, None, 0) {
            applied = true;
        }
    }
    Some(applied)
}
677
678#[derive(Copy, Clone, Default)]
679struct LookupState {
680 skip_state: SkipState,
681 cur: usize,
682 end: usize,
683}
684
/// Mutable state for applying a sequence of lookups to a glyph buffer.
struct ApplyContext<'a, 'b, 'c> {
    // 0 for substitution (GSUB), 1 for positioning (GPOS).
    stage: u8,
    // Raw font data.
    data: &'a Bytes<'a>,
    // Offset of the GSUB or GPOS table.
    gsubgpos: u32,
    // Glyph definitions: classes, mark classes/sets, variation store.
    defs: &'a Gdef<'a>,
    // Normalized variation coordinates.
    coords: &'a [i16],
    // True when a variation store exists and coords are non-empty.
    enable_var: bool,
    cache: &'a FeatureStore,
    // Scratch space shared across nested lookups.
    storage: &'b mut Storage,
    // Number of active frames in `storage.stack`.
    top: u8,
    // Argument for the current feature (e.g. alternate index for aalt).
    arg: u16,
    // Inclusive start of the buffer range being shaped.
    start: usize,
    // Exclusive end of the buffer range being shaped.
    end: usize,
    // Cursor/window state for the current lookup.
    s: LookupState,
    buf: &'c mut Buffer,
}
701
702impl<'a, 'b, 'c> ApplyContext<'a, 'b, 'c> {
703 pub fn new(
704 stage: u8,
705 data: &'a Bytes<'a>,
706 gsubgpos: u32,
707 defs: &'a Gdef<'a>,
708 coords: &'a [i16],
709 cache: &'a FeatureStore,
710 storage: &'b mut Storage,
711 buffer: &'c mut Buffer,
712 range: Range<usize>,
713 ) -> Self {
714 Self {
715 stage,
716 data,
717 gsubgpos,
718 defs,
719 coords,
720 enable_var: defs.has_var_store() && !coords.is_empty(),
721 cache,
722 storage,
723 top: 0,
724 arg: 0,
725 start: range.start,
726 end: range.end,
727 s: LookupState::default(),
728 buf: buffer,
729 }
730 }
731
732 fn apply_skip_state(&mut self) {
733 if self.s.skip_state == self.buf.skip_state {
734 return;
735 }
736 self.buf.skip_state = self.s.skip_state;
737 self.update_glyphs_skip(None);
738 }
739
    /// Recomputes the per-glyph `skip` flag for `range` (or the whole
    /// buffer) against the current skip state.
    ///
    /// A glyph is skipped when its class is filtered by the lookup
    /// flags, when its mask lacks any required bit, or (for marks,
    /// class 3) when it fails the mark-set or mark-class filter.
    fn update_glyphs_skip(&mut self, range: Option<Range<usize>>) {
        let range = range.unwrap_or(0..self.buf.glyphs.len());
        let ss = &self.s.skip_state;
        let mask = ss.mask;
        if ss.mark_check != 0 {
            if ss.mark_set != 0 {
                // Mark filtering set: skip marks not covered by the set.
                for g in self.buf.glyphs[range].iter_mut() {
                    g.skip = (ss.flags & (1 << g.class) != 0) || (g.mask & mask != mask);
                    if !g.skip && g.class == 3 {
                        g.skip = self.defs.mark_set_coverage(ss.mark_set, g.id).is_none();
                    }
                }
            } else {
                // Mark attachment class: skip marks of a different class.
                for g in self.buf.glyphs[range].iter_mut() {
                    g.skip = (ss.flags & (1 << g.class) != 0) || (g.mask & mask != mask);
                    if !g.skip && g.class == 3 {
                        g.skip = g.mark_type != ss.mark_class;
                    }
                }
            }
        } else if mask != 0 {
            for g in self.buf.glyphs[range].iter_mut() {
                g.skip = (ss.flags & (1 << g.class) != 0) || (g.mask & mask != mask);
            }
        } else {
            // Fast path: only the class filter matters.
            for g in self.buf.glyphs[range].iter_mut() {
                g.skip = ss.flags & (1 << g.class) != 0;
            }
        }
    }
770
771 fn update_glyphs(&mut self, start: usize, end: usize) {
772 if self.defs.has_mark_classes() {
773 for g in &mut self.buf.glyphs[start..end] {
774 let class = self.defs.class(g.id) as u8;
775 g.class = class;
776 g.mark_type = if class == 3 {
777 self.defs.mark_class(g.id) as u8
778 } else {
779 0
780 };
781 }
782 } else {
783 for g in &mut self.buf.glyphs[start..end] {
784 g.class = self.defs.class(g.id) as u8;
785 }
786 }
787 self.update_glyphs_skip(Some(start..end));
788 }
789
    /// Recomputes class, mark type, and skip flag for a single glyph
    /// (used after a substitution changes a glyph id in place).
    fn update_glyph(&mut self, index: usize) {
        let ss = &self.s.skip_state;
        let mask = ss.mask;
        let g = &mut self.buf.glyphs[index];
        let class = self.defs.class(g.id) as u8;
        g.class = class;
        g.skip = (ss.flags & (1 << class) != 0) || (g.mask & mask != mask);
        if class == 3 {
            // Marks additionally pass through the mark-set / mark-class
            // filter when the lookup requests mark checking.
            g.mark_type = self.defs.mark_class(g.id) as u8;
            if ss.mark_check != 0 && !g.skip {
                if ss.mark_set != 0 {
                    g.skip = self.defs.mark_set_coverage(ss.mark_set, g.id).is_none();
                } else {
                    g.skip = g.mark_type != ss.mark_class;
                }
            }
        } else {
            g.mark_type = 0;
        }
    }
810
811 #[inline(always)]
812 fn ignored(&self, index: usize) -> bool {
813 self.buf.glyphs[index].skip
814 }
815
816 fn next(&self, index: usize) -> Option<usize> {
817 ((index + 1)..self.s.end).find(|&i| !self.ignored(i))
818 }
819
820 fn previous(&self, index: usize) -> Option<usize> {
821 if index > self.start {
822 for i in (self.start..=(index - 1)).rev() {
823 if !self.ignored(i) {
824 return Some(i);
825 }
826 }
827 }
828 None
829 }
830
831 fn previous_base(&self, index: usize) -> Option<usize> {
832 if index > self.start {
833 for i in (self.start..=(index - 1)).rev() {
834 if !self.ignored(i) {
835 let class = self.buf.glyphs[i].class;
836 if class != 3 {
837 return Some(i);
838 }
839 }
840 }
841 }
842 None
843 }
844
845 fn move_first(&mut self) -> bool {
846 while self.s.cur < self.s.end {
847 if !self.buf.glyphs[self.s.cur].skip {
848 break;
849 }
850 self.s.cur += 1;
851 }
852 self.s.cur < self.s.end
853 }
854
    /// Moves the cursor to the last unskipped glyph in the window;
    /// returns false when every glyph is skipped.
    ///
    /// NOTE(review): the backward scan stops at index 0 rather than
    /// `self.start` — confirm this is intended for reverse-chain use.
    fn move_last(&mut self) -> bool {
        if self.s.end == 0 {
            return false;
        }
        self.s.cur = self.s.end - 1;
        loop {
            if !self.ignored(self.s.cur) {
                break;
            }
            // Check before decrementing to avoid usize underflow.
            if self.s.cur == 0 {
                return false;
            }
            self.s.cur -= 1;
        }
        true
    }
871
872 fn move_next(&mut self) -> bool {
873 self.s.cur += 1;
874 while self.s.cur < self.s.end {
875 if !self.buf.glyphs[self.s.cur].skip {
876 break;
877 }
878 self.s.cur += 1;
879 }
880 self.s.cur < self.s.end
881 }
882
883 fn _move_previous(&mut self) -> bool {
884 if self.s.cur == self.start {
885 return false;
886 }
887 for i in (self.start..=(self.s.cur - 1)).rev() {
888 if !self.ignored(i) {
889 self.s.cur = i;
890 return true;
891 }
892 }
893 false
894 }
895
896 fn move_to(&mut self, index: usize) -> bool {
897 if !self.move_first() {
898 return false;
899 }
900 for _ in 0..index {
901 if !self.move_next() {
902 return false;
903 }
904 }
905 true
906 }
907
    /// Collects the indices and ids of the next `len` unskipped glyphs
    /// after the cursor into `self.storage`, returning false when fewer
    /// than `len` are available.
    fn collect_sequence(&mut self, len: usize) -> bool {
        let mut collected = 0usize;
        let avail = self.s.end - self.s.cur;
        // Quick reject: we need the current glyph plus `len` further
        // positions, counting skipped glyphs.
        if avail < (len + 1) {
            return false;
        }
        let mut i = self.s.cur + 1;
        for g in &self.buf.glyphs[self.s.cur + 1..self.s.end] {
            if !g.skip {
                self.storage.indices[collected] = i;
                self.storage.ids[collected] = g.id;
                collected += 1;
                if collected == len {
                    return true;
                }
            }
            i += 1;
        }
        false
    }
928
929 fn extend(&mut self, count: usize) {
930 self.end += count;
931 self.s.end += count;
932 self.s.cur += count;
933 for i in 0..self.top as usize {
934 self.storage.stack[i].end += count;
935 self.storage.stack[i].cur += count;
936 }
937 }
938
939 fn match_backtrack<F>(&self, start: usize, len: usize, pred: F) -> Option<bool>
940 where
941 F: Fn(usize, u16) -> bool,
942 {
943 let mut idx = start;
944 for i in 0..len {
945 idx = self.previous(idx)?;
946 if !pred(i, self.buf.glyphs[idx].id) {
947 return None;
948 }
949 }
950 Some(true)
951 }
952
953 fn match_sequence<F>(&self, start: usize, len: usize, pred: F) -> Option<usize>
954 where
955 F: Fn(usize, u16) -> bool,
956 {
957 let mut idx = start;
958 for i in 0..len {
959 idx = self.next(idx)?;
960 if !pred(i, self.buf.glyphs[idx].id) {
961 return None;
962 }
963 }
964 Some(idx)
965 }
966}
967
968impl<'a, 'b, 'c> ApplyContext<'a, 'b, 'c> {
969 #[inline(never)]
    /// Applies a single lookup over the current buffer window.
    ///
    /// `cur` is the starting glyph index, `end` optionally overrides the
    /// window end, and `first` is the number of unskipped glyphs to
    /// advance past before matching begins. Returns whether any subtable
    /// applied; `None` signals inconsistent buffer state.
    #[inline(never)]
    pub fn apply(
        &mut self,
        lookup: &LookupData,
        subtables: &[SubtableData],
        cur: usize,
        end: Option<usize>,
        first: usize,
    ) -> Option<bool> {
        // Fetch the per-feature argument (e.g. alternate index) for the
        // lookup's owning feature.
        let feature_index = lookup.feature as usize;
        self.arg = if lookup.stage == 0 {
            self.buf.sub_args[feature_index]
        } else {
            self.buf.pos_args[feature_index]
        };
        let b = self.data;
        // Install this lookup's skip filter and window.
        self.s.skip_state = SkipState {
            flags: lookup.ignored,
            mask: lookup.mask,
            mark_check: lookup.mark_check,
            mark_class: lookup.mark_class,
            mark_set: lookup.mark_set,
        };
        self.s.cur = cur;
        self.s.end = end.unwrap_or(self.end);
        self.apply_skip_state();
        let mut applied = false;
        if lookup.kind == LookupKind::RevChainContext {
            // Reverse chaining lookups walk the buffer back to front.
            if !self.move_last() {
                return Some(false);
            }
            loop {
                let i = self.s.cur;
                let g = self.buf.glyphs.get(i)?;
                if !g.skip {
                    let id = g.id;
                    // Cheap pre-test against the cached union coverage
                    // before probing the individual subtables.
                    if self.cache.test(lookup.coverage, id) {
                        for s in subtables {
                            if let Some(index) = s.coverage(b, id) {
                                if self.apply_subtable(b, s, index as usize, i, id) == Some(true) {
                                    applied = true;
                                    break;
                                }
                            }
                        }
                    }
                }
                if self.s.cur == 0 {
                    break;
                }
                self.s.cur -= 1;
            }
        } else {
            // Forward lookups: position on the `first`-th unskipped
            // glyph, then scan to the window end.
            if !self.move_to(first) {
                return Some(false);
            }
            while self.s.cur < self.s.end {
                let i = self.s.cur;
                let g = self.buf.glyphs.get(i)?;
                if !g.skip {
                    let id = g.id;
                    if self.cache.test(lookup.coverage, id) {
                        for s in subtables {
                            if let Some(index) = s.coverage(b, id) {
                                if self.apply_subtable(b, s, index as usize, i, id) == Some(true) {
                                    applied = true;
                                    break;
                                }
                            }
                        }
                    }
                }
                self.s.cur += 1;
            }
        }
        Some(applied)
    }
1046
1047 #[inline(never)]
1048 fn apply_subtable(
1049 &mut self,
1050 b: &'a Bytes<'a>,
1051 subtable: &SubtableData,
1052 index: usize,
1053 cur: usize,
1054 g: u16,
1055 ) -> Option<bool> {
1056 use SubtableKind::*;
1057 let kind = subtable.kind;
1058 let base = subtable.offset as usize;
1059 match kind {
1069 SingleSub1 => {
1070 let delta = b.read::<i16>(base + 4)? as i32;
1071 let subst = (g as i32 + delta) as u16;
1072 self.buf.substitute(cur, subst);
1073 self.update_glyph(cur);
1074 return Some(true);
1075 }
1076 SingleSub2 => {
1077 let arr = base + 6;
1078 let subst = b.read::<u16>(arr + index * 2)?;
1079 self.buf.substitute(cur, subst);
1080 self.update_glyph(cur);
1081 return Some(true);
1082 }
1083 MultiSub1 => {
1084 let seqbase = base + b.read::<u16>(base + 6 + index * 2)? as usize;
1085 let seqlen = b.read::<u16>(seqbase)? as usize;
1086 if seqlen > MAX_SEQUENCE {
1087 return Some(false);
1088 }
1089 let seqarr = seqbase + 2;
1090 for i in 0..seqlen {
1091 let subst = b.read::<u16>(seqarr + i * 2)?;
1092 self.storage.ids[i] = subst;
1093 }
1094 self.buf
1095 .substitute_multiple(cur, &self.storage.ids[0..seqlen]);
1096 self.update_glyphs(cur, cur + seqlen);
1097 self.extend(seqlen - 1);
1098 return Some(true);
1099 }
1100 AltSub1 => {
1101 let offset = b.read::<u16>(base + 6 + index * 2)? as usize;
1102 if offset == 0 {
1103 return Some(false);
1104 }
1105 let arg = self.arg as usize;
1106 let setbase = base + offset;
1107 let count = b.read::<u16>(setbase)? as usize;
1108 if arg >= count {
1109 return Some(false);
1110 }
1111 let subst = b.read::<u16>(setbase + 2 + arg * 2)?;
1112 self.buf.substitute(cur, subst);
1113 self.update_glyph(cur);
1114 return Some(true);
1115 }
1116 LigSub1 => {
1117 let setbase = base + b.read::<u16>(base + 6 + index * 2)? as usize;
1118 let ligcount = b.read::<u16>(setbase)? as usize;
1119 let mut seqlen = 0usize;
1120 for i in 0..ligcount {
1121 let ligbase = setbase + b.read::<u16>(setbase + 2 + i * 2)? as usize;
1122 let mut compcount = b.read::<u16>(ligbase + 2)? as usize;
1123 if compcount == 0 {
1124 continue;
1125 }
1126 compcount -= 1;
1127 if compcount >= MAX_SEQUENCE {
1128 continue;
1129 }
1130 let arr = ligbase + 4;
1131 if seqlen < compcount {
1132 if !self.collect_sequence(compcount) {
1133 continue;
1134 }
1135 seqlen = compcount;
1136 }
1137 let components = b.read_array::<u16>(arr, compcount)?;
1138 let mut matched = true;
1139 for (a, b) in components.iter().zip(&self.storage.ids) {
1140 if a != *b {
1141 matched = false;
1142 break;
1143 }
1144 }
1145 if !matched {
1146 continue;
1147 }
1148 let glyph = b.read::<u16>(ligbase)?;
1149 self.buf
1150 .substitute_ligature(cur, glyph, &self.storage.indices[0..compcount]);
1151 self.update_glyph(cur);
1152 return Some(true);
1153 }
1154 }
1155 SingleAdj1 => {
1156 let mut pos = [0f32; 4];
1157 self.value_record(base, base + 6, b.read::<u16>(base + 4)?, &mut pos)?;
1158 self.buf.position(cur, pos[0], pos[1], pos[2], pos[3]);
1159 return Some(true);
1160 }
1161 SingleAdj2 => {
1162 let vf = b.read::<u16>(base + 4)?;
1163 let len = vf.count_ones() as usize * 2;
1164 let mut pos = [0f32; 4];
1165 self.value_record(base, base + 8 + index * len, vf, &mut pos)?;
1166 self.buf.position(cur, pos[0], pos[1], pos[2], pos[3]);
1167 return Some(true);
1168 }
1169 PairAdj1 => {
1170 let next = self.next(cur)?;
1171 let g2 = self.buf.glyphs[next].id;
1172 let vf1 = b.read::<u16>(base + 4)?;
1173 let vf2 = b.read::<u16>(base + 6)?;
1174 let len1 = vf1.count_ones() as usize * 2;
1175 let step = len1 + vf2.count_ones() as usize * 2 + 2;
1176 let setbase = base + b.read::<u16>(base + 10 + index * 2)? as usize;
1177 let count = b.read::<u16>(setbase)? as usize;
1178 let vbase = setbase + 2;
1179 let mut l = 0;
1180 let mut h = count;
1181 while l < h {
1182 use core::cmp::Ordering::*;
1183 let i = (l + h) / 2;
1184 let v = vbase + i * step;
1185 let gv = b.read::<u16>(v)?;
1186 match g2.cmp(&gv) {
1187 Greater => l = i + 1,
1188 Less => h = i,
1189 Equal => {
1190 if vf1 != 0 {
1191 let mut pos = [0f32; 4];
1192 self.value_record(setbase, v + 2, vf1, &mut pos)?;
1193 self.buf.position(cur, pos[0], pos[1], pos[2], pos[3]);
1194 }
1195 if vf2 != 0 {
1196 let mut pos = [0f32; 4];
1197 self.value_record(setbase, v + 2 + len1, vf2, &mut pos)?;
1198 self.buf.position(next, pos[0], pos[1], pos[2], pos[3]);
1199 }
1200 return Some(true);
1201 }
1202 }
1203 }
1204 }
1205 PairAdj2 => {
1206 let next = self.next(cur)?;
1207 let g2 = self.buf.glyphs[next].id;
1208 let vf1 = b.read::<u16>(base + 4)?;
1209 let vf2 = b.read::<u16>(base + 6)?;
1210 let len1 = vf1.count_ones() as usize * 2;
1211 let step = len1 + vf2.count_ones() as usize * 2;
1212 let class1 = self.class(base + b.read::<u16>(base + 8)? as usize, g) as usize;
1213 let class2 = self.class(base + b.read::<u16>(base + 10)? as usize, g2) as usize;
1214 let class2_count = b.read::<u16>(base + 14)? as usize;
1215 let v = base + 16 + (class1 * step * class2_count) + (class2 * step);
1216 if vf1 != 0 {
1217 let mut pos = [0f32; 4];
1218 self.value_record(base, v, vf1, &mut pos)?;
1219 self.buf.position(cur, pos[0], pos[1], pos[2], pos[3]);
1220 }
1221 if vf2 != 0 {
1222 let mut pos = [0f32; 4];
1223 self.value_record(base, v + len1, vf2, &mut pos)?;
1224 self.buf.position(next, pos[0], pos[1], pos[2], pos[3]);
1225 }
1226 return Some(true);
1227 }
1228 Cursive1 => {
1229 let next = self.next(cur)?;
1230 if next - cur > 255 {
1231 return Some(false);
1232 }
1233 let g2 = self.buf.glyphs[next].id;
1234 let index2 = subtable.coverage(b, g2)? as usize;
1235 let recbase = base + 6;
1236 let mut exit_offset = b.read::<u16>(recbase + index * 4 + 2)? as usize;
1237 let mut entry_offset = b.read::<u16>(recbase + index2 * 4)? as usize;
1238 if exit_offset == 0 || entry_offset == 0 {
1239 return Some(false);
1240 }
1241 exit_offset += base;
1242 entry_offset += base;
1243 let exit = self.anchor(exit_offset)?;
1244 let entry = self.anchor(entry_offset)?;
1245 let dx = entry.0 - exit.0;
1246 let dy = entry.1 - exit.1;
1247 self.buf.position_cursive(cur, next, dx, dy);
1248 return Some(true);
1249 }
1250 MarkToBase1 | MarkToMark1 => {
1251 let prev = if kind == MarkToBase1 {
1252 self.previous_base(cur)?
1253 } else {
1254 self.previous(cur)?
1255 };
1256 let diff = cur - prev;
1257 if diff > 255 {
1258 return Some(false);
1259 }
1260 let g2 = self.buf.glyphs[prev].id;
1261 let index2 = self.coverage(base + b.read::<u16>(base + 4)? as usize, g2)? as usize;
1262 let (mark_class, mark_anchor) = {
1263 let markbase = base + b.read::<u16>(base + 8)? as usize;
1264 let a = self.mark_anchor(markbase, index as u16)?;
1265 (a.0 as usize, a.1)
1266 };
1267 let base_anchor = {
1268 let class_count = b.read::<u16>(base + 6)? as usize;
1269 let basebase = base + b.read::<u16>(base + 10)? as usize;
1270 let count = b.read::<u16>(basebase)? as usize * class_count;
1271 let index = class_count * index2 + mark_class;
1272 if index >= count {
1273 return Some(false);
1274 }
1275 let abase = basebase + b.read::<u16>(basebase + 2 + index * 2)? as usize;
1276 self.anchor(abase)?
1277 };
1278 let dx = base_anchor.0 - mark_anchor.0;
1279 let dy = base_anchor.1 - mark_anchor.1;
1280 self.buf.position_mark(cur, prev, dx, dy);
1281 return Some(true);
1282 }
1283 MarkToLig1 => {
1284 let comp_index = self.buf.glyphs[cur].component as usize;
1285 if comp_index == 0xFF {
1286 return None;
1287 }
1288 let prev = self.previous_base(cur)?;
1289 let diff = cur - prev;
1290 if diff > 255 {
1291 return None;
1292 }
1293 let g2 = self.buf.glyphs[prev].id;
1294 let mark_index = index as u16;
1295 let base_index = self.coverage(base + b.read::<u16>(base + 4)? as usize, g2)?;
1296 let class_count = b.read::<u16>(base + 6)? as usize;
1297 let mark_anchor =
1298 self.mark_anchor(base + b.read::<u16>(base + 8)? as usize, mark_index)?;
1299 let mark_class = mark_anchor.0 as usize;
1300 let mark_anchor = mark_anchor.1;
1301 let mut lig_array = b.read::<u16>(base + 10)? as usize;
1302 if lig_array == 0 {
1303 return None;
1304 }
1305 lig_array += base;
1306 let lig_array_len = b.read::<u16>(lig_array)?;
1307 if base_index >= lig_array_len {
1308 return None;
1309 }
1310 let mut lig_attach =
1311 b.read::<u16>(lig_array + 2 + base_index as usize * 2)? as usize;
1312 if lig_attach == 0 {
1313 return None;
1314 }
1315
1316 lig_attach += lig_array;
1317 let comp_count = b.read::<u16>(lig_attach)? as usize;
1318 if comp_count == 0 || comp_index >= comp_count {
1319 return None;
1320 }
1321 let comp_rec = lig_attach + 2 + comp_index * class_count * 2 + mark_class * 2;
1322 let anchor_offset = b.read::<u16>(comp_rec)? as usize;
1323 if anchor_offset == 0 {
1324 return None;
1325 }
1326 let base_anchor = self.anchor(lig_attach + anchor_offset)?;
1327 let dx = base_anchor.0 - mark_anchor.0;
1328 let dy = base_anchor.1 - mark_anchor.1;
1329 self.buf.position_mark(cur, prev, dx, dy);
1330 return Some(true);
1331 }
1332 Context1 => {
1333 let set_index = index;
1334 let mut c = b.stream_at(base + 4)?;
1335 let set_count = c.read::<u16>()? as usize;
1336 let set_offsets = c.read_array::<u16>(set_count)?;
1337 let mut offset = set_offsets.get(set_index)? as usize;
1338 if offset == 0 {
1339 return Some(false);
1340 }
1341 offset += base;
1342 let mut c = b.stream_at(offset)?;
1343 let rule_count = c.read::<u16>()? as usize;
1344 let rule_offsets = c.read_array::<u16>(rule_count)?;
1345 for i in 0..rule_count {
1346 let rule_offset = offset + rule_offsets.get(i)? as usize;
1347 let mut c = b.stream_at(rule_offset)?;
1348 let mut input_count = c.read::<u16>()? as usize;
1349 let subst_count = c.read::<u16>()? as usize;
1350 let mut input_end = cur;
1351 if input_count > 1 {
1352 input_count -= 1;
1353 let seq = c.read_array::<u16>(input_count)?;
1354 if let Some(end) = self
1355 .match_sequence(cur, input_count, |i, id| id == seq.get(i).unwrap_or(0))
1356 {
1357 input_end = end;
1358 } else {
1359 continue;
1360 }
1361 }
1362 self.apply_contextual(c, subst_count, input_end);
1366 return Some(true);
1367 }
1368 }
1369 Context2 => {
1370 let mut c = b.stream_at(base + 4)?;
1371 let mut input_classdef = c.read::<u16>()? as usize;
1372 if input_classdef == 0 {
1373 return Some(false);
1374 }
1375 input_classdef += base;
1376 let set_index = self.class(input_classdef, g) as usize;
1377 let set_count = c.read::<u16>()? as usize;
1378 let set_offsets = c.read_array::<u16>(set_count)?;
1379 let mut offset = set_offsets.get(set_index)? as usize;
1380 if offset == 0 {
1381 return Some(false);
1382 }
1383 offset += base;
1384 let mut c = b.stream_at(offset)?;
1385 let rule_count = c.read::<u16>()? as usize;
1386 let rule_offsets = c.read_array::<u16>(rule_count)?;
1387 for i in 0..rule_count {
1388 let rule_offset = offset + rule_offsets.get(i)? as usize;
1389 let mut c = b.stream_at(rule_offset)?;
1390 let mut input_count = c.read::<u16>()? as usize;
1391 let subst_count = c.read::<u16>()? as usize;
1392 let mut input_end = cur;
1393 if input_count > 1 {
1394 input_count -= 1;
1395 let seq = c.read_array::<u16>(input_count)?;
1396 if let Some(end) = self.match_sequence(cur, input_count, |i, id| {
1397 self.class(input_classdef, id) == seq.get(i).unwrap_or(0)
1398 }) {
1399 input_end = end;
1400 } else {
1401 continue;
1402 }
1403 }
1404 self.apply_contextual(c, subst_count, input_end);
1408 return Some(true);
1409 }
1410 }
1411 Context3 => {
1412 let mut c = b.stream_at(base + 2)?;
1413 let mut input_count = c.read::<u16>()? as usize;
1414 if input_count == 0 {
1415 return None;
1416 }
1417 input_count -= 1;
1418 let subst_count = c.read::<u16>()? as usize;
1419 c.skip(2)?;
1420 let input = c.read_array::<u16>(input_count)?;
1421 let input_end = self.match_sequence(cur, input_count, |i, id| {
1422 self.coverage(base + input.get(i).unwrap_or(0) as usize, id)
1423 .is_some()
1424 })?;
1425 self.apply_contextual(c, subst_count, input_end);
1426 return Some(true);
1427 }
1428 ChainContext1 => {
1429 let set_index = index;
1430 let mut c = b.stream_at(base + 4)?;
1431 let set_count = c.read::<u16>()? as usize;
1432 let set_offsets = c.read_array::<u16>(set_count)?;
1433 let mut offset = set_offsets.get(set_index)? as usize;
1434 if offset == 0 {
1435 return Some(false);
1436 }
1437 offset += base;
1438 let mut c = b.stream_at(offset)?;
1439 let rule_count = c.read::<u16>()? as usize;
1440 let rule_offsets = c.read_array::<u16>(rule_count)?;
1441 for i in 0..rule_count {
1442 let rule_offset = offset + rule_offsets.get(i)? as usize;
1443 let mut c = b.stream_at(rule_offset)?;
1444 let backtrack_count = c.read::<u16>()? as usize;
1445 if backtrack_count != 0 {
1446 let seq = c.read_array::<u16>(backtrack_count)?;
1447 let pred = |i, id| id == seq.get(i).unwrap_or(0);
1448 if self.match_backtrack(cur, backtrack_count, pred).is_none() {
1449 continue;
1450 }
1451 }
1452 let mut input_count = c.read::<u16>()? as usize;
1453 let mut input_end = cur;
1454 if input_count > 1 {
1455 input_count -= 1;
1456 let seq = c.read_array::<u16>(input_count)?;
1457 if let Some(end) = self
1458 .match_sequence(cur, input_count, |i, id| id == seq.get(i).unwrap_or(0))
1459 {
1460 input_end = end;
1461 } else {
1462 continue;
1463 }
1464 }
1465 let lookahead_count = c.read::<u16>()? as usize;
1466 if lookahead_count != 0 {
1467 let seq = c.read_array::<u16>(lookahead_count)?;
1468 let pred = |i, id| id == seq.get(i).unwrap_or(0);
1469 if self
1470 .match_sequence(input_end, lookahead_count, pred)
1471 .is_none()
1472 {
1473 continue;
1474 }
1475 }
1476 let count = c.read::<u16>()? as usize;
1477 self.apply_contextual(c, count, input_end);
1481 return Some(true);
1482 }
1483 }
1484 ChainContext2 => {
1485 let mut c = b.stream_at(base + 4)?;
1486 let backtrack_classdef = base + c.read::<u16>()? as usize;
1487 let mut input_classdef = c.read::<u16>()? as usize;
1488 if input_classdef == 0 {
1489 return Some(false);
1490 }
1491 input_classdef += base;
1492 let set_index = self.class(input_classdef, g) as usize;
1493 let lookahead_classdef = base + c.read::<u16>()? as usize;
1494 let set_count = c.read::<u16>()? as usize;
1495 let set_offsets = c.read_array::<u16>(set_count)?;
1496 let mut offset = set_offsets.get(set_index)? as usize;
1497 if offset == 0 {
1498 return Some(false);
1499 }
1500 offset += base;
1501 let mut c = b.stream_at(offset)?;
1502 let rule_count = c.read::<u16>()? as usize;
1503 let rule_offsets = c.read_array::<u16>(rule_count)?;
1504 for i in 0..rule_count {
1505 let rule_offset = offset + rule_offsets.get(i)? as usize;
1506 let mut c = b.stream_at(rule_offset)?;
1507 let backtrack_count = c.read::<u16>()? as usize;
1508 if backtrack_count != 0 {
1509 let seq = c.read_array::<u16>(backtrack_count)?;
1510 let pred =
1511 |i, id| self.class(backtrack_classdef, id) == seq.get(i).unwrap_or(0);
1512 if self.match_backtrack(cur, backtrack_count, pred).is_none() {
1513 continue;
1514 }
1515 }
1516 let mut input_count = c.read::<u16>()? as usize;
1517 let mut input_end = cur;
1518 if input_count > 1 {
1519 input_count -= 1;
1520 let seq = c.read_array::<u16>(input_count)?;
1521 if let Some(end) = self.match_sequence(cur, input_count, |i, id| {
1522 self.class(input_classdef, id) == seq.get(i).unwrap_or(0)
1523 }) {
1524 input_end = end;
1525 } else {
1526 continue;
1527 }
1528 }
1529 let lookahead_count = c.read::<u16>()? as usize;
1530 if lookahead_count != 0 {
1531 let seq = c.read_array::<u16>(lookahead_count)?;
1532 let pred =
1533 |i, id| self.class(lookahead_classdef, id) == seq.get(i).unwrap_or(0);
1534 if self
1535 .match_sequence(input_end, lookahead_count, pred)
1536 .is_none()
1537 {
1538 continue;
1539 }
1540 }
1541 let count = c.read::<u16>()? as usize;
1542 self.apply_contextual(c, count, input_end);
1546 return Some(true);
1547 }
1548 }
1549 ChainContext3 => {
1550 let mut c = b.stream_at(base + 2)?;
1551 let backtrack_count = c.read::<u16>()? as usize;
1552 if backtrack_count != 0 {
1553 if backtrack_count > cur - self.start {
1554 return None;
1555 }
1556 let backtrack = c.read_array::<u16>(backtrack_count)?;
1557 self.match_backtrack(cur, backtrack_count, |i, id| {
1558 self.coverage(base + backtrack.get_or(i, 0) as usize, id)
1559 .is_some()
1560 })?;
1561 }
1562 let input_count = c.read::<u16>()? as usize - 1;
1563 c.skip(2);
1564 let mut input_end = cur;
1565 if input_count != 0 {
1566 let input = c.read_array::<u16>(input_count)?;
1567 input_end = self.match_sequence(cur, input_count, |i, id| {
1568 self.coverage(base + input.get_or(i, 0) as usize, id)
1569 .is_some()
1570 })?;
1571 }
1572 let lookahead_count = c.read::<u16>()? as usize;
1573 if lookahead_count != 0 {
1574 if lookahead_count > self.s.end - input_end {
1575 return None;
1576 }
1577 let lookahead = c.read_array::<u16>(lookahead_count)?;
1578 self.match_sequence(input_end, lookahead_count, |i, id| {
1579 self.coverage(base + lookahead.get_or(i, 0) as usize, id)
1580 .is_some()
1581 })?;
1582 }
1583 let count = c.read::<u16>()? as usize;
1584 self.apply_contextual(c, count, input_end);
1585 return Some(true);
1586 }
1587 RevChainContext1 => {
1588 let mut c = b.stream_at(base + 4)?;
1589 let backtrack_count = c.read::<u16>()? as usize;
1590 if backtrack_count != 0 {
1591 if backtrack_count > cur - self.start {
1592 return None;
1593 }
1594 let backtrack = c.read_array::<u16>(backtrack_count)?;
1595 self.match_backtrack(cur, backtrack_count, |i, id| {
1596 self.coverage(base + backtrack.get_or(i, 0) as usize, id)
1597 .is_some()
1598 })?;
1599 }
1600 let lookahead_count = c.read::<u16>()? as usize;
1601 if lookahead_count != 0 {
1602 if lookahead_count + cur + 1 > self.s.end {
1603 return None;
1604 }
1605 let lookahead = c.read_array::<u16>(lookahead_count)?;
1606 self.match_sequence(cur, lookahead_count, |i, id| {
1607 self.coverage(base + lookahead.get_or(i, 0) as usize, id)
1608 .is_some()
1609 })?;
1610 }
1611 let count = c.read::<u16>()? as usize;
1612 let substs = c.read_array::<u16>(count)?;
1613 let subst = substs.get(index)?;
1614 self.buf.substitute(cur, subst);
1615 return Some(true);
1616 }
1617 }
1618 None
1619 }
1620
1621 fn apply_nested(
1622 &mut self,
1623 index: u16,
1624 _start: usize,
1625 cur: usize,
1626 end: usize,
1627 first: usize,
1628 ) -> Option<bool> {
1629 if self.top as usize == MAX_NESTED_LOOKUPS {
1630 return None;
1631 }
1632 let b = self.data;
1633 let list_base = self.gsubgpos + b.read::<u16>(self.gsubgpos as usize + 8)? as u32;
1634 let lookup = lookup_data(self.data, self.stage, list_base, index, 0, Some(self.defs))?;
1635 self.storage.stack[self.top as usize] = self.s;
1636 self.top += 1;
1637 let v = self.apply_uncached(&lookup, cur, end + 1, first);
1638 self.top -= 1;
1639 self.s = self.storage.stack[self.top as usize];
1640 v
1641 }
1642
1643 fn apply_uncached(
1644 &mut self,
1645 lookup: &LookupData,
1646 cur: usize,
1647 end: usize,
1648 first: usize,
1649 ) -> Option<bool> {
1650 let b = self.data;
1651 let base = lookup.offset as usize;
1652 self.s.cur = cur;
1654 self.s.end = end.min(self.buf.len());
1655 let mut applied = false;
1659 let subtables = base + 6;
1660 let count = lookup.count as usize;
1661 let ext = lookup.is_ext;
1662 let kind = lookup.kind;
1663 let reverse = lookup.kind == LookupKind::RevChainContext;
1664 if reverse {
1665 if !self.move_last() {
1666 return Some(false);
1667 }
1668 } else if !self.move_to(first) {
1669 return Some(false);
1670 }
1671 let cur = self.s.cur;
1673 let g = self.buf.glyphs[cur].id;
1674 for i in 0..count {
1675 let mut subtable = base + b.read::<u16>(subtables + i * 2)? as usize;
1676 if ext {
1677 subtable = subtable + b.read::<u32>(subtable + 4)? as usize;
1678 }
1679 let fmt = b.read::<u16>(subtable)?;
1680 if let Some(ref s) = subtable_data(b, subtable as u32, kind, fmt) {
1681 if let Some(index) = s.coverage(b, g) {
1682 if let Some(true) = self.apply_subtable(b, s, index as usize, cur, g) {
1683 applied = true;
1684 break;
1685 }
1686 }
1687 }
1688 }
1689 Some(applied)
1698 }
1699
1700 fn apply_contextual(&mut self, mut c: Stream<'a>, count: usize, end: usize) -> Option<bool> {
1701 let mut applied = false;
1702 let start = self.s.cur;
1703 for _ in 0..count {
1704 let first = c.read::<u16>()? as usize;
1705 let lookup = c.read::<u16>()?;
1706 if let Some(true) = self.apply_nested(lookup, start, start, end, first) {
1707 applied = true;
1708 }
1709 }
1710 if applied {
1711 self.s.cur = end;
1712 }
1713 Some(applied)
1714 }
1715
1716 #[inline(always)]
1717 fn coverage(&self, coverage_offset: usize, glyph_id: u16) -> Option<u16> {
1718 coverage(self.data, coverage_offset as u32, glyph_id)
1719 }
1720
1721 #[inline(always)]
1722 fn class(&self, classdef_offset: usize, glyph_id: u16) -> u16 {
1723 classdef(self.data, classdef_offset as u32, glyph_id)
1724 }
1725}
1726
impl<'a, 'b, 'c> ApplyContext<'a, 'b, 'c> {
    /// Reads a GPOS value record at `offset` into `pos` as
    /// `[x_placement, y_placement, x_advance, y_advance]`, honoring the
    /// presence bits in `format`.
    ///
    /// `parent_offset` is the start of the enclosing subtable; the device /
    /// variation-index offsets in the record are resolved relative to it.
    /// Returns `None` if any read runs past the end of the data.
    fn value_record(
        &self,
        parent_offset: usize,
        mut offset: usize,
        format: u16,
        pos: &mut [f32; 4],
    ) -> Option<()> {
        let b = &self.data;
        // Fast path: only the x-advance bit set (common for kern-style records).
        if format == 4 {
            pos[2] = b.read_i16(offset)? as f32;
            return Some(());
        }
        // Bits 0-3 select which signed 16-bit fields are present, in order:
        // x placement, y placement, x advance, y advance.
        if format & 1 != 0 {
            pos[0] = b.read::<i16>(offset)? as f32;
            offset += 2;
        }
        if format & 2 != 0 {
            pos[1] = b.read::<i16>(offset)? as f32;
            offset += 2;
        }
        if format & 4 != 0 {
            pos[2] = b.read::<i16>(offset)? as f32;
            offset += 2;
        }
        if format & 8 != 0 {
            pos[3] = b.read::<i16>(offset)? as f32;
            offset += 2;
        }
        // Bits 4-7 flag device/variation-index offsets for each component.
        if format & (0x10 | 0x20 | 0x40 | 0x80) == 0 {
            return Some(());
        }
        // Variation deltas are only applied when variations are enabled;
        // otherwise the trailing offsets are simply not consumed (nothing
        // else reads past them in this record).
        if self.enable_var {
            if format & 0x10 != 0 {
                pos[0] += self.value_delta(parent_offset, b.read::<u16>(offset)?)?;
                offset += 2;
            }
            if format & 0x20 != 0 {
                pos[1] += self.value_delta(parent_offset, b.read::<u16>(offset)?)?;
                offset += 2;
            }
            if format & 0x40 != 0 {
                pos[2] += self.value_delta(parent_offset, b.read::<u16>(offset)?)?;
                offset += 2;
            }
            if format & 0x80 != 0 {
                pos[3] += self.value_delta(parent_offset, b.read::<u16>(offset)?)?;
            }
        }
        Some(())
    }

    /// Resolves a device-table offset (relative to `parent_offset`) to a
    /// variation delta for the current `coords`.
    ///
    /// A zero offset or a delta format other than 0x8000 (the
    /// variation-index format) yields a delta of 0. Returns `None` only on
    /// a truncated read.
    fn value_delta(&self, parent_offset: usize, offset: u16) -> Option<f32> {
        if offset == 0 {
            return Some(0.);
        }
        let b = &self.data;
        let offset = parent_offset + offset as usize;
        let format = b.read::<u16>(offset + 4)?;
        // Only variation-index tables contribute; plain device tables are
        // ignored here.
        if format != 0x8000 {
            return Some(0.);
        }
        // Outer/inner indices into the item variation store.
        let outer = b.read::<u16>(offset)?;
        let inner = b.read::<u16>(offset + 2)?;
        Some(self.defs.delta(outer, inner, self.coords))
    }

    /// Reads an anchor table at `offset`, returning its (x, y) coordinates.
    ///
    /// For format 3 anchors, variation deltas from the device-table offsets
    /// at +6/+8 are applied when a variation store is present and the
    /// instance has coordinates.
    fn anchor(&self, offset: usize) -> Option<(f32, f32)> {
        let b = &self.data;
        let format = b.read::<u16>(offset)?;
        let mut x = b.read::<i16>(offset + 2)? as f32;
        let mut y = b.read::<i16>(offset + 4)? as f32;
        if format == 3 && self.defs.has_var_store() && !self.coords.is_empty() {
            x += self.value_delta(offset, b.read::<u16>(offset + 6)?)?;
            y += self.value_delta(offset, b.read::<u16>(offset + 8)?)?;
        }
        Some((x, y))
    }

    /// Reads entry `index` of the mark array at `marks`, returning the
    /// mark's class and its resolved anchor point.
    ///
    /// Returns `None` when the index is out of range, the anchor offset is
    /// zero, or a read fails.
    fn mark_anchor(&self, marks: usize, index: u16) -> Option<(u16, (f32, f32))> {
        let b = &self.data;
        // First word of the mark array is the record count.
        if index >= b.read::<u16>(marks)? {
            return None;
        }
        // Each mark record is 4 bytes: class (u16) + anchor offset (u16).
        let rec = marks + 2 + index as usize * 4;
        let class = b.read::<u16>(rec)?;
        let offset = b.read::<u16>(rec + 2)? as usize;
        if offset == 0 {
            return None;
        }
        // Anchor offsets are relative to the start of the mark array.
        Some((class, self.anchor(marks + offset)?))
    }
}
1820
/// Fixed-size scratch storage used while applying lookups, kept separate so
/// it can be reused across shaping runs without reallocation.
#[derive(Default)]
pub struct Storage {
    // Saved lookup states pushed/popped around nested (contextual) lookups.
    stack: [LookupState; MAX_NESTED_LOOKUPS],
    // Scratch glyph ids — presumably for sequence matching; confirm at use sites.
    ids: [u16; MAX_SEQUENCE],
    // Scratch glyph indices — presumably parallel to `ids`; confirm at use sites.
    indices: [usize; MAX_SEQUENCE],
}