QtBase  v6.3.1
hb-ot-layout-common.hh
Go to the documentation of this file.
1 /*
2  * Copyright © 2007,2008,2009 Red Hat, Inc.
3  * Copyright © 2010,2012 Google, Inc.
4  *
5  * This is part of HarfBuzz, a text shaping library.
6  *
7  * Permission is hereby granted, without written agreement and without
8  * license or royalty fees, to use, copy, modify, and distribute this
9  * software and its documentation for any purpose, provided that the
10  * above copyright notice and the following two paragraphs appear in
11  * all copies of this software.
12  *
13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17  * DAMAGE.
18  *
19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21  * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24  *
25  * Red Hat Author(s): Behdad Esfahbod
26  * Google Author(s): Behdad Esfahbod
27  */
28 
29 #ifndef HB_OT_LAYOUT_COMMON_HH
30 #define HB_OT_LAYOUT_COMMON_HH
31 
32 #include "hb.hh"
33 #include "hb-ot-layout.hh"
34 #include "hb-open-type.hh"
35 #include "hb-set.hh"
36 #include "hb-bimap.hh"
37 
38 
39 #ifndef HB_MAX_NESTING_LEVEL
/* Bound on recursion depth — presumably for nested subtables/lookups during
 * sanitize/closure; not referenced in this chunk, TODO confirm against users. */
40 #define HB_MAX_NESTING_LEVEL 64
41 #endif
42 #ifndef HB_MAX_CONTEXT_LENGTH
/* Cap on contextual-lookup match length — usage not visible in this chunk. */
43 #define HB_MAX_CONTEXT_LENGTH 64
44 #endif
45 #ifndef HB_CLOSURE_MAX_STAGES
46 /*
47  * The maximum number of times a lookup can be applied during shaping.
48  * Used to limit the number of iterations of the closure algorithm.
49  * This must be larger than the number of times add_pause() is
50  * called in a collect_features call of any shaper.
51  */
52 #define HB_CLOSURE_MAX_STAGES 32
53 #endif
54 
55 #ifndef HB_MAX_SCRIPTS
/* Upper bound on Script records visited; enforced by the visitScript ()
 * helpers of the prune-langsys and subset-layout contexts below. */
56 #define HB_MAX_SCRIPTS 500
57 #endif
58 
59 #ifndef HB_MAX_LANGSYS
/* Upper bound on LangSys records visited while subsetting; enforced by
 * hb_subset_layout_context_t::visitLangSys () below. */
60 #define HB_MAX_LANGSYS 2000
61 #endif
62 
63 #ifndef HB_MAX_LANGSYS_FEATURE_COUNT
/* Cap on the running total of feature indices seen across all langsys;
 * enforced by visitLangsys (feature_count) in the prune-langsys context. */
64 #define HB_MAX_LANGSYS_FEATURE_COUNT 50000
65 #endif
66 
67 #ifndef HB_MAX_FEATURES
/* Feature-count limit — usage not visible in this chunk; TODO confirm. */
68 #define HB_MAX_FEATURES 750
69 #endif
70 
71 #ifndef HB_MAX_FEATURE_INDICES
/* Cap on feature indices visited while subsetting; enforced by
 * visitFeatureIndex (count) in the subset-layout context below. */
72 #define HB_MAX_FEATURE_INDICES 1500
73 #endif
74 
75 #ifndef HB_MAX_LOOKUP_VISIT_COUNT
/* Cap on lookup indices visited while subsetting; enforced by
 * visitLookupIndex () in the subset-layout context below. */
76 #define HB_MAX_LOOKUP_VISIT_COUNT 35000
77 #endif
78 
79 
80 namespace OT {
81 
82 
83 #define NOT_COVERED ((unsigned int) -1)
84 
85 
86 template<typename Iterator>
87 static inline void Coverage_serialize (hb_serialize_context_t *c,
88  Iterator it);
89 
90 template<typename Iterator>
91 static inline void ClassDef_serialize (hb_serialize_context_t *c,
92  Iterator it);
93 
94 static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
95  const hb_map_t &gid_klass_map,
97  const hb_set_t &klasses,
98  bool use_class_zero,
99  hb_map_t *klass_map /*INOUT*/);
100 
101 
103 {
104  hb_prune_langsys_context_t (const void *table_,
105  hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
106  const hb_map_t *duplicate_feature_map_,
107  hb_set_t *new_collected_feature_indexes_)
108  :table (table_),
109  script_langsys_map (script_langsys_map_),
110  duplicate_feature_map (duplicate_feature_map_),
111  new_feature_indexes (new_collected_feature_indexes_),
112  script_count (0),langsys_feature_count (0) {}
113 
114  bool visitScript ()
115  { return script_count++ < HB_MAX_SCRIPTS; }
116 
117  bool visitLangsys (unsigned feature_count)
118  {
119  langsys_feature_count += feature_count;
120  return langsys_feature_count < HB_MAX_LANGSYS_FEATURE_COUNT;
121  }
122 
123  public:
124  const void *table;
128 
129  private:
130  unsigned script_count;
131  unsigned langsys_feature_count;
132 };
133 
135  hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
136 {
137  const char *get_name () { return "SUBSET_LAYOUT"; }
138  static return_t default_return_value () { return hb_empty_t (); }
139 
140  bool visitScript ()
141  {
142  return script_count++ < HB_MAX_SCRIPTS;
143  }
144 
145  bool visitLangSys ()
146  {
147  return langsys_count++ < HB_MAX_LANGSYS;
148  }
149 
151  {
152  feature_index_count += count;
153  return feature_index_count < HB_MAX_FEATURE_INDICES;
154  }
155 
157  {
158  lookup_index_count++;
159  return lookup_index_count < HB_MAX_LOOKUP_VISIT_COUNT;
160  }
161 
168 
170  hb_tag_t tag_,
171  hb_map_t *lookup_map_,
172  hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
173  hb_map_t *feature_index_map_) :
174  subset_context (c_),
175  table_tag (tag_),
176  lookup_index_map (lookup_map_),
177  script_langsys_map (script_langsys_map_),
178  feature_index_map (feature_index_map_),
179  cur_script_index (0xFFFFu),
180  script_count (0),
181  langsys_count (0),
182  feature_index_count (0),
183  lookup_index_count (0)
184  {}
185 
186  private:
187  unsigned script_count;
188  unsigned langsys_count;
189  unsigned feature_index_count;
190  unsigned lookup_index_count;
191 };
192 
194  hb_dispatch_context_t<hb_collect_variation_indices_context_t>
195 {
196  template <typename T>
197  return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
198  static return_t default_return_value () { return hb_empty_t (); }
199 
203 
205  const hb_set_t *glyph_set_,
206  const hb_map_t *gpos_lookups_) :
207  layout_variation_indices (layout_variation_indices_),
208  glyph_set (glyph_set_),
209  gpos_lookups (gpos_lookups_) {}
210 };
211 
212 template<typename OutputArray>
214 {
216  OutputArray& out_,
217  const void *base_) : subset_context (subset_context_),
218  out (out_), base (base_) {}
219 
220  template <typename T>
222  {
223  auto snap = subset_context->serializer->snapshot ();
224  auto *o = out.serialize_append (subset_context->serializer);
225  if (unlikely (!o)) return false;
226  bool ret = o->serialize_subset (subset_context, offset, base);
227  if (!ret)
228  {
229  out.pop ();
230  subset_context->serializer->revert (snap);
231  }
232  return ret;
233  }
234 
235  private:
236  hb_subset_context_t *subset_context;
237  OutputArray &out;
238  const void *base;
239 };
240 
241 
242 template<typename OutputArray, typename Arg>
244 {
246  OutputArray& out_,
247  const void *base_,
248  Arg &&arg_) : subset_context (subset_context_), out (out_),
249  base (base_), arg (arg_) {}
250 
251  template <typename T>
253  {
254  auto snap = subset_context->serializer->snapshot ();
255  auto *o = out.serialize_append (subset_context->serializer);
256  if (unlikely (!o)) return false;
257  bool ret = o->serialize_subset (subset_context, offset, base, arg);
258  if (!ret)
259  {
260  out.pop ();
261  subset_context->serializer->revert (snap);
262  }
263  return ret;
264  }
265 
266  private:
267  hb_subset_context_t *subset_context;
268  OutputArray &out;
269  const void *base;
270  Arg &&arg;
271 };
272 
273 /*
274  * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
275  * and discards the offset in the array if the subset operation results in an empty
276  * thing.
277  */
278 struct
279 {
/* Factory: returns a subset_offset_array_t functor bound to the subset
 * context, the output offset array and the offset base.  Per the comment
 * above, each offset's target is subset-serialized and the offset is
 * discarded when the result is empty. */
280  template<typename OutputArray>
281  subset_offset_array_t<OutputArray>
282  operator () (hb_subset_context_t *subset_context, OutputArray& out,
283  const void *base) const
284  { return subset_offset_array_t<OutputArray> (subset_context, out, base); }
285 
286  /* Variant with one extra argument passed to serialize_subset */
287  template<typename OutputArray, typename Arg>
288  subset_offset_array_arg_t<OutputArray, Arg>
289  operator () (hb_subset_context_t *subset_context, OutputArray& out,
290  const void *base, Arg &&arg) const
291  { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
292 }
293 HB_FUNCOBJ (subset_offset_array);
294 
295 template<typename OutputArray>
297 {
299  const void *base_) : subset_layout_context (c_),
300  out (out_), base (base_) {}
301 
302  template <typename T>
303  void
305  {
306  auto snap = subset_layout_context->subset_context->serializer->snapshot ();
307  bool ret = record.subset (subset_layout_context, base);
308  if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
309  else out->len++;
310  }
311 
312  private:
313  hb_subset_layout_context_t *subset_layout_context;
314  OutputArray *out;
315  const void *base;
316 };
317 
318 /*
319  * Helper to subset a RecordList/record array. Subsets each Record in the array and
320  * discards the record if the subset operation returns false.
321  */
322 struct
323 {
/* Factory: returns a subset_record_array_t functor bound to the
 * subset-layout context, the output record array and the record base.
 * Per the comment above, each Record is subset and dropped from the
 * output when its subset () returns false. */
324  template<typename OutputArray>
325  subset_record_array_t<OutputArray>
326  operator () (hb_subset_layout_context_t *c, OutputArray* out,
327  const void *base) const
328  { return subset_record_array_t<OutputArray> (c, out, base); }
329 }
330 HB_FUNCOBJ (subset_record_array);
331 
332 
333 template<typename OutputArray>
335 {
337  OutputArray& out_,
338  const void *base_) : serialize_context (serialize_context_),
339  out (out_), base (base_) {}
340 
341  template <typename T>
343  {
344  if (!serialize_context->copy (record, base)) return false;
345  out.len++;
346  return true;
347  }
348 
349  private:
350  hb_serialize_context_t *serialize_context;
351  OutputArray &out;
352  const void *base;
353 };
354 
355 /*
356  * Helper to serialize an array of MATH records.
357  */
358 struct
359 {
/* Factory: returns a serialize_math_record_array_t functor bound to the
 * serialize context, the output array and the record base; that functor
 * copies each MATH record into `out` and bumps out.len on success. */
360  template<typename OutputArray>
361  serialize_math_record_array_t<OutputArray>
362  operator () (hb_serialize_context_t *serialize_context, OutputArray& out,
363  const void *base) const
364  { return serialize_math_record_array_t<OutputArray> (serialize_context, out, base); }
366 
367 }
368 HB_FUNCOBJ (serialize_math_record_array);
368 
369 /*
370  *
371  * OpenType Layout Common Table Formats
372  *
373  */
374 
375 
376 /*
377  * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
378  */
379 
382  const void *list_base;
383 };
384 
385 template <typename Type>
386 struct Record
387 {
388  int cmp (hb_tag_t a) const { return tag.cmp (a); }
389 
390  bool subset (hb_subset_layout_context_t *c, const void *base) const
391  {
392  TRACE_SUBSET (this);
393  auto *out = c->subset_context->serializer->embed (this);
394  if (unlikely (!out)) return_trace (false);
395  bool ret = out->offset.serialize_subset (c->subset_context, offset, base, c, &tag);
396  return_trace (ret);
397  }
398 
399  bool sanitize (hb_sanitize_context_t *c, const void *base) const
400  {
401  TRACE_SANITIZE (this);
402  const Record_sanitize_closure_t closure = {tag, base};
403  return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
404  }
405 
406  Tag tag; /* 4-byte Tag identifier */
408  offset; /* Offset from beginning of object holding
409  * the Record */
410  public:
412 };
413 
414 template <typename Type>
415 struct RecordArrayOf : SortedArray16Of<Record<Type>>
416 {
417  const Offset16To<Type>& get_offset (unsigned int i) const
418  { return (*this)[i].offset; }
419  Offset16To<Type>& get_offset (unsigned int i)
420  { return (*this)[i].offset; }
421  const Tag& get_tag (unsigned int i) const
422  { return (*this)[i].tag; }
423  unsigned int get_tags (unsigned int start_offset,
424  unsigned int *record_count /* IN/OUT */,
425  hb_tag_t *record_tags /* OUT */) const
426  {
427  if (record_count)
428  {
429  + this->sub_array (start_offset, record_count)
431  | hb_sink (hb_array (record_tags, *record_count))
432  ;
433  }
434  return this->len;
435  }
436  bool find_index (hb_tag_t tag, unsigned int *index) const
437  {
439  }
440 };
441 
442 template <typename Type>
444 {
445  const Type& operator [] (unsigned int i) const
446  { return this+this->get_offset (i); }
447 
450  {
451  TRACE_SUBSET (this);
452  auto *out = c->serializer->start_embed (*this);
453  if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
454 
455  + this->iter ()
456  | hb_apply (subset_record_array (l, out, this))
457  ;
458  return_trace (true);
459  }
460 
462  {
463  TRACE_SANITIZE (this);
465  }
466 };
467 
468 struct Feature;
469 
471 {
474  {
475  TRACE_SUBSET (this);
476  auto *out = c->serializer->start_embed (*this);
477  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
478 
479  unsigned count = this->len;
480  + hb_zip (*this, hb_range (count))
481  | hb_filter (l->feature_index_map, hb_second)
482  | hb_map (hb_first)
483  | hb_apply (subset_record_array (l, out, this))
484  ;
485  return_trace (true);
486  }
487 };
488 
489 struct Script;
491 {
494  {
495  TRACE_SUBSET (this);
496  auto *out = c->serializer->start_embed (*this);
497  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
498 
499  unsigned count = this->len;
500  for (auto _ : + hb_zip (*this, hb_range (count)))
501  {
502  auto snap = c->serializer->snapshot ();
503  l->cur_script_index = _.second;
504  bool ret = _.first.subset (l, this);
505  if (!ret) c->serializer->revert (snap);
506  else out->len++;
507  }
508 
509  return_trace (true);
510  }
511 };
512 
514 {
/* NOTE(review): the struct declaration line (original line 513) was lost in
 * extraction — presumably `struct RangeRecord`; confirm against upstream.
 * Models a glyph-id range [first, last] carrying a 16-bit `value`. */
515  int cmp (hb_codepoint_t g) const
/* Three-way compare of glyph g against the range: -1 below, 0 inside, +1 above. */
516  { return g < first ? -1 : g <= last ? 0 : +1; }
517 
/* (sanitize — its signature line, original 518, is missing from this extract.)
 * Only validates this struct's own bytes; there are no offsets to follow. */
519  {
520  TRACE_SANITIZE (this);
521  return_trace (c->check_struct (this));
522  }
523 
/* True iff the glyph set contains any glyph in [first, last]. */
524  bool intersects (const hb_set_t *glyphs) const
525  { return glyphs->intersects (first, last); }
526 
/* Adds the whole [first, last] range to the given set. */
527  template <typename set_t>
528  bool collect_coverage (set_t *glyphs) const
529  { return glyphs->add_range (first, last); }
530 
531  HBGlyphID16 first; /* First GlyphID in the range */
532  HBGlyphID16 last; /* Last GlyphID in the range */
533  HBUINT16 value; /* Value */
534  public:
536 };
538 
539 
540 struct IndexArray : Array16Of<Index>
541 {
/* Array of 16-bit indices (feature/lookup indices) with subset helpers. */
542  bool intersects (const hb_map_t *indexes) const
/* True iff any element of the array is present in the given index map. */
543  { return hb_any (*this, indexes); }
544 
545  template <typename Iterator,
546  hb_requires (hb_is_iterator (Iterator))>
/* (serialize — its first signature lines, original 547-548, are missing from
 * this extract; `c` is the serialize context, `l` the subset-layout context.)
 * Copies each index from `it` into the serialized array, stopping early if
 * the HB_MAX_LOOKUP_VISIT_COUNT budget (visitLookupIndex) is exhausted. */
549  Iterator it)
550  {
551  if (!it) return;
552  if (unlikely (!c->extend_min ((*this)))) return;
553 
554  for (const auto _ : it)
555  {
556  if (!l->visitLookupIndex()) break;
557 
558  Index i;
559  i = _;
560  c->copy (i);
561  this->len++;
562  }
563  }
564 
/* Copies up to *_count indices starting at start_offset into _indexes,
 * updates *_count to the number written, and returns the total length. */
565  unsigned int get_indexes (unsigned int start_offset,
566  unsigned int *_count /* IN/OUT */,
567  unsigned int *_indexes /* OUT */) const
568  {
569  if (_count)
570  {
571  + this->sub_array (start_offset, _count)
572  | hb_sink (hb_array (_indexes, *_count))
573  ;
574  }
575  return this->len;
576  }
577 
/* Adds every index in the array to the output set. */
578  void add_indexes_to (hb_set_t* output /* OUT */) const
579  {
580  output->add_array (as_array ());
581  }
582 };
583 
584 
585 struct LangSys
586 {
587  unsigned int get_feature_count () const
588  { return featureIndex.len; }
589  hb_tag_t get_feature_index (unsigned int i) const
590  { return featureIndex[i]; }
591  unsigned int get_feature_indexes (unsigned int start_offset,
592  unsigned int *feature_count /* IN/OUT */,
593  unsigned int *feature_indexes /* OUT */) const
594  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
595  void add_feature_indexes_to (hb_set_t *feature_indexes) const
596  { featureIndex.add_indexes_to (feature_indexes); }
597 
598  bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
599  unsigned int get_required_feature_index () const
600  {
601  if (reqFeatureIndex == 0xFFFFu)
602  return Index::NOT_FOUND_INDEX;
603  return reqFeatureIndex;
604  }
605 
607  {
608  TRACE_SERIALIZE (this);
609  return_trace (c->embed (*this));
610  }
611 
612  bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
613  {
614  if (reqFeatureIndex != o.reqFeatureIndex)
615  return false;
616 
617  auto iter =
618  + hb_iter (featureIndex)
619  | hb_filter (feature_index_map)
620  | hb_map (feature_index_map)
621  ;
622 
623  auto o_iter =
624  + hb_iter (o.featureIndex)
625  | hb_filter (feature_index_map)
626  | hb_map (feature_index_map)
627  ;
628 
629  for (; iter && o_iter; iter++, o_iter++)
630  {
631  unsigned a = *iter;
632  unsigned b = *o_iter;
633  if (a != b) return false;
634  }
635 
636  if (iter || o_iter) return false;
637 
638  return true;
639  }
640 
642  {
643  if (!has_required_feature () && !get_feature_count ()) return;
644  if (has_required_feature () &&
645  c->duplicate_feature_map->has (reqFeatureIndex))
646  c->new_feature_indexes->add (get_required_feature_index ());
647 
648  + hb_iter (featureIndex)
649  | hb_filter (c->duplicate_feature_map)
650  | hb_sink (c->new_feature_indexes)
651  ;
652  }
653 
656  const Tag *tag = nullptr) const
657  {
658  TRACE_SUBSET (this);
659  auto *out = c->serializer->start_embed (*this);
660  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
661 
662  out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex) ? l->feature_index_map->get (reqFeatureIndex) : 0xFFFFu;
663 
664  if (!l->visitFeatureIndex (featureIndex.len))
665  return_trace (false);
666 
667  auto it =
668  + hb_iter (featureIndex)
669  | hb_filter (l->feature_index_map)
670  | hb_map (l->feature_index_map)
671  ;
672 
673  bool ret = bool (it);
674  out->featureIndex.serialize (c->serializer, l, it);
675  return_trace (ret);
676  }
677 
679  const Record_sanitize_closure_t * = nullptr) const
680  {
681  TRACE_SANITIZE (this);
682  return_trace (c->check_struct (this) && featureIndex.sanitize (c));
683  }
684 
685  Offset16 lookupOrderZ; /* = Null (reserved for an offset to a
686  * reordering table) */
687  HBUINT16 reqFeatureIndex;/* Index of a feature required for this
688  * language system--if no required features
689  * = 0xFFFFu */
690  IndexArray featureIndex; /* Array of indices into the FeatureList */
691  public:
693 };
695 
696 struct Script
697 {
698  unsigned int get_lang_sys_count () const
699  { return langSys.len; }
700  const Tag& get_lang_sys_tag (unsigned int i) const
701  { return langSys.get_tag (i); }
702  unsigned int get_lang_sys_tags (unsigned int start_offset,
703  unsigned int *lang_sys_count /* IN/OUT */,
704  hb_tag_t *lang_sys_tags /* OUT */) const
705  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
706  const LangSys& get_lang_sys (unsigned int i) const
707  {
709  return this+langSys[i].offset;
710  }
711  bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
712  { return langSys.find_index (tag, index); }
713 
714  bool has_default_lang_sys () const { return defaultLangSys != 0; }
715  const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }
716 
718  unsigned script_index) const
719  {
720  if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
721  if (!c->visitScript ()) return;
722 
723  if (!c->script_langsys_map->has (script_index))
724  {
725  hb_set_t* empty_set = hb_set_create ();
726  if (unlikely (!c->script_langsys_map->set (script_index, empty_set)))
727  {
728  hb_set_destroy (empty_set);
729  return;
730  }
731  }
732 
733  unsigned langsys_count = get_lang_sys_count ();
734  if (has_default_lang_sys ())
735  {
736  //only collect features from non-redundant langsys
737  const LangSys& d = get_default_lang_sys ();
738  if (c->visitLangsys (d.get_feature_count ())) {
739  d.collect_features (c);
740  }
741 
742  for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
743  {
744  const LangSys& l = this+_.first.offset;
745  if (!c->visitLangsys (l.get_feature_count ())) continue;
746  if (l.compare (d, c->duplicate_feature_map)) continue;
747 
748  l.collect_features (c);
749  c->script_langsys_map->get (script_index)->add (_.second);
750  }
751  }
752  else
753  {
754  for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
755  {
756  const LangSys& l = this+_.first.offset;
757  if (!c->visitLangsys (l.get_feature_count ())) continue;
758  l.collect_features (c);
759  c->script_langsys_map->get (script_index)->add (_.second);
760  }
761  }
762  }
763 
766  const Tag *tag) const
767  {
768  TRACE_SUBSET (this);
769  if (!l->visitScript ()) return_trace (false);
770 
771  auto *out = c->serializer->start_embed (*this);
772  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
773 
774  bool defaultLang = false;
775  if (has_default_lang_sys ())
776  {
777  c->serializer->push ();
778  const LangSys& ls = this+defaultLangSys;
779  bool ret = ls.subset (c, l);
780  if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
781  {
782  c->serializer->pop_discard ();
783  out->defaultLangSys = 0;
784  }
785  else
786  {
787  c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
788  defaultLang = true;
789  }
790  }
791 
792  const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
793  if (active_langsys)
794  {
795  unsigned count = langSys.len;
796  + hb_zip (langSys, hb_range (count))
797  | hb_filter (active_langsys, hb_second)
798  | hb_map (hb_first)
799  | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
800  | hb_apply (subset_record_array (l, &(out->langSys), this))
801  ;
802  }
803 
804  return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
805  }
806 
808  const Record_sanitize_closure_t * = nullptr) const
809  {
810  TRACE_SANITIZE (this);
811  return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
812  }
813 
814  protected:
816  defaultLangSys; /* Offset to DefaultLangSys table--from
817  * beginning of Script table--may be Null */
819  langSys; /* Array of LangSysRecords--listed
820  * alphabetically by LangSysTag */
821  public:
823 };
824 
826 
827 
828 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
830 {
832  {
833  TRACE_SANITIZE (this);
834  if (unlikely (!c->check_struct (this))) return_trace (false);
835 
836  /* This subtable has some "history", if you will. Some earlier versions of
837  * Adobe tools calculated the offset of the FeatureParams subtable from the
838  * beginning of the FeatureList table! Now, that is dealt with in the
839  * Feature implementation. But we still need to be able to tell junk from
840  * real data. Note: We don't check that the nameID actually exists.
841  *
842  * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
843  *
844  * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
845  * coming out soon, and that the makeotf program will build a font with a
846  * 'size' feature that is correct by the specification.
847  *
848  * The specification for this feature tag is in the "OpenType Layout Tag
849  * Registry". You can see a copy of this at:
850  * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
851  *
852  * Here is one set of rules to determine if the 'size' feature is built
853  * correctly, or as by the older versions of MakeOTF. You may be able to do
854  * better.
855  *
856  * Assume that the offset to the size feature is according to specification,
857  * and make the following value checks. If it fails, assume the size
858  * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
859  * If this fails, reject the 'size' feature. The older makeOTF's calculated the
860  * offset from the beginning of the FeatureList table, rather than from the
861  * beginning of the 'size' Feature table.
862  *
863  * If "design size" == 0:
864  * fails check
865  *
866  * Else if ("subfamily identifier" == 0 and
867  * "range start" == 0 and
868  * "range end" == 0 and
869  * "range start" == 0 and
870  * "menu name ID" == 0)
871  * passes check: this is the format used when there is a design size
872  * specified, but there is no recommended size range.
873  *
874  * Else if ("design size" < "range start" or
875  * "design size" > "range end" or
876  * "range end" <= "range start" or
877  * "menu name ID" < 256 or
878  * "menu name ID" > 32767 or
879  * menu name ID is not a name ID which is actually in the name table)
880  * fails test
881  * Else
882  * passes test.
883  */
884 
885  if (!designSize)
886  return_trace (false);
887  else if (subfamilyID == 0 &&
888  subfamilyNameID == 0 &&
889  rangeStart == 0 &&
890  rangeEnd == 0)
891  return_trace (true);
892  else if (designSize < rangeStart ||
893  designSize > rangeEnd ||
894  subfamilyNameID < 256 ||
895  subfamilyNameID > 32767)
896  return_trace (false);
897  else
898  return_trace (true);
899  }
900 
902  {
903  TRACE_SUBSET (this);
904  return_trace ((bool) c->serializer->embed (*this));
905  }
906 
907  HBUINT16 designSize; /* Represents the design size in 720/inch
908  * units (decipoints). The design size entry
909  * must be non-zero. When there is a design
910  * size but no recommended size range, the
911  * rest of the array will consist of zeros. */
912  HBUINT16 subfamilyID; /* Has no independent meaning, but serves
913  * as an identifier that associates fonts
914  * in a subfamily. All fonts which share a
915  * Preferred or Font Family name and which
916  * differ only by size range shall have the
917  * same subfamily value, and no fonts which
918  * differ in weight or style shall have the
919  * same subfamily value. If this value is
920  * zero, the remaining fields in the array
921  * will be ignored. */
922  NameID subfamilyNameID;/* If the preceding value is non-zero, this
923  * value must be set in the range 256 - 32767
924  * (inclusive). It records the value of a
925  * field in the name table, which must
926  * contain English-language strings encoded
927  * in Windows Unicode and Macintosh Roman,
928  * and may contain additional strings
929  * localized to other scripts and languages.
930  * Each of these strings is the name an
931  * application should use, in combination
932  * with the family name, to represent the
933  * subfamily in a menu. Applications will
934  * choose the appropriate version based on
935  * their selection criteria. */
936  HBUINT16 rangeStart; /* Large end of the recommended usage range
937  * (inclusive), stored in 720/inch units
938  * (decipoints). */
939  HBUINT16 rangeEnd; /* Small end of the recommended usage range
940  (exclusive), stored in 720/inch units
941  * (decipoints). */
942  public:
944 };
945 
946 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
948 {
950  {
951  TRACE_SANITIZE (this);
952  /* Right now minorVersion is at zero. Which means, any table supports
953  * the uiNameID field. */
954  return_trace (c->check_struct (this));
955  }
956 
958  {
959  TRACE_SUBSET (this);
960  return_trace ((bool) c->serializer->embed (*this));
961  }
962 
963  HBUINT16 version; /* (set to 0): This corresponds to a “minor”
964  * version number. Additional data may be
965  * added to the end of this Feature Parameters
966  * table in the future. */
967 
968  NameID uiNameID; /* The 'name' table name ID that specifies a
969  * string (or strings, for multiple languages)
970  * for a user-interface label for this
971  * feature. The values of uiLabelNameId and
972  * sampleTextNameId are expected to be in the
973  * font-specific name ID range (256-32767),
974  * though that is not a requirement in this
975  * Feature Parameters specification. The
976  * user-interface label for the feature can
977  * be provided in multiple languages. An
978  * English string should be included as a
979  * fallback. The string should be kept to a
980  * minimal length to fit comfortably with
981  * different application interfaces. */
982  public:
984 };
985 
986 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
988 {
989  unsigned
990  get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
991  {
992  if (char_count)
993  {
994  + characters.sub_array (start_offset, char_count)
995  | hb_sink (hb_array (chars, *char_count))
996  ;
997  }
998  return characters.len;
999  }
1000 
1001  unsigned get_size () const
1002  { return min_size + characters.len * HBUINT24::static_size; }
1003 
1005  {
1006  TRACE_SUBSET (this);
1007  return_trace ((bool) c->serializer->embed (*this));
1008  }
1009 
1011  {
1012  TRACE_SANITIZE (this);
1013  return_trace (c->check_struct (this) &&
1014  characters.sanitize (c));
1015  }
1016 
1017  HBUINT16 format; /* Format number is set to 0. */
1018  NameID featUILableNameID; /* The ‘name’ table name ID that
1019  * specifies a string (or strings,
1020  * for multiple languages) for a
1021  * user-interface label for this
1022  * feature. (May be NULL.) */
1023  NameID featUITooltipTextNameID;/* The ‘name’ table name ID that
1024  * specifies a string (or strings,
1025  * for multiple languages) that an
1026  * application can use for tooltip
1027  * text for this feature. (May be
1028  * nullptr.) */
1029  NameID sampleTextNameID; /* The ‘name’ table name ID that
1030  * specifies sample text that
1031  * illustrates the effect of this
1032  * feature. (May be NULL.) */
1033  HBUINT16 numNamedParameters; /* Number of named parameters. (May
1034  * be zero.) */
1035  NameID firstParamUILabelNameID;/* The first ‘name’ table name ID
1036  * used to specify strings for
1037  * user-interface labels for the
1038  * feature parameters. (Must be zero
1039  * if numParameters is zero.) */
1041  characters; /* Array of the Unicode Scalar Value
1042  * of the characters for which this
1043  * feature provides glyph variants.
1044  * (May be zero.) */
1045  public:
1047 };
1048 
1050 {
1052  {
1053 #ifdef HB_NO_LAYOUT_FEATURE_PARAMS
1054  return true;
1055 #endif
1056  TRACE_SANITIZE (this);
1057  if (tag == HB_TAG ('s','i','z','e'))
1058  return_trace (u.size.sanitize (c));
1059  if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
1060  return_trace (u.stylisticSet.sanitize (c));
1061  if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
1062  return_trace (u.characterVariants.sanitize (c));
1063  return_trace (true);
1064  }
1065 
1066  bool subset (hb_subset_context_t *c, const Tag* tag) const
1067  {
1068  TRACE_SUBSET (this);
1069  if (!tag) return_trace (false);
1070  if (*tag == HB_TAG ('s','i','z','e'))
1071  return_trace (u.size.subset (c));
1072  if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
1073  return_trace (u.stylisticSet.subset (c));
1074  if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
1075  return_trace (u.characterVariants.subset (c));
1076  return_trace (false);
1077  }
1078 
1079 #ifndef HB_NO_LAYOUT_FEATURE_PARAMS
1081  {
1082  if (tag == HB_TAG ('s','i','z','e'))
1083  return u.size;
1084  return Null (FeatureParamsSize);
1085  }
1087  {
1088  if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
1089  return u.stylisticSet;
1091  }
1093  {
1094  if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
1095  return u.characterVariants;
1097  }
1098 #endif
1099 
1100  private:
1101  union {
1105  } u;
1106  public:
1108 };
1109 
1110 struct Feature
1111 {
  /* Number of lookups this feature activates. */
1112  unsigned int get_lookup_count () const
1113  { return lookupIndex.len; }
  /* i'th LookupList index (no bounds documented here; hb arrays
   * return Null on out-of-range). */
1114  hb_tag_t get_lookup_index (unsigned int i) const
1115  { return lookupIndex[i]; }
  /* Copy up to *lookup_count indices starting at start_index into
   * lookup_tags; returns total count. */
1116  unsigned int get_lookup_indexes (unsigned int start_index,
1117  unsigned int *lookup_count /* IN/OUT */,
1118  unsigned int *lookup_tags /* OUT */) const
1119  { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
1120  void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
1121  { lookupIndex.add_indexes_to (lookup_indexes); }
1122 
  /* Dereference the FeatureParams offset (signature stripped in
   * extraction; presumably returns const FeatureParams&). */
1124  { return this+featureParams; }
1125 
1126  bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const
1127  { return lookupIndex.intersects (lookup_indexes); }
1128 
  /* subset(): signature line stripped — takes the subset context, a
   * layout context `l`, and the feature tag. Remaps lookup indices
   * through l->lookup_index_map. */
1131  const Tag *tag = nullptr) const
1132  {
1133  TRACE_SUBSET (this);
1134  auto *out = c->serializer->start_embed (*this);
1135  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
1136 
1137  out->featureParams.serialize_subset (c, featureParams, this, tag);
1138 
1139  auto it =
1140  + hb_iter (lookupIndex)
1141  | hb_filter (l->lookup_index_map)
1142  | hb_map (l->lookup_index_map)
1143  ;
1144 
1145  out->lookupIndex.serialize (c->serializer, l, it);
1146  // The decision to keep or drop this feature is already made before we get here
1147  // so always retain it.
1148  return_trace (true);
1149  }
1150 
  /* sanitize(): signature line stripped — takes the sanitize context
   * plus an optional Record closure carrying the feature tag. */
1152  const Record_sanitize_closure_t *closure = nullptr) const
1153  {
1154  TRACE_SANITIZE (this);
1155  if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
1156  return_trace (false);
1157 
1158  /* Some earlier versions of Adobe tools calculated the offset of the
1159  * FeatureParams subtable from the beginning of the FeatureList table!
1160  *
1161  * If sanitizing "failed" for the FeatureParams subtable, try it with the
1162  * alternative location. We would know sanitize "failed" if old value
1163  * of the offset was non-zero, but it's zeroed now.
1164  *
1165  * Only do this for the 'size' feature, since at the time of the faulty
1166  * Adobe tools, only the 'size' feature had FeatureParams defined.
1167  */
1168 
1169  if (likely (featureParams.is_null ()))
1170  return_trace (true);
1171 
1172  unsigned int orig_offset = featureParams;
1173  if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
1174  return_trace (false);
1175 
  /* featureParams == 0 here means the sanitizer zeroed a bad offset;
   * retry it rebased against the FeatureList start. */
1176  if (featureParams == 0 && closure &&
1177  closure->tag == HB_TAG ('s','i','z','e') &&
1178  closure->list_base && closure->list_base < this)
1179  {
1180  unsigned int new_offset_int = orig_offset -
1181  (((char *) this) - ((char *) closure->list_base));
1182 
1183  Offset16To<FeatureParams> new_offset;
1184  /* Check that it would not overflow. */
1185  new_offset = new_offset_int;
1186  if (new_offset == new_offset_int &&
1187  c->try_set (&featureParams, new_offset_int) &&
1188  !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
1189  return_trace (false);
1190  }
1191 
1192  return_trace (true);
1193  }
1194 
  /* Field type line stripped — featureParams is an Offset16To<FeatureParams>. */
1196  featureParams; /* Offset to Feature Parameters table (if one
1197  * has been defined for the feature), relative
1198  * to the beginning of the Feature Table; = Null
1199  * if not required */
1200  IndexArray lookupIndex; /* Array of LookupList indices */
1201  public:
1203 };
1204 
1206 
1207 
  /* Struct name line stripped in extraction — this is the LookupFlag
   * bit-field enumeration shared by GSUB/GPOS lookups. */
1209 {
1210  enum Flags {
1211  RightToLeft = 0x0001u,
1212  IgnoreBaseGlyphs = 0x0002u,
1213  IgnoreLigatures = 0x0004u,
1214  IgnoreMarks = 0x0008u,
  /* Union of the three Ignore* bits. */
1215  IgnoreFlags = 0x000Eu,
  /* NOTE: the UseMarkFilteringSet (0x0010) enumerator line appears to
   * have been stripped in extraction; it is referenced elsewhere in
   * this file. */
1217  Reserved = 0x00E0u,
  /* High byte selects the mark attachment class to consider. */
1218  MarkAttachmentType = 0xFF00u
1219  };
1220  public:
1222 };
1223 
1224 } /* namespace OT */
1225 /* This has to be outside the namespace. */
1227 namespace OT {
1228 
1229 struct Lookup
1230 {
1231  unsigned int get_subtable_count () const { return subTable.len; }
1232 
  /* Typed views of the untyped subtable offset array; signature lines
   * were stripped (const and non-const get_subtables<TSubTable> ()). */
1233  template <typename TSubTable>
1235  { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
1236  template <typename TSubTable>
1238  { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }
1239 
1240  template <typename TSubTable>
1241  const TSubTable& get_subtable (unsigned int i) const
1242  { return this+get_subtables<TSubTable> ()[i]; }
1243  template <typename TSubTable>
1244  TSubTable& get_subtable (unsigned int i)
1245  { return this+get_subtables<TSubTable> ()[i]; }
1246 
  /* Byte size of this Lookup, including the optional trailing
   * markFilteringSet field when the flag says it is present. */
1247  unsigned int get_size () const
1248  {
1249  const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
1250  if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1251  return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
1252  return (const char *) &markFilteringSet - (const char *) this;
1253  }
1254 
1255  unsigned int get_type () const { return lookupType; }
1256 
1257  /* lookup_props is a 32-bit integer where the lower 16-bit is LookupFlag and
1258  * higher 16-bit is mark-filtering-set if the lookup uses one.
1259  * Not to be confused with glyph_props which is very similar. */
1260  uint32_t get_props () const
1261  {
1262  unsigned int flag = lookupFlag;
  /* NOTE(review): the guarding `if (lookupFlag &
   * LookupFlag::UseMarkFilteringSet)` line was stripped in extraction
   * (doxygen line 1263). */
1264  {
1265  const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1266  flag += (markFilteringSet << 16);
1267  }
1268  return flag;
1269  }
1270 
  /* Dispatch the sub-lookup context over every subtable, stopping when
   * the context says so. */
1271  template <typename TSubTable, typename context_t, typename ...Ts>
1272  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1273  {
1274  unsigned int lookup_type = get_type ();
1275  TRACE_DISPATCH (this, lookup_type);
1276  unsigned int count = get_subtable_count ();
1277  for (unsigned int i = 0; i < count; i++) {
1278  typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, std::forward<Ts> (ds)...);
1279  if (c->stop_sublookup_iteration (r))
1280  return_trace (r);
1281  }
1282  return_trace (c->default_return_value ());
1283  }
1284 
  /* serialize(): signature line stripped — first parameter is the
   * hb_serialize_context_t *c. Writes header and reserves the
   * subtable offset array; subtables are serialized by the caller. */
1286  unsigned int lookup_type,
1287  uint32_t lookup_props,
1288  unsigned int num_subtables)
1289  {
1290  TRACE_SERIALIZE (this);
1291  if (unlikely (!c->extend_min (this))) return_trace (false);
1292  lookupType = lookup_type;
1293  lookupFlag = lookup_props & 0xFFFFu;
1294  if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
1295  if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1296  {
1297  if (unlikely (!c->extend (this))) return_trace (false);
1298  HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1299  markFilteringSet = lookup_props >> 16;
1300  }
1301  return_trace (true);
1302  }
1303 
  /* subset(): signature line stripped — takes hb_subset_context_t *c.
   * Keeps only subtables that intersect the retained glyph set;
   * returns the number of kept subtables (0 means drop the lookup). */
1304  template <typename TSubTable>
1306  {
1307  TRACE_SUBSET (this);
1308  auto *out = c->serializer->start_embed (*this);
1309  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
1310  out->lookupType = lookupType;
1311  out->lookupFlag = lookupFlag;
1312 
1313  const hb_set_t *glyphset = c->plan->glyphset_gsub ();
1314  unsigned int lookup_type = get_type ();
1315  + hb_iter (get_subtables <TSubTable> ())
1316  | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
1317  | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
1318  ;
1319 
1320  if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1321  {
1322  if (unlikely (!c->serializer->extend (out))) return_trace (false);
1323  const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1324  HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
1325  outMarkFilteringSet = markFilteringSet;
1326  }
1327 
1328  return_trace (out->subTable.len);
1329  }
1330 
  /* sanitize(): signature line stripped — takes hb_sanitize_context_t *c. */
1331  template <typename TSubTable>
1333  {
1334  TRACE_SANITIZE (this);
1335  if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
1336 
1337  unsigned subtables = get_subtable_count ();
1338  if (unlikely (!c->visit_subtables (subtables))) return_trace (false);
1339 
1340  if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1341  {
1342  const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1343  if (!markFilteringSet.sanitize (c)) return_trace (false);
1344  }
1345 
1346  if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
1347  return_trace (false);
1348 
1349  if (unlikely (get_type () == TSubTable::Extension && subtables && !c->get_edit_count ()))
1350  {
1351  /* The spec says all subtables of an Extension lookup should
1352  * have the same type, which shall not be the Extension type
1353  * itself (but we already checked for that).
1354  * This is specially important if one has a reverse type!
1355  *
1356  * We only do this if sanitizer edit_count is zero. Otherwise,
1357  * some of the subtables might have become insane after they
1358  * were sanity-checked by the edits of subsequent subtables.
1359  * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
1360  */
1361  unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
1362  for (unsigned int i = 1; i < subtables; i++)
1363  if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
1364  return_trace (false);
1365  }
1366  return_trace (true);
1367  }
1368 
1369  private:
1370  HBUINT16 lookupType; /* Different enumerations for GSUB and GPOS */
1371  HBUINT16 lookupFlag; /* Lookup qualifiers */
  /* Field type line stripped — subTable is an Array16Of<Offset16>. */
1373  subTable; /* Array of SubTables */
1374 /*HBUINT16 markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
1375  * structure. This field is only present if bit
1376  * UseMarkFilteringSet of lookup flags is set. */
1377  public:
1378  DEFINE_SIZE_ARRAY (6, subTable);
1379 };
1380 
1382 
  /* Struct name line stripped in extraction — presumably this is the
   * offset-list-of-lookups wrapper (List16OfOffset16To<TLookup>-based). */
1383 template <typename TLookup>
1385 {
  /* subset(): signature lines stripped — takes the subset context and
   * a layout context `l` providing lookup_index_map. Keeps only
   * lookups the map retains. */
1388  {
1389  TRACE_SUBSET (this);
1390  auto *out = c->serializer->start_embed (this);
1391  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
1392 
1393  unsigned count = this->len;
1394  + hb_zip (*this, hb_range (count))
1395  | hb_filter (l->lookup_index_map, hb_second)
1396  | hb_map (hb_first)
1397  | hb_apply (subset_offset_array (c, *out, this))
1398  ;
1399  return_trace (true);
1400  }
1401 
  /* sanitize(): both the signature and the body expression were
   * stripped in extraction. */
1403  {
1404  TRACE_SANITIZE (this);
1406  }
1407 };
1408 
1409 
1410 /*
1411  * Coverage Table
1412  */
1413 
  /* Struct name line stripped — this is CoverageFormat1: a sorted
   * array of individual glyph IDs (see `friend struct Coverage`). */
1415 {
1416  friend struct Coverage;
1417 
1418  private:
  /* Coverage index of glyph_id, or NOT_COVERED; index == position in
   * the sorted glyphArray. */
1419  unsigned int get_coverage (hb_codepoint_t glyph_id) const
1420  {
1421  unsigned int i;
1422  glyphArray.bfind (glyph_id, &i, HB_NOT_FOUND_STORE, NOT_COVERED);
1423  return i;
1424  }
1425 
  /* Write the glyph iterator out as a format-1 array (the hb_requires
   * constraint line was stripped in extraction). */
1426  template <typename Iterator,
1428  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1429  {
1430  TRACE_SERIALIZE (this);
1431  return_trace (glyphArray.serialize (c, glyphs));
1432  }
1433 
1434  bool sanitize (hb_sanitize_context_t *c) const
1435  {
1436  TRACE_SANITIZE (this);
1437  return_trace (glyphArray.sanitize (c));
1438  }
1439 
1440  bool intersects (const hb_set_t *glyphs) const
1441  {
1442  /* TODO Speed up, using hb_set_next() and bsearch()? */
1443  for (const auto& g : glyphArray.as_array ())
1444  if (glyphs->has (g))
1445  return true;
1446  return false;
1447  }
1448  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1449  { return glyphs->has (glyphArray[index]); }
1450 
  /* Copy the covered glyphs that are also in |glyphs| into
   * |intersect_glyphs|. */
1451  void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1452  {
1453  unsigned count = glyphArray.len;
1454  for (unsigned i = 0; i < count; i++)
1455  if (glyphs->has (glyphArray[i]))
1456  intersect_glyphs->add (glyphArray[i]);
1457  }
1458 
  /* Add all covered glyphs to |glyphs|; may return false if the array
   * looks unsorted (see Coverage::collect_coverage). */
1459  template <typename set_t>
1460  bool collect_coverage (set_t *glyphs) const
1461  { return glyphs->add_sorted_array (glyphArray.as_array ()); }
1462 
1463  public:
1464  /* Older compilers need this to be public. */
  /* Forward iterator over the covered glyphs, in glyph-id order. */
1465  struct iter_t
1466  {
1467  void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; }
1468  void fini () {}
1469  bool more () const { return i < c->glyphArray.len; }
1470  void next () { i++; }
1471  hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
1472  bool operator != (const iter_t& o) const
1473  { return i != o.i || c != o.c; }
1474 
1475  private:
1476  const struct CoverageFormat1 *c;
1477  unsigned int i;
1478  };
1479  private:
1480 
1481  protected:
1482  HBUINT16 coverageFormat; /* Format identifier--format = 1 */
  /* Field type line stripped — glyphArray is a SortedArray16Of<HBGlyphID16>. */
1484  glyphArray; /* Array of GlyphIDs--in numerical order */
1485  public:
1487 };
1488 
  /* Struct name line stripped — this is CoverageFormat2: sorted,
   * non-overlapping glyph ranges, each carrying the coverage index of
   * its first glyph. */
1490 {
1491  friend struct Coverage;
1492 
1493  private:
  /* bsearch returns the Null record on miss, whose first > last,
   * hence the likely() guard below. */
1494  unsigned int get_coverage (hb_codepoint_t glyph_id) const
1495  {
1496  const RangeRecord &range = rangeRecord.bsearch (glyph_id);
1497  return likely (range.first <= range.last)
1498  ? (unsigned int) range.value + (glyph_id - range.first)
1499  : NOT_COVERED;
1500  }
1501 
  /* Write a sorted glyph iterator as ranges (the hb_requires
   * constraint line was stripped in extraction). The iterator is
   * walked twice: once to count ranges, once to fill them. */
1502  template <typename Iterator,
1504  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1505  {
1506  TRACE_SERIALIZE (this);
1507  if (unlikely (!c->extend_min (this))) return_trace (false);
1508 
1509  if (unlikely (!glyphs))
1510  {
1511  rangeRecord.len = 0;
1512  return_trace (true);
1513  }
1514 
1515  /* TODO(iter) Write more efficiently? */
1516 
  /* Pass 1: count the maximal consecutive runs. last starts at -2 so
   * the first glyph always opens a range. */
1517  unsigned num_ranges = 0;
1518  hb_codepoint_t last = (hb_codepoint_t) -2;
1519  for (auto g: glyphs)
1520  {
1521  if (last + 1 != g)
1522  num_ranges++;
1523  last = g;
1524  }
1525 
1526  if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
1527 
  /* Pass 2: fill first/last/value; value is the running coverage
   * index of the range's first glyph. */
1528  unsigned count = 0;
1529  unsigned range = (unsigned) -1;
1530  last = (hb_codepoint_t) -2;
1531  for (auto g: glyphs)
1532  {
1533  if (last + 1 != g)
1534  {
1535  range++;
1536  rangeRecord[range].first = g;
1537  rangeRecord[range].value = count;
1538  }
1539  rangeRecord[range].last = g;
1540  last = g;
1541  count++;
1542  }
1543 
1544  return_trace (true);
1545  }
1546 
1547  bool sanitize (hb_sanitize_context_t *c) const
1548  {
1549  TRACE_SANITIZE (this);
1550  return_trace (rangeRecord.sanitize (c));
1551  }
1552 
1553  bool intersects (const hb_set_t *glyphs) const
1554  {
1555  /* TODO Speed up, using hb_set_next() and bsearch()? */
1556  /* TODO(iter) Rewrite as dagger. */
1557  for (const auto& range : rangeRecord.as_array ())
1558  if (range.intersects (glyphs))
1559  return true;
1560  return false;
1561  }
  /* True if the glyph holding coverage index |index| is in |glyphs|.
   * Ranges are ordered by coverage value, so we can stop early once
   * index falls before the current range. */
1562  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1563  {
1564  /* TODO(iter) Rewrite as dagger. */
1565  for (const auto& range : rangeRecord.as_array ())
1566  {
1567  if (range.value <= index &&
1568  index < (unsigned int) range.value + (range.last - range.first) &&
1569  range.intersects (glyphs))
1570  return true;
1571  else if (index < range.value)
1572  return false;
1573  }
1574  return false;
1575  }
1576 
1577  void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1578  {
1579  for (const auto& range : rangeRecord.as_array ())
1580  {
1581  if (!range.intersects (glyphs)) continue;
1582  for (hb_codepoint_t g = range.first; g <= range.last; g++)
1583  if (glyphs->has (g)) intersect_glyphs->add (g);
1584  }
1585  }
1586 
1587  template <typename set_t>
1588  bool collect_coverage (set_t *glyphs) const
1589  {
1590  unsigned int count = rangeRecord.len;
1591  for (unsigned int i = 0; i < count; i++)
1592  if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
1593  return false;
1594  return true;
1595  }
1596 
1597  public:
1598  /* Older compilers need this to be public. */
  /* Forward iterator over covered glyphs; tracks (range index i,
   * glyph j, running coverage index). Note hb arrays return the Null
   * record on out-of-range, so rangeRecord[0] is safe when len == 0. */
1599  struct iter_t
1600  {
1601  void init (const CoverageFormat2 &c_)
1602  {
1603  c = &c_;
1604  coverage = 0;
1605  i = 0;
1606  j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
1607  if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
1608  {
1609  /* Broken table. Skip. */
1610  i = c->rangeRecord.len;
1611  }
1612  }
1613  void fini () {}
1614  bool more () const { return i < c->rangeRecord.len; }
1615  void next ()
1616  {
1617  if (j >= c->rangeRecord[i].last)
1618  {
1619  i++;
1620  if (more ())
1621  {
1622  unsigned int old = coverage;
1623  j = c->rangeRecord[i].first;
1624  coverage = c->rangeRecord[i].value;
1625  if (unlikely (coverage != old + 1))
1626  {
1627  /* Broken table. Skip. Important to avoid DoS.
1628  * Also, our callers depend on coverage being
1629  * consecutive and monotonically increasing,
1630  * ie. iota(). */
1631  i = c->rangeRecord.len;
1632  return;
1633  }
1634  }
1635  return;
1636  }
1637  coverage++;
1638  j++;
1639  }
1640  hb_codepoint_t get_glyph () const { return j; }
1641  bool operator != (const iter_t& o) const
1642  { return i != o.i || j != o.j || c != o.c; }
1643 
1644  private:
1645  const struct CoverageFormat2 *c;
1646  unsigned int i, coverage;
1647  hb_codepoint_t j;
1648  };
1649  private:
1650 
1651  protected:
1652  HBUINT16 coverageFormat; /* Format identifier--format = 2 */
  /* Field type line stripped — rangeRecord is a
   * SortedArray16Of<RangeRecord>. */
1654  rangeRecord; /* Array of glyph ranges--ordered by
1655  * Start GlyphID. rangeCount entries
1656  * long */
1657  public:
1659 };
1660 
  /* Format-dispatching Coverage table: maps glyph id -> coverage index. */
1661 struct Coverage
1662 {
1663  /* Has interface. */
1664  static constexpr unsigned SENTINEL = NOT_COVERED;
1665  typedef unsigned int value_t;
1666  value_t operator [] (hb_codepoint_t k) const { return get (k); }
1667  bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
1668  /* Predicate. */
1669  bool operator () (hb_codepoint_t k) const { return has (k); }
1670 
1671  unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
1672  unsigned int get_coverage (hb_codepoint_t glyph_id) const
1673  {
1674  switch (u.format) {
1675  case 1: return u.format1.get_coverage (glyph_id);
1676  case 2: return u.format2.get_coverage (glyph_id);
1677  default:return NOT_COVERED;
1678  }
1679  }
1680 
  /* Pick the smaller representation: format 1 costs one uint16 per
   * glyph, format 2 three uint16 per range (the hb_requires line was
   * stripped in extraction). */
1682  template <typename Iterator,
1683  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1684  {
1685  TRACE_SERIALIZE (this);
1686  if (unlikely (!c->extend_min (this))) return_trace (false);
1687 
1688  unsigned count = 0;
1689  unsigned num_ranges = 0;
1690  hb_codepoint_t last = (hb_codepoint_t) -2;
1691  for (auto g: glyphs)
1692  {
1693  if (last + 1 != g)
1694  num_ranges++;
1695  last = g;
1696  count++;
1697  }
1698  u.format = count <= num_ranges * 3 ? 1 : 2;
1699 
1700  switch (u.format)
1701  {
1702  case 1: return_trace (u.format1.serialize (c, glyphs));
1703  case 2: return_trace (u.format2.serialize (c, glyphs));
1704  default:return_trace (false);
1705  }
1706  }
1707 
  /* subset(): signature line stripped — takes hb_subset_context_t *c.
   * Filters to the retained glyph set and remaps glyph ids. */
1709  {
1710  TRACE_SUBSET (this);
1711  const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1712  const hb_map_t &glyph_map = *c->plan->glyph_map;
1713 
1714  auto it =
1715  + iter ()
1716  | hb_filter (glyphset)
1717  | hb_map_retains_sorting (glyph_map)
1718  ;
1719 
  /* An empty coverage is still serialized; ret just reports whether
   * anything survived. */
1720  bool ret = bool (it);
1721  Coverage_serialize (c->serializer, it);
1722  return_trace (ret);
1723  }
1724 
  /* sanitize(): signature line stripped — takes hb_sanitize_context_t *c. */
1726  {
1727  TRACE_SANITIZE (this);
1728  if (!u.format.sanitize (c)) return_trace (false);
1729  switch (u.format)
1730  {
1731  case 1: return_trace (u.format1.sanitize (c));
1732  case 2: return_trace (u.format2.sanitize (c));
1733  default:return_trace (true);
1734  }
1735  }
1736 
1737  bool intersects (const hb_set_t *glyphs) const
1738  {
1739  switch (u.format)
1740  {
1741  case 1: return u.format1.intersects (glyphs);
1742  case 2: return u.format2.intersects (glyphs);
1743  default:return false;
1744  }
1745  }
1746  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1747  {
1748  switch (u.format)
1749  {
1750  case 1: return u.format1.intersects_coverage (glyphs, index);
1751  case 2: return u.format2.intersects_coverage (glyphs, index);
1752  default:return false;
1753  }
1754  }
1755 
1756  /* Might return false if array looks unsorted.
1757  * Used for faster rejection of corrupt data. */
1758  template <typename set_t>
1759  bool collect_coverage (set_t *glyphs) const
1760  {
1761  switch (u.format)
1762  {
1763  case 1: return u.format1.collect_coverage (glyphs);
1764  case 2: return u.format2.collect_coverage (glyphs);
1765  default:return false;
1766  }
1767  }
1768 
1769  void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1770  {
1771  switch (u.format)
1772  {
1773  case 1: return u.format1.intersected_coverage_glyphs (glyphs, intersect_glyphs);
1774  case 2: return u.format2.intersected_coverage_glyphs (glyphs, intersect_glyphs);
1775  default:return ;
1776  }
1777  }
1778 
  /* Sorted glyph iterator that forwards to the active format's iter_t. */
1779  struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
1780  {
1781  static constexpr bool is_sorted_iterator = true;
1782  iter_t (const Coverage &c_ = Null (Coverage))
1783  {
1784  memset (this, 0, sizeof (*this));
1785  format = c_.u.format;
1786  switch (format)
1787  {
1788  case 1: u.format1.init (c_.u.format1); return;
1789  case 2: u.format2.init (c_.u.format2); return;
1790  default: return;
1791  }
1792  }
1793  bool __more__ () const
1794  {
1795  switch (format)
1796  {
1797  case 1: return u.format1.more ();
1798  case 2: return u.format2.more ();
1799  default:return false;
1800  }
1801  }
1802  void __next__ ()
1803  {
1804  switch (format)
1805  {
1806  case 1: u.format1.next (); break;
1807  case 2: u.format2.next (); break;
1808  default: break;
1809  }
1810  }
  /* __item_t__ typedef line stripped in extraction. */
1812  __item_t__ __item__ () const { return get_glyph (); }
1813 
  /* get_glyph(): signature line stripped — returns hb_codepoint_t. */
1815  {
1816  switch (format)
1817  {
1818  case 1: return u.format1.get_glyph ();
1819  case 2: return u.format2.get_glyph ();
1820  default:return 0;
1821  }
1822  }
1823  bool operator != (const iter_t& o) const
1824  {
1825  if (format != o.format) return true;
1826  switch (format)
1827  {
1828  case 1: return u.format1 != o.u.format1;
1829  case 2: return u.format2 != o.u.format2;
1830  default:return false;
1831  }
1832  }
1833 
1834  private:
1835  unsigned int format;
1836  union {
1837  CoverageFormat2::iter_t format2; /* Put this one first since it's larger; helps shut up compiler. */
  /* format1 member line stripped in extraction. */
1839  } u;
1840  };
1841  iter_t iter () const { return iter_t (*this); }
1842 
1843  protected:
1844  union {
1845  HBUINT16 format; /* Format identifier */
  /* format1/format2 member lines stripped in extraction. */
1848  } u;
1849  public:
1851 };
1852 
  /* Helper: serialize a glyph iterator as a fresh Coverage table at
   * the serializer's current position. */
1853 template<typename Iterator>
1854 static inline void
1855 Coverage_serialize (hb_serialize_context_t *c,
1856  Iterator it)
1857 { c->start_embed<Coverage> ()->serialize (c, it); }
1858 
  /* Compact class values to a dense range and serialize the result as
   * a ClassDef. NOTE(review): one parameter line (doxygen 1861,
   * presumably the sorted glyph vector `glyphs`) was stripped in
   * extraction. */
1859 static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
1860  const hb_map_t &gid_klass_map,
1862  const hb_set_t &klasses,
1863  bool use_class_zero,
1864  hb_map_t *klass_map /*INOUT*/)
1865 {
  /* No output map requested: serialize the original classes verbatim. */
1866  if (!klass_map)
1867  {
1868  ClassDef_serialize (c, hb_zip (glyphs.iter (), + glyphs.iter ()
1869  | hb_map (gid_klass_map)));
1870  return;
1871  }
1872 
1873  /* any glyph not assigned a class value falls into Class zero (0),
1874  * if any glyph assigned to class 0, remapping must start with 0->0*/
1875  if (!use_class_zero)
1876  klass_map->set (0, 0);
1877 
  /* Assign new, dense class numbers in iteration order. */
1878  unsigned idx = klass_map->has (0) ? 1 : 0;
1879  for (const unsigned k: klasses.iter ())
1880  {
1881  if (klass_map->has (k)) continue;
1882  klass_map->set (k, idx);
1883  idx++;
1884  }
1885 
1886  auto it =
1887  + glyphs.iter ()
1888  | hb_map_retains_sorting ([&] (const HBGlyphID16& gid) -> hb_pair_t<hb_codepoint_t, unsigned>
1889  {
1890  unsigned new_klass = klass_map->get (gid_klass_map[gid]);
1891  return hb_pair ((hb_codepoint_t)gid, new_klass);
1892  })
1893  ;
1894 
1895  c->propagate_error (glyphs, klasses);
1896  ClassDef_serialize (c, it);
1897 }
1898 
1899 /*
1900  * Class Definition Table
1901  */
1902 
  /* Struct name line stripped — this is ClassDefFormat1: one class
   * value per glyph, for a contiguous glyph range starting at
   * startGlyph. */
1904 {
1905  friend struct ClassDef;
1906 
1907  private:
  /* Class of glyph_id; hb arrays return Null (0) on out-of-range, so
   * glyphs outside the range get class 0. */
1908  unsigned int get_class (hb_codepoint_t glyph_id) const
1909  {
1910  return classValue[(unsigned int) (glyph_id - startGlyph)];
1911  }
1912 
  /* Serialize from a sorted (gid, class) pair iterator. Gaps between
   * gids are filled with class 0. */
1913  template<typename Iterator,
1914  hb_requires (hb_is_iterator (Iterator))>
1915  bool serialize (hb_serialize_context_t *c,
1916  Iterator it)
1917  {
1918  TRACE_SERIALIZE (this);
1919  if (unlikely (!c->extend_min (this))) return_trace (false);
1920 
1921  if (unlikely (!it))
1922  {
1923  classFormat = 1;
1924  startGlyph = 0;
1925  classValue.len = 0;
1926  return_trace (true);
1927  }
1928 
1929  hb_codepoint_t glyph_min = (*it).first;
1930  hb_codepoint_t glyph_max = + it
1931  | hb_map (hb_first)
1932  | hb_reduce (hb_max, 0u);
1933  unsigned glyph_count = glyph_max - glyph_min + 1;
1934 
1935  startGlyph = glyph_min;
1936  if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
1937  for (const hb_pair_t<hb_codepoint_t, unsigned> gid_klass_pair : + it)
1938  {
1939  unsigned idx = gid_klass_pair.first - glyph_min;
1940  classValue[idx] = gid_klass_pair.second;
1941  }
1942  return_trace (true);
1943  }
1944 
  /* Subset to the retained glyph set, remapping classes through
   * klass_map. NOTE(review): local declarations were stripped in
   * extraction — doxygen 1955 (presumably the sorted `glyphs` vector)
   * and 1959-1960 (presumably `start`/`end` of the covered range). */
1945  bool subset (hb_subset_context_t *c,
1946  hb_map_t *klass_map = nullptr /*OUT*/,
1947  bool keep_empty_table = true,
1948  bool use_class_zero = true,
1949  const Coverage* glyph_filter = nullptr) const
1950  {
1951  TRACE_SUBSET (this);
1952  const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1953  const hb_map_t &glyph_map = *c->plan->glyph_map;
1954 
1956  hb_set_t orig_klasses;
1957  hb_map_t gid_org_klass_map;
1958 
1961 
  /* Collect (new-gid, class) for retained glyphs with non-zero class. */
1962  for (const hb_codepoint_t gid : + hb_range (start, end)
1963  | hb_filter (glyphset))
1964  {
1965  if (glyph_filter && !glyph_filter->has(gid)) continue;
1966 
1967  unsigned klass = classValue[gid - start];
1968  if (!klass) continue;
1969 
1970  glyphs.push (glyph_map[gid]);
1971  gid_org_klass_map.set (glyph_map[gid], klass);
1972  orig_klasses.add (klass);
1973  }
1974 
1975  unsigned glyph_count = glyph_filter
1976  ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
1977  : glyphset.get_population ();
1978  use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
1979  ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
1980  glyphs, orig_klasses, use_class_zero, klass_map);
1981  return_trace (keep_empty_table || (bool) glyphs);
1982  }
1983 
1984  bool sanitize (hb_sanitize_context_t *c) const
1985  {
1986  TRACE_SANITIZE (this);
1987  return_trace (c->check_struct (this) && classValue.sanitize (c));
1988  }
1989 
  /* Add the glyphs that have a non-zero class to |glyphs|, flushed as
   * runs delimited by class-0 entries. */
1990  template <typename set_t>
1991  bool collect_coverage (set_t *glyphs) const
1992  {
1993  unsigned int start = 0;
1994  unsigned int count = classValue.len;
1995  for (unsigned int i = 0; i < count; i++)
1996  {
1997  if (classValue[i])
1998  continue;
1999 
  /* classValue[i] == 0 ends the current run [start, i). */
2000  if (start != i)
2001  if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
2002  return false;
2003 
2004  start = i + 1;
2005  }
2006  if (start != count)
2007  if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
2008  return false;
2009 
2010  return true;
2011  }
2012 
2013  template <typename set_t>
2014  bool collect_class (set_t *glyphs, unsigned klass) const
2015  {
2016  unsigned int count = classValue.len;
2017  for (unsigned int i = 0; i < count; i++)
2018  if (classValue[i] == klass) glyphs->add (startGlyph + i);
2019  return true;
2020  }
2021 
  /* True if |glyphs| contains a glyph with a non-zero class.
   * NOTE(review): the declarations of `start`/`end` (doxygen
   * 2025-2026) were stripped in extraction. */
2022  bool intersects (const hb_set_t *glyphs) const
2023  {
2024  /* TODO Speed up, using hb_set_next()? */
2027  for (hb_codepoint_t iter = startGlyph - 1;
2028  hb_set_next (glyphs, &iter) && iter < end;)
2029  if (classValue[iter - start]) return true;
2030  return false;
2031  }
  /* True if |glyphs| contains a glyph of class |klass|. Class 0 also
   * matches any glyph outside [startGlyph, startGlyph+count).
   * NOTE(review): the declaration of `g` (doxygen 2038, presumably
   * `hb_codepoint_t g = HB_SET_VALUE_INVALID;`) was stripped. */
2032  bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
2033  {
2034  unsigned int count = classValue.len;
2035  if (klass == 0)
2036  {
2037  /* Match if there's any glyph that is not listed! */
2039  if (!hb_set_next (glyphs, &g)) return false;
2040  if (g < startGlyph) return true;
2041  g = startGlyph + count - 1;
2042  if (hb_set_next (glyphs, &g)) return true;
2043  /* Fall through. */
2044  }
2045  /* TODO Speed up, using set overlap first? */
2046  /* TODO(iter) Rewrite as dagger. */
2047  HBUINT16 k {klass};
2048  const HBUINT16 *arr = classValue.arrayZ;
2049  for (unsigned int i = 0; i < count; i++)
2050  if (arr[i] == k && glyphs->has (startGlyph + i))
2051  return true;
2052  return false;
2053  }
2054 
  /* Copy the members of |glyphs| that have class |klass| into
   * |intersect_glyphs|; class 0 means "outside the covered range". */
2055  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
2056  {
2057  unsigned count = classValue.len;
2058  if (klass == 0)
2059  {
2060  hb_codepoint_t endGlyph = startGlyph + count -1;
2061  for (hb_codepoint_t g : glyphs->iter ())
2062  if (g < startGlyph || g > endGlyph)
2063  intersect_glyphs->add (g);
2064 
2065  return;
2066  }
2067 
2068  for (unsigned i = 0; i < count; i++)
2069  if (classValue[i] == klass && glyphs->has (startGlyph + i))
2070  intersect_glyphs->add (startGlyph + i);
2071  }
2072 
2073  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
2074  {
2075  if (glyphs->is_empty ()) return;
2076  hb_codepoint_t end_glyph = startGlyph + classValue.len - 1;
2077  if (glyphs->get_min () < startGlyph ||
2078  glyphs->get_max () > end_glyph)
2079  intersect_classes->add (0);
2080 
2081  for (const auto& _ : + hb_enumerate (classValue))
2082  {
2083  hb_codepoint_t g = startGlyph + _.first;
2084  if (glyphs->has (g))
2085  intersect_classes->add (_.second);
2086  }
2087  }
2088 
2089  protected:
2090  HBUINT16 classFormat; /* Format identifier--format = 1 */
2091  HBGlyphID16 startGlyph; /* First GlyphID of the classValueArray */
  /* Field type line stripped — classValue is an Array16Of<HBUINT16>. */
2093  classValue; /* Array of Class Values--one per GlyphID */
2094  public:
2096 };
2097 
  /* Struct name line stripped — this is ClassDefFormat2: sorted glyph
   * ranges, each mapping to one class value. */
2099 {
2100  friend struct ClassDef;
2101 
2102  private:
  /* bsearch miss returns the Null record whose value is 0 == class 0. */
2103  unsigned int get_class (hb_codepoint_t glyph_id) const
2104  {
2105  return rangeRecord.bsearch (glyph_id).value;
2106  }
2107 
  /* Serialize from a sorted (gid, class) pair iterator, merging
   * consecutive gids with equal classes into ranges. Records are
   * emitted incrementally via c->copy; rangeRecord.len is patched at
   * the end. */
2108  template<typename Iterator,
2109  hb_requires (hb_is_iterator (Iterator))>
2110  bool serialize (hb_serialize_context_t *c,
2111  Iterator it)
2112  {
2113  TRACE_SERIALIZE (this);
2114  if (unlikely (!c->extend_min (this))) return_trace (false);
2115 
2116  if (unlikely (!it))
2117  {
2118  classFormat = 2;
2119  rangeRecord.len = 0;
2120  return_trace (true);
2121  }
2122 
2123  unsigned num_ranges = 1;
2124  hb_codepoint_t prev_gid = (*it).first;
2125  unsigned prev_klass = (*it).second;
2126 
2127  RangeRecord range_rec;
2128  range_rec.first = prev_gid;
2129  range_rec.last = prev_gid;
2130  range_rec.value = prev_klass;
2131 
2132  RangeRecord *record = c->copy (range_rec);
2133  if (unlikely (!record)) return_trace (false);
2134 
2135  for (const auto gid_klass_pair : + (++it))
2136  {
2137  hb_codepoint_t cur_gid = gid_klass_pair.first;
2138  unsigned cur_klass = gid_klass_pair.second;
2139 
  /* A gap or class change closes the open range and opens a new one. */
2140  if (cur_gid != prev_gid + 1 ||
2141  cur_klass != prev_klass)
2142  {
2143  if (unlikely (!record)) break;
2144  record->last = prev_gid;
2145  num_ranges++;
2146 
2147  range_rec.first = cur_gid;
2148  range_rec.last = cur_gid;
2149  range_rec.value = cur_klass;
2150 
2151  record = c->copy (range_rec);
2152  }
2153 
2154  prev_klass = cur_klass;
2155  prev_gid = cur_gid;
2156  }
2157 
2158  if (likely (record)) record->last = prev_gid;
2159  rangeRecord.len = num_ranges;
2160  return_trace (true);
2161  }
2162 
  /* Subset to the retained glyph set, remapping classes through
   * klass_map. NOTE(review): one local declaration (doxygen 2173,
   * presumably the sorted `glyphs` vector) was stripped in extraction. */
2163  bool subset (hb_subset_context_t *c,
2164  hb_map_t *klass_map = nullptr /*OUT*/,
2165  bool keep_empty_table = true,
2166  bool use_class_zero = true,
2167  const Coverage* glyph_filter = nullptr) const
2168  {
2169  TRACE_SUBSET (this);
2170  const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2171  const hb_map_t &glyph_map = *c->plan->glyph_map;
2172 
2174  hb_set_t orig_klasses;
2175  hb_map_t gid_org_klass_map;
2176 
  /* Expand each non-zero-class range and keep retained glyphs. */
2177  unsigned count = rangeRecord.len;
2178  for (unsigned i = 0; i < count; i++)
2179  {
2180  unsigned klass = rangeRecord[i].value;
2181  if (!klass) continue;
2182  hb_codepoint_t start = rangeRecord[i].first;
2183  hb_codepoint_t end = rangeRecord[i].last + 1;
2184  for (hb_codepoint_t g = start; g < end; g++)
2185  {
2186  if (!glyphset.has (g)) continue;
2187  if (glyph_filter && !glyph_filter->has (g)) continue;
2188  glyphs.push (glyph_map[g]);
2189  gid_org_klass_map.set (glyph_map[g], klass);
2190  orig_klasses.add (klass);
2191  }
2192  }
2193 
2194  unsigned glyph_count = glyph_filter
2195  ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
2196  : glyphset.get_population ();
2197  use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
2198  ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
2199  glyphs, orig_klasses, use_class_zero, klass_map);
2200  return_trace (keep_empty_table || (bool) glyphs);
2201  }
2202 
  /* Validate the range array (bounds + ordering are checked by the
   * array's own sanitize). */
2203  bool sanitize (hb_sanitize_context_t *c) const
2204  {
2205  TRACE_SANITIZE (this);
2206  return_trace (rangeRecord.sanitize (c));
2207  }
2208 
2209  template <typename set_t>
2210  bool collect_coverage (set_t *glyphs) const
2211  {
2212  unsigned int count = rangeRecord.len;
2213  for (unsigned int i = 0; i < count; i++)
2214  if (rangeRecord[i].value)
2215  if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
2216  return false;
2217  return true;
2218  }
2219 
2220  template <typename set_t>
2221  bool collect_class (set_t *glyphs, unsigned int klass) const
2222  {
2223  unsigned int count = rangeRecord.len;
2224  for (unsigned int i = 0; i < count; i++)
2225  {
2226  if (rangeRecord[i].value == klass)
2227  if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
2228  return false;
2229  }
2230  return true;
2231  }
2232 
2233  bool intersects (const hb_set_t *glyphs) const
2234  {
2235  /* TODO Speed up, using hb_set_next() and bsearch()? */
2236  unsigned int count = rangeRecord.len;
2237  for (unsigned int i = 0; i < count; i++)
2238  {
2239  const auto& range = rangeRecord[i];
2240  if (range.intersects (glyphs) && range.value)
2241  return true;
2242  }
2243  return false;
2244  }
2245  bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
2246  {
2247  unsigned int count = rangeRecord.len;
2248  if (klass == 0)
2249  {
2250  /* Match if there's any glyph that is not listed! */
2252  for (unsigned int i = 0; i < count; i++)
2253  {
2254  if (!hb_set_next (glyphs, &g))
2255  break;
2256  if (g < rangeRecord[i].first)
2257  return true;
2258  g = rangeRecord[i].last;
2259  }
2260  if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
2261  return true;
2262  /* Fall through. */
2263  }
2264  /* TODO Speed up, using set overlap first? */
2265  /* TODO(iter) Rewrite as dagger. */
2266  HBUINT16 k {klass};
2267  const RangeRecord *arr = rangeRecord.arrayZ;
2268  for (unsigned int i = 0; i < count; i++)
2269  if (arr[i].value == k && arr[i].intersects (glyphs))
2270  return true;
2271  return false;
2272  }
2273 
2274  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
2275  {
2276  unsigned count = rangeRecord.len;
2277  if (klass == 0)
2278  {
2280  for (unsigned int i = 0; i < count; i++)
2281  {
2282  if (!hb_set_next (glyphs, &g))
2283  break;
2284  while (g != HB_SET_VALUE_INVALID && g < rangeRecord[i].first)
2285  {
2286  intersect_glyphs->add (g);
2287  hb_set_next (glyphs, &g);
2288  }
2289  g = rangeRecord[i].last;
2290  }
2291  while (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
2292  intersect_glyphs->add (g);
2293 
2294  return;
2295  }
2296 
2298  for (unsigned int i = 0; i < count; i++)
2299  {
2300  if (rangeRecord[i].value != klass) continue;
2301 
2302  if (g != HB_SET_VALUE_INVALID)
2303  {
2304  if (g >= rangeRecord[i].first &&
2305  g <= rangeRecord[i].last)
2306  intersect_glyphs->add (g);
2307  if (g > rangeRecord[i].last)
2308  continue;
2309  }
2310 
2311  g = rangeRecord[i].first - 1;
2312  while (hb_set_next (glyphs, &g))
2313  {
2314  if (g >= rangeRecord[i].first && g <= rangeRecord[i].last)
2315  intersect_glyphs->add (g);
2316  else if (g > rangeRecord[i].last)
2317  break;
2318  }
2319  }
2320  }
2321 
2322  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
2323  {
2324  if (glyphs->is_empty ()) return;
2325 
2326  unsigned count = rangeRecord.len;
2328  for (unsigned int i = 0; i < count; i++)
2329  {
2330  if (!hb_set_next (glyphs, &g))
2331  break;
2332  if (g < rangeRecord[i].first)
2333  {
2334  intersect_classes->add (0);
2335  break;
2336  }
2337  g = rangeRecord[i].last;
2338  }
2339  if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
2340  intersect_classes->add (0);
2341 
2342  for (const RangeRecord& record : rangeRecord.iter ())
2343  if (record.intersects (glyphs))
2344  intersect_classes->add (record.value);
2345  }
2346 
2347  protected:
2348  HBUINT16 classFormat; /* Format identifier--format = 2 */
2350  rangeRecord; /* Array of glyph ranges--ordered by
2351  * Start GlyphID */
2352  public:
2354 };
2355 
2356 struct ClassDef
2357 {
2358  /* Has interface. */
2359  static constexpr unsigned SENTINEL = 0;
2360  typedef unsigned int value_t;
2361  value_t operator [] (hb_codepoint_t k) const { return get (k); }
2362  bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
2363  /* Projection. */
2365 
  /* Returns the class of @glyph_id; uncovered glyphs (and unknown
   * formats) get class 0. */
  unsigned int get (hb_codepoint_t k) const { return get_class (k); }
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    switch (u.format) {
    case 1: return u.format1.get_class (glyph_id);
    case 2: return u.format2.get_class (glyph_id);
    default:return 0;
    }
  }
2375 
  /* Serializes a (glyph, class) iterator into whichever ClassDef format
   * encodes it in fewer words.  Class-0 entries are dropped up front
   * since class 0 is implicit for uncovered glyphs. */
  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    /* Keep only entries with a non-zero class. */
    auto it = + it_with_class_zero | hb_filter (hb_second);

    unsigned format = 2;
    if (likely (it))
    {
      hb_codepoint_t glyph_min = (*it).first;
      hb_codepoint_t glyph_max = glyph_min;

      unsigned num_glyphs = 0;
      unsigned num_ranges = 1;
      hb_codepoint_t prev_gid = glyph_min;
      unsigned prev_klass = (*it).second;

      /* Count glyphs, and the ranges a format-2 encoding would need:
       * a new range starts at every gid gap or class change. */
      for (const auto gid_klass_pair : it)
      {
        hb_codepoint_t cur_gid = gid_klass_pair.first;
        unsigned cur_klass = gid_klass_pair.second;
        num_glyphs++;
        if (cur_gid == glyph_min) continue;   /* First entry already accounted. */
        if (cur_gid > glyph_max) glyph_max = cur_gid;
        if (cur_gid != prev_gid + 1 ||
            cur_klass != prev_klass)
          num_ranges++;

        prev_gid = cur_gid;
        prev_klass = cur_klass;
      }

      /* Format 1 costs one uint16 per glyph in [glyph_min, glyph_max]
       * (plus a header word); format 2 costs three uint16s per range.
       * Pick format 1 when it is no larger. */
      if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
        format = 1;
    }
    u.format = format;

    switch (u.format)
    {
    case 1: return_trace (u.format1.serialize (c, it));
    case 2: return_trace (u.format2.serialize (c, it));
    default:return_trace (false);
    }
  }
2423 
2425  hb_map_t *klass_map = nullptr /*OUT*/,
2426  bool keep_empty_table = true,
2427  bool use_class_zero = true,
2428  const Coverage* glyph_filter = nullptr) const
2429  {
2430  TRACE_SUBSET (this);
2431  switch (u.format) {
2432  case 1: return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2433  case 2: return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2434  default:return_trace (false);
2435  }
2436  }
2437 
2439  {
2440  TRACE_SANITIZE (this);
2441  if (!u.format.sanitize (c)) return_trace (false);
2442  switch (u.format) {
2443  case 1: return_trace (u.format1.sanitize (c));
2444  case 2: return_trace (u.format2.sanitize (c));
2445  default:return_trace (true);
2446  }
2447  }
2448 
  /* Adds all glyphs with a non-zero class to @glyphs.
   * Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.collect_coverage (glyphs);
    case 2: return u.format2.collect_coverage (glyphs);
    default:return false;
    }
  }

  /* Adds all glyphs of class @klass to @glyphs.
   * Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: return u.format1.collect_class (glyphs, klass);
    case 2: return u.format2.collect_class (glyphs, klass);
    default:return false;
    }
  }
2472 
  /* Returns whether any member of @glyphs has a non-zero class. */
  bool intersects (const hb_set_t *glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.intersects (glyphs);
    case 2: return u.format2.intersects (glyphs);
    default:return false;
    }
  }
  /* Returns whether any member of @glyphs has class @klass. */
  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: return u.format1.intersects_class (glyphs, klass);
    case 2: return u.format2.intersects_class (glyphs, klass);
    default:return false;
    }
  }
2489 
  /* Collects the members of @glyphs that belong to class @klass. */
  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
    case 2: return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
    default:return;
    }
  }

  /* Collects the classes that members of @glyphs belong to. */
  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
  {
    switch (u.format) {
    case 1: return u.format1.intersected_classes (glyphs, intersect_classes);
    case 2: return u.format2.intersected_classes (glyphs, intersect_classes);
    default:return;
    }
  }
2507 
2508 
2509  protected:
2510  union {
2511  HBUINT16 format; /* Format identifier */
2514  } u;
2515  public:
2517 };
2518 
/* Convenience wrapper: serializes a (glyph, class) iterator as a ClassDef
 * embedded at the serializer's current position. */
template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
                                       Iterator it)
{ c->start_embed<ClassDef> ()->serialize (c, it); }
2523 
2524 
2525 /*
2526  * Item Variation Store
2527  */
2528 
2530 {
  /* Evaluates this axis' triangular ("tent") scalar at @coord:
   * 0 outside (start, end), 1 at the peak, linearly interpolated in
   * between.  Degenerate configurations evaluate to 1 so a broken axis
   * does not zero out the whole region. */
  float evaluate (int coord) const
  {
    int start = startCoord, peak = peakCoord, end = endCoord;

    /* TODO Move these to sanitize(). */
    if (unlikely (start > peak || peak > end))
      return 1.;
    if (unlikely (start < 0 && end > 0 && peak != 0))
      return 1.;

    /* A zero peak means this axis does not participate. */
    if (peak == 0 || coord == peak)
      return 1.;

    if (coord <= start || end <= coord)
      return 0.;

    /* Interpolate */
    if (coord < peak)
      return float (coord - start) / (peak - start);
    else
      return float (end - coord) / (end - peak);
  }
2553 
2555  {
2556  TRACE_SANITIZE (this);
2557  return_trace (c->check_struct (this));
2558  /* TODO Handle invalid start/peak/end configs, so we don't
2559  * have to do that at runtime. */
2560  }
2561 
2562  public:
2566  public:
2568 };
2569 
2571 {
  /* Computes the scalar of region @region_index at @coords: the product
   * of every axis' tent evaluation.  Coordinates not supplied by the
   * caller default to 0; short-circuits to 0 once any factor is 0. */
  float evaluate (unsigned int region_index,
                  const int *coords, unsigned int coord_len) const
  {
    if (unlikely (region_index >= regionCount))
      return 0.;

    /* Axes are stored row-major: axisCount entries per region. */
    const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);

    float v = 1.;
    unsigned int count = axisCount;
    for (unsigned int i = 0; i < count; i++)
    {
      int coord = i < coord_len ? coords[i] : 0;
      float factor = axes[i].evaluate (coord);
      if (factor == 0.f)
        return 0.;
      v *= factor;
    }
    return v;
  }
2592 
2594  {
2595  TRACE_SANITIZE (this);
2596  return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
2597  }
2598 
  /* Copies from @src only the regions selected by @region_map, laid out
   * in the map's compacted order.  Fails on serializer overflow, on
   * byte-size overflow, or when a mapped index is out of range. */
  bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    axisCount = src->axisCount;
    regionCount = region_map.get_population ();
    /* Guard the total byte size against unsigned overflow. */
    if (unlikely (hb_unsigned_mul_overflows (axisCount * regionCount,
                                             VarRegionAxis::static_size))) return_trace (false);
    if (unlikely (!c->extend (this))) return_trace (false);
    unsigned int region_count = src->regionCount;
    for (unsigned int r = 0; r < regionCount; r++)
    {
      /* backward() maps the new (compact) index to the source index. */
      unsigned int backward = region_map.backward (r);
      if (backward >= region_count) return_trace (false);
      memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
    }

    return_trace (true);
  }

  /* Total byte size: fixed header plus axisCount x regionCount axis records. */
  unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
2620 
2621  public:
2624  protected:
2627  public:
2629 };
2630 
2631 struct VarData
2632 {
  /* Number of region indices (delta columns) in every row. */
  unsigned int get_region_index_count () const
  { return regionIndices.len; }

  /* Bytes per row: shortCount 16-bit deltas then (len - shortCount)
   * 8-bit deltas, i.e. 2*shortCount + (len - shortCount). */
  unsigned int get_row_size () const
  { return shortCount + regionIndices.len; }

  /* Total byte size of this VarData sub-table. */
  unsigned int get_size () const
  { return min_size
    - regionIndices.min_size + regionIndices.get_size ()
    + itemCount * get_row_size ();
  }

  /* Computes the interpolated delta of row @inner: the sum over all
   * columns of region_scalar * stored_delta.  Wide (16-bit) deltas come
   * first in the row, then narrow (8-bit) ones. */
  float get_delta (unsigned int inner,
                   const int *coords, unsigned int coord_count,
                   const VarRegionList &regions) const
  {
    if (unlikely (inner >= itemCount))
      return 0.;

    unsigned int count = regionIndices.len;
    unsigned int scount = shortCount;

    const HBUINT8 *bytes = get_delta_bytes ();
    const HBUINT8 *row = bytes + inner * (scount + count);

    float delta = 0.;
    unsigned int i = 0;

    /* 16-bit portion of the row. */
    const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (row);
    for (; i < scount; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
      delta += scalar * *scursor++;
    }
    /* 8-bit portion follows immediately. */
    const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
    for (; i < count; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
      delta += scalar * *bcursor++;
    }

    return delta;
  }
2676 
2677  void get_region_scalars (const int *coords, unsigned int coord_count,
2678  const VarRegionList &regions,
2679  float *scalars /*OUT */,
2680  unsigned int num_scalars) const
2681  {
2682  unsigned count = hb_min (num_scalars, regionIndices.len);
2683  for (unsigned int i = 0; i < count; i++)
2684  scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2685  for (unsigned int i = count; i < num_scalars; i++)
2686  scalars[i] = 0.f;
2687  }
2688 
2690  {
2691  TRACE_SANITIZE (this);
2692  return_trace (c->check_struct (this) &&
2693  regionIndices.sanitize (c) &&
2694  shortCount <= regionIndices.len &&
2695  c->check_range (get_delta_bytes (),
2696  itemCount,
2697  get_row_size ()));
2698  }
2699 
2701  const VarData *src,
2702  const hb_inc_bimap_t &inner_map,
2703  const hb_bimap_t &region_map)
2704  {
2705  TRACE_SERIALIZE (this);
2706  if (unlikely (!c->extend_min (this))) return_trace (false);
2707  itemCount = inner_map.get_next_value ();
2708 
2709  /* Optimize short count */
2710  unsigned short ri_count = src->regionIndices.len;
2711  enum delta_size_t { kZero=0, kByte, kShort };
2712  hb_vector_t<delta_size_t> delta_sz;
2713  hb_vector_t<unsigned int> ri_map; /* maps old index to new index */
2714  delta_sz.resize (ri_count);
2715  ri_map.resize (ri_count);
2716  unsigned int new_short_count = 0;
2717  unsigned int r;
2718  for (r = 0; r < ri_count; r++)
2719  {
2720  delta_sz[r] = kZero;
2721  for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
2722  {
2723  unsigned int old = inner_map.backward (i);
2724  int16_t delta = src->get_item_delta (old, r);
2725  if (delta < -128 || 127 < delta)
2726  {
2727  delta_sz[r] = kShort;
2728  new_short_count++;
2729  break;
2730  }
2731  else if (delta != 0)
2732  delta_sz[r] = kByte;
2733  }
2734  }
2735  unsigned int short_index = 0;
2736  unsigned int byte_index = new_short_count;
2737  unsigned int new_ri_count = 0;
2738  for (r = 0; r < ri_count; r++)
2739  if (delta_sz[r])
2740  {
2741  ri_map[r] = (delta_sz[r] == kShort)? short_index++ : byte_index++;
2742  new_ri_count++;
2743  }
2744 
2745  shortCount = new_short_count;
2746  regionIndices.len = new_ri_count;
2747 
2748  if (unlikely (!c->extend (this))) return_trace (false);
2749 
2750  for (r = 0; r < ri_count; r++)
2751  if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];
2752 
2753  for (unsigned int i = 0; i < itemCount; i++)
2754  {
2755  unsigned int old = inner_map.backward (i);
2756  for (unsigned int r = 0; r < ri_count; r++)
2757  if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
2758  }
2759 
2760  return_trace (true);
2761  }
2762 
  /* Adds to @region_indices every region this VarData actually needs:
   * one referenced by a column holding a non-zero delta in at least one
   * retained (per @inner_map) row. */
  void collect_region_refs (hb_set_t &region_indices, const hb_inc_bimap_t &inner_map) const
  {
    for (unsigned int r = 0; r < regionIndices.len; r++)
    {
      unsigned int region = regionIndices[r];
      if (region_indices.has (region)) continue;   /* Already known used. */
      for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
        if (get_item_delta (inner_map.backward (i), r) != 0)
        {
          region_indices.add (region);
          break;
        }
    }
  }
2777 
2778  protected:
2779  const HBUINT8 *get_delta_bytes () const
2780  { return &StructAfter<HBUINT8> (regionIndices); }
2781 
2783  { return &StructAfter<HBUINT8> (regionIndices); }
2784 
  /* Reads the stored delta at (row @item, column @region): 16-bit
   * deltas occupy the first shortCount columns of the row, 8-bit deltas
   * the rest.  Out-of-range coordinates yield 0. */
  int16_t get_item_delta (unsigned int item, unsigned int region) const
  {
    if ( item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
    const HBINT8 *p = (const HBINT8 *)get_delta_bytes () + item * get_row_size ();
    if (region < shortCount)
      return ((const HBINT16 *)p)[region];
    else
      return (p + HBINT16::static_size * shortCount)[region - shortCount];
  }

  /* Writes @delta at (row @item, column @region) using the same layout
   * as get_item_delta; bounds are the caller's responsibility. */
  void set_item_delta (unsigned int item, unsigned int region, int16_t delta)
  {
    HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
    if (region < shortCount)
      ((HBINT16 *)p)[region] = delta;
    else
      (p + HBINT16::static_size * shortCount)[region - shortCount] = delta;
  }
2803 
2804  protected:
2808 /*UnsizedArrayOf<HBUINT8>bytesX;*/
2809  public:
2811 };
2812 
2814 {
2815  private:
  /* Resolves (outer, inner) to a VarData row and evaluates its delta at
   * @coords.  Compiles to 0 under HB_NO_VAR. */
  float get_delta (unsigned int outer, unsigned int inner,
                   const int *coords, unsigned int coord_count) const
  {
#ifdef HB_NO_VAR
    return 0.f;
#endif

    if (unlikely (outer >= dataSets.len))
      return 0.f;

    return (this+dataSets[outer]).get_delta (inner,
                                             coords, coord_count,
                                             this+regions);
  }

  public:
  /* Packed variation-index form: high 16 bits select the data set
   * (outer), low 16 bits the row within it (inner). */
  float get_delta (unsigned int index,
                   const int *coords, unsigned int coord_count) const
  {
    unsigned int outer = index >> 16;
    unsigned int inner = index & 0xFFFF;
    return get_delta (outer, inner, coords, coord_count);
  }
2839 
2841  {
2842 #ifdef HB_NO_VAR
2843  return true;
2844 #endif
2845 
2846  TRACE_SANITIZE (this);
2847  return_trace (c->check_struct (this) &&
2848  format == 1 &&
2849  regions.sanitize (c, this) &&
2850  dataSets.sanitize (c, this));
2851  }
2852 
2854  const VariationStore *src,
2855  const hb_array_t <hb_inc_bimap_t> &inner_maps)
2856  {
2857  TRACE_SERIALIZE (this);
2858  if (unlikely (!c->extend_min (this))) return_trace (false);
2859 
2860  unsigned int set_count = 0;
2861  for (unsigned int i = 0; i < inner_maps.length; i++)
2862  if (inner_maps[i].get_population ())
2863  set_count++;
2864 
2865  format = 1;
2866 
2867  const auto &src_regions = src+src->regions;
2868 
2869  hb_set_t region_indices;
2870  for (unsigned int i = 0; i < inner_maps.length; i++)
2871  (src+src->dataSets[i]).collect_region_refs (region_indices, inner_maps[i]);
2872 
2873  if (region_indices.in_error ())
2874  return_trace (false);
2875 
2876  region_indices.del_range ((src_regions).regionCount, hb_set_t::INVALID);
2877 
2878  /* TODO use constructor when our data-structures support that. */
2879  hb_inc_bimap_t region_map;
2880  + hb_iter (region_indices)
2881  | hb_apply ([&region_map] (unsigned _) { region_map.add(_); })
2882  ;
2883  if (region_map.in_error())
2884  return_trace (false);
2885 
2886  if (unlikely (!regions.serialize_serialize (c, &src_regions, region_map)))
2887  return_trace (false);
2888 
2889  dataSets.len = set_count;
2890  if (unlikely (!c->extend (dataSets))) return_trace (false);
2891 
2892  /* TODO: The following code could be simplified when
2893  * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize () */
2894  unsigned int set_index = 0;
2895  for (unsigned int i = 0; i < inner_maps.length; i++)
2896  {
2897  if (!inner_maps[i].get_population ()) continue;
2898  if (unlikely (!dataSets[set_index++]
2899  .serialize_serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
2900  return_trace (false);
2901  }
2902 
2903  return_trace (true);
2904  }
2905 
2907  {
2908  TRACE_SUBSET (this);
2909 
2910  VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> ();
2911  if (unlikely (!varstore_prime)) return_trace (false);
2912 
2913  const hb_set_t *variation_indices = c->plan->layout_variation_indices;
2914  if (variation_indices->is_empty ()) return_trace (false);
2915 
2916  hb_vector_t<hb_inc_bimap_t> inner_maps;
2917  inner_maps.resize ((unsigned) dataSets.len);
2918 
2919  for (unsigned idx : c->plan->layout_variation_indices->iter ())
2920  {
2921  uint16_t major = idx >> 16;
2922  uint16_t minor = idx & 0xFFFF;
2923 
2924  if (major >= inner_maps.length)
2925  return_trace (false);
2926  inner_maps[major].add (minor);
2927  }
2928  varstore_prime->serialize (c->serializer, this, inner_maps.as_array ());
2929 
2930  return_trace (
2931  !c->serializer->in_error()
2932  && varstore_prime->dataSets);
2933  }
2934 
  /* Number of regions referenced by data set @major. */
  unsigned int get_region_index_count (unsigned int major) const
  { return (this+dataSets[major]).get_region_index_count (); }

  /* Fills @scalars with the evaluated region scalars of data set
   * @major; zero-fills under HB_NO_VAR. */
  void get_region_scalars (unsigned int major,
                           const int *coords, unsigned int coord_count,
                           float *scalars /*OUT*/,
                           unsigned int num_scalars) const
  {
#ifdef HB_NO_VAR
    for (unsigned i = 0; i < num_scalars; i++)
      scalars[i] = 0.f;
    return;
#endif

    (this+dataSets[major]).get_region_scalars (coords, coord_count,
                                               this+regions,
                                               &scalars[0], num_scalars);
  }

  /* Number of VarData sub-tables in this store. */
  unsigned int get_sub_table_count () const { return dataSets.len; }
2955 
2956  protected:
2960  public:
2962 };
2963 
2964 /*
2965  * Feature Variations
2966  */
2967 
2969 {
2970  friend struct Condition;
2971 
2973  {
2974  TRACE_SUBSET (this);
2975  auto *out = c->serializer->embed (this);
2976  if (unlikely (!out)) return_trace (false);
2977  return_trace (true);
2978  }
2979 
2980  private:
2981  bool evaluate (const int *coords, unsigned int coord_len) const
2982  {
2983  int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
2985  }
2986 
  /* Structural bounds check only; any filter-range values are acceptable. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }
2992 
2993  protected:
2994  HBUINT16 format; /* Format identifier--format = 1 */
2998  public:
3000 };
3001 
3003 {
  /* Evaluates the condition at @coords; only format 1 (axis range)
   * exists, unknown formats never match. */
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    switch (u.format) {
    case 1: return u.format1.evaluate (coords, coord_len);
    default:return false;
    }
  }

  /* Generic format dispatcher used by subset/sanitize machinery. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }
3022 
3024  {
3025  TRACE_SANITIZE (this);
3026  if (!u.format.sanitize (c)) return_trace (false);
3027  switch (u.format) {
3028  case 1: return_trace (u.format1.sanitize (c));
3029  default:return_trace (true);
3030  }
3031  }
3032 
3033  protected:
3034  union {
3035  HBUINT16 format; /* Format identifier */
3037  } u;
3038  public:
3040 };
3041 
3043 {
  /* A condition set matches only when every one of its conditions does
   * (an empty set therefore always matches). */
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    unsigned int count = conditions.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
        return false;
    return true;
  }
3052 
3054  {
3055  TRACE_SUBSET (this);
3056  auto *out = c->serializer->start_embed (this);
3057  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3058 
3059  + conditions.iter ()
3060  | hb_apply (subset_offset_array (c, out->conditions, this))
3061  ;
3062 
3063  return_trace (bool (out->conditions));
3064  }
3065 
3067  {
3068  TRACE_SANITIZE (this);
3069  return_trace (conditions.sanitize (c, this));
3070  }
3071 
3072  protected:
3074  public:
3076 };
3077 
3079 {
3081 
  /* Adds the lookup indexes of the substitute feature to @lookup_indexes. */
  void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+feature).add_lookup_indexes_to (lookup_indexes);
  }

  /* Marks featureIndex live when the substitute feature still
   * references a retained lookup. */
  void closure_features (const void *base,
                         const hb_map_t *lookup_indexes,
                         hb_set_t *feature_indexes /* OUT */) const
  {
    if ((base+feature).intersects_lookup_indexes (lookup_indexes))
      feature_indexes->add (featureIndex);
  }

  /* Subsets this record: drops it when the substituted feature is not
   * retained; otherwise remaps the index and subsets the feature. */
  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    if (!c->feature_index_map->has (featureIndex)) {
      // Feature that is being substituted is not being retained, so we don't
      // need this.
      return_trace (false);
    }

    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->featureIndex = c->feature_index_map->get (featureIndex);
    bool ret = out->feature.serialize_subset (c->subset_context, feature, base, c);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && feature.sanitize (c, base));
  }
3117 
3118  protected:
3121  public:
3123 };
3124 
3126 {
3127  const Feature *find_substitute (unsigned int feature_index) const
3128  {
3129  unsigned int count = substitutions.len;
3130  for (unsigned int i = 0; i < count; i++)
3131  {
3133  if (record.featureIndex == feature_index)
3134  return &(this+record.feature);
3135  }
3136  return nullptr;
3137  }
3138 
  /* For every record whose featureIndex is in @feature_indexes,
   * collects the lookup indexes of its substitute feature. */
  void collect_lookups (const hb_set_t *feature_indexes,
                        hb_set_t *lookup_indexes /* OUT */) const
  {
    + hb_iter (substitutions)
    | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
    | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
                { r.collect_lookups (this, lookup_indexes); })
    ;
  }
3148 
3149  void closure_features (const hb_map_t *lookup_indexes,
3150  hb_set_t *feature_indexes /* OUT */) const
3151  {
3153  record.closure_features (this, lookup_indexes, feature_indexes);
3154  }
3155 
3156  bool intersects_features (const hb_map_t *feature_index_map) const
3157  {
3159  {
3160  if (feature_index_map->has (record.featureIndex)) return true;
3161  }
3162  return false;
3163  }
3164 
3167  {
3168  TRACE_SUBSET (this);
3169  auto *out = c->serializer->start_embed (*this);
3170  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3171 
3172  out->version.major = version.major;
3173  out->version.minor = version.minor;
3174 
3175  + substitutions.iter ()
3176  | hb_apply (subset_record_array (l, &(out->substitutions), this))
3177  ;
3178 
3179  return_trace (bool (out->substitutions));
3180  }
3181 
3183  {
3184  TRACE_SANITIZE (this);
3186  likely (version.major == 1) &&
3187  substitutions.sanitize (c, this));
3188  }
3189 
3190  protected:
3191  FixedVersion<> version; /* Version--0x00010000u */
3194  public:
3196 };
3197 
3199 {
3200  friend struct FeatureVariations;
3201 
  /* Forwards lookup collection to this record's substitution table. */
  void collect_lookups (const void *base,
                        const hb_set_t *feature_indexes,
                        hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
  }

  /* Forwards feature closure to this record's substitution table. */
  void closure_features (const void *base,
                        const hb_map_t *lookup_indexes,
                        hb_set_t *feature_indexes /* OUT */) const
  {
    (base+substitutions).closure_features (lookup_indexes, feature_indexes);
  }

  /* Returns whether this record substitutes any retained feature. */
  bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
  {
    return (base+substitutions).intersects_features (feature_index_map);
  }

  /* Subsets the record: embeds it, then subsets both the condition set
   * and the substitution table it points at. */
  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->conditions.serialize_subset (c->subset_context, conditions, base);
    out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, base) &&
                  substitutions.sanitize (c, base));
  }
3239 
3240  protected:
3245  public:
3247 };
3248 
3250 {
3251  static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;
3252 
3253  bool find_index (const int *coords, unsigned int coord_len,
3254  unsigned int *index) const
3255  {
3256  unsigned int count = varRecords.len;
3257  for (unsigned int i = 0; i < count; i++)
3258  {
3259  const FeatureVariationRecord &record = varRecords.arrayZ[i];
3260  if ((this+record.conditions).evaluate (coords, coord_len))
3261  {
3262  *index = i;
3263  return true;
3264  }
3265  }
3267  return false;
3268  }
3269 
  /* Returns the substitute Feature that record @variations_index
   * provides for @feature_index, or nullptr if it provides none. */
  const Feature *find_substitute (unsigned int variations_index,
                                  unsigned int feature_index) const
  {
    const FeatureVariationRecord &record = varRecords[variations_index];
    return (this+record.substitutions).find_substitute (feature_index);
  }
3276 
3278  {
3279  TRACE_SERIALIZE (this);
3280  return_trace (c->embed (*this));
3281  }
3282 
  /* Collects, across all variation records, the lookups referenced by
   * substitutes of the retained @feature_indexes. */
  void collect_lookups (const hb_set_t *feature_indexes,
                        hb_set_t *lookup_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& r : varRecords)
      r.collect_lookups (this, feature_indexes, lookup_indexes);
  }
3289 
3290  void closure_features (const hb_map_t *lookup_indexes,
3291  hb_set_t *feature_indexes /* OUT */) const
3292  {
3294  record.closure_features (this, lookup_indexes, feature_indexes);
3295  }
3296 
3299  {
3300  TRACE_SUBSET (this);
3301  auto *out = c->serializer->start_embed (*this);
3302  if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3303 
3304  out->version.major = version.major;
3305  out->version.minor = version.minor;
3306 
3307  int keep_up_to = -1;
3308  for (int i = varRecords.len - 1; i >= 0; i--) {
3309  if (varRecords[i].intersects_features (this, l->feature_index_map)) {
3310  keep_up_to = i;
3311  break;
3312  }
3313  }
3314 
3315  unsigned count = (unsigned) (keep_up_to + 1);
3316  for (unsigned i = 0; i < count; i++) {
3317  subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
3318  }
3319  return_trace (bool (out->varRecords));
3320  }
3321 
3323  {
3324  TRACE_SANITIZE (this);
3326  likely (version.major == 1) &&
3327  varRecords.sanitize (c, this));
3328  }
3329 
3330  protected:
3331  FixedVersion<> version; /* Version--0x00010000u */
3334  public:
3336 };
3337 
3338 
3339 /*
3340  * Device Tables
3341  */
3342 
3344 {
3345  friend struct Device;
3346 
3347  private:
3348 
  /* Pixel-hinting deltas, scaled from the current ppem to font space. */
  hb_position_t get_x_delta (hb_font_t *font) const
  { return get_delta (font->x_ppem, font->x_scale); }

  hb_position_t get_y_delta (hb_font_t *font) const
  { return get_delta (font->y_ppem, font->y_scale); }

  public:

  /* Byte size of the whole device table; malformed headers report the
   * minimal three-word size so sanitize rejects them cheaply. */
  unsigned int get_size () const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
    return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
  }
3363 
3365  {
3366  TRACE_SANITIZE (this);
3367  return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
3368  }
3369 
3371  {
3372  TRACE_SERIALIZE (this);
3373  return_trace (c->embed<HintingDevice> (this));
3374  }
3375 
3376  private:
3377 
  /* Scales the per-ppem pixel adjustment into font units:
   * delta = pixels * scale / ppem.  Returns 0 when ppem is unset. */
  int get_delta (unsigned int ppem, int scale) const
  {
    if (!ppem) return 0;

    int pixels = get_delta_pixels (ppem);

    if (!pixels) return 0;

    /* 64-bit intermediate so pixels * scale cannot overflow. */
    return (int) (pixels * (int64_t) scale / ppem);
  }
  /* Extracts the signed packed delta for @ppem_size.  Format f packs
   * (1 << f)-bit values, 16 / (1 << f) of them per uint16 word. */
  int get_delta_pixels (unsigned int ppem_size) const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3))
      return 0;

    if (ppem_size < startSize || ppem_size > endSize)
      return 0;

    unsigned int s = ppem_size - startSize;

    /* Locate the word holding entry s, then shift its bits into place. */
    unsigned int byte = deltaValueZ[s >> (4 - f)];
    unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
    unsigned int mask = (0xFFFFu >> (16 - (1 << f)));

    int delta = bits & mask;

    /* Sign-extend: values at or above half the range are negative. */
    if ((unsigned int) delta >= ((mask + 1) >> 1))
      delta -= mask + 1;

    return delta;
  }
3410 
3411  protected:
3412  HBUINT16 startSize; /* Smallest size to correct--in ppem */
3413  HBUINT16 endSize; /* Largest size to correct--in ppem */
3414  HBUINT16 deltaFormat; /* Format of DeltaValue array data: 1, 2, or 3
3415  * 1 Signed 2-bit value, 8 values per uint16
3416  * 2 Signed 4-bit value, 4 values per uint16
3417  * 3 Signed 8-bit value, 2 values per uint16
3418  */
3420  deltaValueZ; /* Array of compressed data */
3421  public:
3423 };
3424 
3426 {
3427  friend struct Device;
3428 
3429  private:
3430 
3431  hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store) const
3432  { return font->em_scalef_x (get_delta (font, store)); }
3433 
3434  hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const
3435  { return font->em_scalef_y (get_delta (font, store)); }
3436 
3437  VariationDevice* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const
3438  {
3439  TRACE_SERIALIZE (this);
3440  auto snap = c->snapshot ();
3441  auto *out = c->embed (this);
3442  if (unlikely (!out)) return_trace (nullptr);
3443  if (!layout_variation_idx_map || layout_variation_idx_map->is_empty ()) return_trace (out);
3444 
3445  /* TODO Just get() and bail if NO_VARIATION. Needs to setup the map to return that. */
3446  if (!layout_variation_idx_map->has (varIdx))
3447  {
3448  c->revert (snap);
3449  return_trace (nullptr);
3450  }
3451  unsigned new_idx = layout_variation_idx_map->get (varIdx);
3452  out->varIdx = new_idx;
3453  return_trace (out);
3454  }
3455 
3456  void record_variation_index (hb_set_t *layout_variation_indices) const
3457  {
3458  layout_variation_indices->add (varIdx);
3459  }
3460 
  /* Bounds-check this fixed-size record; it has no variable-length
   * payload, so a struct-level check suffices. */
3461  bool sanitize (hb_sanitize_context_t *c) const
3462  {
3463  TRACE_SANITIZE (this);
3464  return_trace (c->check_struct (this));
3465  }
3466 
3467  private:
3468 
  /* Interpolated delta from the store for varIdx at the font's current
   * normalized variation coordinates. */
3469  float get_delta (hb_font_t *font, const VariationStore &store) const
3470  {
3471  return store.get_delta (varIdx, font->coords, font->num_coords);
3472  }
3473 
3474  protected:
3476  HBUINT16 deltaFormat; /* Format identifier for this table: 0x0x8000 */
3477  public:
3479 };
3480 
3482 {
3483  protected:
3486  public:
3487  HBUINT16 format; /* Format identifier */
3488  public:
3490 };
3491 
3492 struct Device
3493 {
  /* get_x_delta body — NOTE(review): the signature line was lost in this
   * extraction; the symbol index below shows
   * `hb_position_t get_x_delta (hb_font_t *font,
   *     const VariationStore &store = Null (VariationStore)) const`.
   * Dispatches on the format word: 1-3 => hinting device,
   * 0x8000 => variation device, anything else => 0. */
3495  {
3496  switch (u.b.format)
3497  {
3498 #ifndef HB_NO_HINTING
3499  case 1: case 2: case 3:
3500  return u.hinting.get_x_delta (font);
3501 #endif
3502 #ifndef HB_NO_VAR
3503  case 0x8000:
3504  return u.variation.get_x_delta (font, store);
3505 #endif
3506  default:
3507  return 0;
3508  }
3509  }
  /* get_y_delta body — NOTE(review): the signature line was lost in this
   * extraction; the symbol index below shows
   * `hb_position_t get_y_delta (hb_font_t *font,
   *     const VariationStore &store = Null (VariationStore)) const`.
   *
   * NOTE(review): unlike get_x_delta above, the `case 1: case 2: case 3:`
   * labels here sit OUTSIDE the #ifndef HB_NO_HINTING guard, so when
   * HB_NO_HINTING is defined, formats 1-3 fall through into the
   * variation branch (or default) instead of being compiled out.
   * Confirm this asymmetry is intentional. */
3511  {
3512  switch (u.b.format)
3513  {
3514  case 1: case 2: case 3:
3515 #ifndef HB_NO_HINTING
3516  return u.hinting.get_y_delta (font);
3517 #endif
3518 #ifndef HB_NO_VAR
3519  case 0x8000:
3520  return u.variation.get_y_delta (font, store);
3521 #endif
3522  default:
3523  return 0;
3524  }
3525  }
3526 
  /* sanitize body — NOTE(review): the signature line was lost in this
   * extraction; the symbol index below shows
   * `bool sanitize (hb_sanitize_context_t *c) const`.
   * Validates the format word first, then delegates to the matching
   * union member; unknown formats are accepted as-is. */
3528  {
3529  TRACE_SANITIZE (this);
3530  if (!u.b.format.sanitize (c)) return_trace (false);
3531  switch (u.b.format) {
3532 #ifndef HB_NO_HINTING
3533  case 1: case 2: case 3:
3534  return_trace (u.hinting.sanitize (c));
3535 #endif
3536 #ifndef HB_NO_VAR
3537  case 0x8000:
3538  return_trace (u.variation.sanitize (c));
3539 #endif
3540  default:
3541  return_trace (true);
3542  }
3543  }
3544 
3545  Device* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map=nullptr) const
3546  {
3547  TRACE_SERIALIZE (this);
3548  switch (u.b.format) {
3549 #ifndef HB_NO_HINTING
3550  case 1:
3551  case 2:
3552  case 3:
3553  return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
3554 #endif
3555 #ifndef HB_NO_VAR
3556  case 0x8000:
3557  return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_map)));
3558 #endif
3559  default:
3560  return_trace (nullptr);
3561  }
3562  }
3563 
3564  void collect_variation_indices (hb_set_t *layout_variation_indices) const
3565  {
3566  switch (u.b.format) {
3567 #ifndef HB_NO_HINTING
3568  case 1:
3569  case 2:
3570  case 3:
3571  return;
3572 #endif
3573 #ifndef HB_NO_VAR
3574  case 0x8000:
3575  u.variation.record_variation_index (layout_variation_indices);
3576  return;
3577 #endif
3578  default:
3579  return;
3580  }
3581  }
3582 
3583  protected:
3584  union {
3587 #ifndef HB_NO_VAR
3589 #endif
3590  } u;
3591  public:
3593 };
3594 
3595 
3596 } /* namespace OT */
3597 
3598 
3599 #endif /* HB_OT_LAYOUT_COMMON_HH */
small capitals from c petite p scientific f u
Definition: afcover.h:88
small capitals from c petite p scientific i
[1]
Definition: afcover.h:80
FT_UInt idx
Definition: cffcmap.c:135
Definition: base.h:37
float factor
#define this
Definition: dialogs.cpp:56
@ HB_NOT_FOUND_STORE
Definition: hb-array.hh:42
hb_array_t< T > hb_array(T *array, unsigned int length)
Definition: hb-array.hh:295
#define TRACE_SERIALIZE(this)
Definition: hb-debug.hh:426
#define TRACE_SANITIZE(this)
Definition: hb-debug.hh:414
#define TRACE_DISPATCH(this, format)
Definition: hb-debug.hh:459
#define return_trace(RET)
Definition: hb-debug.hh:349
#define TRACE_SUBSET(this)
Definition: hb-debug.hh:438
#define hb_is_iterator(Iter)
Definition: hb-iter.hh:265
auto it hb_map(hb_second)) template< typename Type > inline hb_array_t< Type > operator()(hb_array_t< Type > array
#define hb_is_sorted_source_of(Iter, Item)
Definition: hb-iter.hh:320
static auto hb_requires(hb_is_iterable(Iterable))> static inline auto end(Iterable &&iterable) HB_AUTO_RETURN(hb_iter(iterable).end()) namespace OT
Definition: hb-iter.hh:331
HB_EXTERN hb_tag_t const hb_tag_t unsigned int * script_index
HB_EXTERN unsigned int start_offset
#define HB_MAX_LANGSYS
#define HB_MAX_SCRIPTS
subset_offset_array_t< OutputArray > operator()(hb_subset_context_t *subset_context, OutputArray &out, const void *base) const
#define HB_MAX_FEATURE_INDICES
#define HB_MAX_LOOKUP_VISIT_COUNT
HB_MARK_AS_FLAG_T(OT::LookupFlag::Flags)
#define HB_MAX_LANGSYS_FEATURE_COUNT
#define NOT_COVERED
#define _(S, M)
hb_set_t * hb_set_create()
Definition: hb-set.cc:52
void hb_set_destroy(hb_set_t *set)
Definition: hb-set.cc:106
hb_bool_t hb_set_next(const hb_set_t *set, hb_codepoint_t *codepoint)
Definition: hb-set.cc:543
#define likely(expr)
Definition: hb.hh:250
#define unlikely(expr)
Definition: hb.hh:251
DECLARE_NULL_NAMESPACE_BYTES(OT, Index)
RecordListOfScript ScriptList
struct OT::subset_record_array_t HB_FUNCOBJ
IntType< uint16_t > HBUINT16
List16OfOffset16To< Lookup > LookupList
RecordListOf< Feature > FeatureList
DBusConnection const char DBusError DBusBusType DBusError return DBusConnection DBusHandleMessageFunction void DBusFreeFunction return DBusConnection return DBusConnection return const char DBusError return DBusConnection DBusMessage dbus_uint32_t return DBusConnection dbus_bool_t DBusConnection DBusAddWatchFunction DBusRemoveWatchFunction DBusWatchToggledFunction void DBusFreeFunction return DBusConnection DBusDispatchStatusFunction void DBusFreeFunction DBusTimeout return DBusTimeout return DBusWatch return DBusWatch unsigned int return DBusError const DBusError return const DBusMessage return DBusMessage return DBusMessage return DBusMessage return DBusMessage return DBusMessage return DBusMessageIter * iter
EGLOutputLayerEXT EGLint EGLAttrib value
GLenum type
Definition: qopengl.h:270
GLboolean GLboolean GLboolean b
GLsizei const GLfloat * v
[13]
GLboolean r
[2]
GLboolean GLboolean GLboolean GLboolean a
[7]
GLuint index
[2]
GLuint GLuint end
GLsizei const GLubyte GLsizei GLenum const void * coords
GLenum GLenum GLsizei count
GLfloat GLfloat f
GLenum src
GLsizei range
GLuint start
GLenum GLuint GLintptr offset
GLboolean GLboolean g
GLint GLenum GLsizei GLsizei GLsizei GLint GLenum GLenum const void * pixels
GLint first
GLint GLint GLint GLint GLint GLint GLint GLbitfield mask
GLint GLsizei GLsizei GLenum format
GLhandleARB obj
[2]
Definition: qopenglext.h:4164
const GLubyte * c
Definition: qopenglext.h:12701
GLuint coord
Definition: qopenglext.h:6219
GLenum GLint GLenum GLsizei GLsizei GLsizei GLint GLsizei const void * bits
Definition: qopenglext.h:6904
GLenum GLsizei len
Definition: qopenglext.h:3292
GLenum GLenum GLsizei void * row
Definition: qopenglext.h:2747
GLdouble s
[6]
Definition: qopenglext.h:235
GLfloat GLfloat p
[1]
Definition: qopenglext.h:12698
GLenum GLenum GLenum GLenum GLenum scale
Definition: qopenglext.h:10817
GLenum GLenum GLsizei void * table
Definition: qopenglext.h:2745
SSL_CTX int(*) void arg)
uint32_t hb_codepoint_t
Definition: hb-common.h:106
#define HB_TAG(c1, c2, c3, c4)
Definition: hb-common.h:169
int32_t hb_position_t
Definition: hb-common.h:115
uint32_t hb_tag_t
Definition: hb-common.h:157
#define HB_TAG_NONE
Definition: hb-common.h:187
#define HB_OT_TAG_GSUB
Definition: hb-ot-layout.h:58
#define HB_SET_VALUE_INVALID
Definition: hb-set.h:46
QTextStream out(stdout)
[7]
MyRecord record(int row) const
[0]
QGraphicsItem * item
QStringList::Iterator it
Definition: hb-null.hh:93
bool sanitize(hb_sanitize_context_t *c, Ts &&... ds) const
hb_array_t< Type > as_array()
hb_array_t< const Type > sub_array(unsigned int start_offset, unsigned int count) const
DEFINE_SIZE_ARRAY(6, classValue)
Array16Of< HBUINT16 > classValue
DEFINE_SIZE_ARRAY(4, rangeRecord)
SortedArray16Of< RangeRecord > rangeRecord
void intersected_class_glyphs(const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
DEFINE_SIZE_UNION(2, format)
bool collect_class(set_t *glyphs, unsigned int klass) const
hb_codepoint_t operator()(hb_codepoint_t k) const
bool subset(hb_subset_context_t *c, hb_map_t *klass_map=nullptr, bool keep_empty_table=true, bool use_class_zero=true, const Coverage *glyph_filter=nullptr) const
bool has(hb_codepoint_t k) const
bool intersects(const hb_set_t *glyphs) const
unsigned int value_t
void intersected_classes(const hb_set_t *glyphs, hb_set_t *intersect_classes) const
ClassDefFormat2 format2
bool sanitize(hb_sanitize_context_t *c) const
ClassDefFormat1 format1
static constexpr unsigned SENTINEL
bool collect_coverage(set_t *glyphs) const
unsigned int get_class(hb_codepoint_t glyph_id) const
union OT::ClassDef::@166 u
bool intersects_class(const hb_set_t *glyphs, unsigned int klass) const
value_t operator[](hb_codepoint_t k) const
unsigned int get(hb_codepoint_t k) const
bool serialize(hb_serialize_context_t *c, Iterator it_with_class_zero)
bool subset(hb_subset_context_t *c) const
bool sanitize(hb_sanitize_context_t *c) const
DEFINE_SIZE_UNION(2, format)
ConditionFormat1 format1
union OT::Condition::@167 u
context_t::return_t dispatch(context_t *c, Ts &&... ds) const
bool evaluate(const int *coords, unsigned int coord_len) const
bool sanitize(hb_sanitize_context_t *c) const
bool subset(hb_subset_context_t *c) const
Array16OfOffset32To< Condition > conditions
DEFINE_SIZE_ARRAY(2, conditions)
bool evaluate(const int *coords, unsigned int coord_len) const
CoverageFormat1::iter_t format1
iter_t(const Coverage &c_=Null(Coverage))
bool operator!=(const iter_t &o) const
CoverageFormat2::iter_t format2
__item_t__ __item__() const
hb_codepoint_t get_glyph() const
static constexpr bool is_sorted_iterator
hb_codepoint_t get_glyph() const
void init(const struct CoverageFormat1 &c_)
bool operator!=(const iter_t &o) const
SortedArray16Of< HBGlyphID16 > glyphArray
DEFINE_SIZE_ARRAY(4, glyphArray)
void init(const CoverageFormat2 &c_)
hb_codepoint_t get_glyph() const
bool operator!=(const iter_t &o) const
SortedArray16Of< RangeRecord > rangeRecord
DEFINE_SIZE_ARRAY(4, rangeRecord)
bool subset(hb_subset_context_t *c) const
CoverageFormat1 format1
value_t operator[](hb_codepoint_t k) const
bool operator()(hb_codepoint_t k) const
bool sanitize(hb_sanitize_context_t *c) const
union OT::Coverage::@164 u
bool intersects(const hb_set_t *glyphs) const
CoverageFormat2 format2
static constexpr unsigned SENTINEL
bool has(hb_codepoint_t k) const
bool collect_coverage(set_t *glyphs) const
DEFINE_SIZE_UNION(2, format)
unsigned int get_coverage(hb_codepoint_t glyph_id) const
unsigned int value_t
unsigned int get(hb_codepoint_t k) const
iter_t iter() const
void intersected_coverage_glyphs(const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
bool intersects_coverage(const hb_set_t *glyphs, unsigned int index) const
bool serialize(hb_serialize_context_t *c, Iterator glyphs)
hb_position_t get_x_delta(hb_font_t *font, const VariationStore &store=Null(VariationStore)) const
DEFINE_SIZE_UNION(6, b)
Device * copy(hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map=nullptr) const
hb_position_t get_y_delta(hb_font_t *font, const VariationStore &store=Null(VariationStore)) const
bool sanitize(hb_sanitize_context_t *c) const
VariationDevice variation
union OT::Device::@168 u
void collect_variation_indices(hb_set_t *layout_variation_indices) const
HintingDevice hinting
DEFINE_SIZE_ARRAY_SIZED(4, lookupIndex)
unsigned int get_lookup_indexes(unsigned int start_index, unsigned int *lookup_count, unsigned int *lookup_tags) const
Offset16To< FeatureParams > featureParams
unsigned int get_lookup_count() const
bool subset(hb_subset_context_t *c, hb_subset_layout_context_t *l, const Tag *tag=nullptr) const
IndexArray lookupIndex
const FeatureParams & get_feature_params() const
hb_tag_t get_lookup_index(unsigned int i) const
bool sanitize(hb_sanitize_context_t *c, const Record_sanitize_closure_t *closure=nullptr) const
bool intersects_lookup_indexes(const hb_map_t *lookup_indexes) const
void add_lookup_indexes_to(hb_set_t *lookup_indexes) const
bool sanitize(hb_sanitize_context_t *c) const
unsigned get_characters(unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
bool subset(hb_subset_context_t *c) const
const FeatureParamsSize & get_size_params(hb_tag_t tag) const
const FeatureParamsStylisticSet & get_stylistic_set_params(hb_tag_t tag) const
FeatureParamsCharacterVariants characterVariants
FeatureParamsStylisticSet stylisticSet
const FeatureParamsCharacterVariants & get_character_variants_params(hb_tag_t tag) const
bool subset(hb_subset_context_t *c, const Tag *tag) const
FeatureParamsSize size
bool sanitize(hb_sanitize_context_t *c, hb_tag_t tag) const
bool sanitize(hb_sanitize_context_t *c) const
bool subset(hb_subset_context_t *c) const
bool sanitize(hb_sanitize_context_t *c) const
bool subset(hb_subset_context_t *c) const
void closure_features(const hb_map_t *lookup_indexes, hb_set_t *feature_indexes) const
bool subset(hb_subset_context_t *c, hb_subset_layout_context_t *l) const
DEFINE_SIZE_ARRAY(6, substitutions)
Array16Of< FeatureTableSubstitutionRecord > substitutions
void collect_lookups(const hb_set_t *feature_indexes, hb_set_t *lookup_indexes) const
bool intersects_features(const hb_map_t *feature_index_map) const
bool sanitize(hb_sanitize_context_t *c) const
const Feature * find_substitute(unsigned int feature_index) const
bool sanitize(hb_sanitize_context_t *c, const void *base) const
void closure_features(const void *base, const hb_map_t *lookup_indexes, hb_set_t *feature_indexes) const
bool subset(hb_subset_layout_context_t *c, const void *base) const
void collect_lookups(const void *base, hb_set_t *lookup_indexes) const
Offset32To< FeatureTableSubstitution > substitutions
bool sanitize(hb_sanitize_context_t *c, const void *base) const
void collect_lookups(const void *base, const hb_set_t *feature_indexes, hb_set_t *lookup_indexes) const
void closure_features(const void *base, const hb_map_t *lookup_indexes, hb_set_t *feature_indexes) const
bool subset(hb_subset_layout_context_t *c, const void *base) const
bool intersects_features(const void *base, const hb_map_t *feature_index_map) const
Offset32To< ConditionSet > conditions
FeatureVariations * copy(hb_serialize_context_t *c) const
bool subset(hb_subset_context_t *c, hb_subset_layout_context_t *l) const
bool find_index(const int *coords, unsigned int coord_len, unsigned int *index) const
void closure_features(const hb_map_t *lookup_indexes, hb_set_t *feature_indexes) const
static constexpr unsigned NOT_FOUND_INDEX
void collect_lookups(const hb_set_t *feature_indexes, hb_set_t *lookup_indexes) const
const Feature * find_substitute(unsigned int variations_index, unsigned int feature_index) const
bool sanitize(hb_sanitize_context_t *c) const
DEFINE_SIZE_ARRAY_SIZED(8, varRecords)
Array32Of< FeatureVariationRecord > varRecords
bool sanitize(hb_sanitize_context_t *c) const
UnsizedArrayOf< HBUINT16 > deltaValueZ
bool sanitize(hb_sanitize_context_t *c) const
unsigned int get_size() const
DEFINE_SIZE_ARRAY(6, deltaValueZ)
HintingDevice * copy(hb_serialize_context_t *c) const
void serialize(hb_serialize_context_t *c, hb_subset_layout_context_t *l, Iterator it)
bool intersects(const hb_map_t *indexes) const
void add_indexes_to(hb_set_t *output) const
unsigned int get_indexes(unsigned int start_offset, unsigned int *_count, unsigned int *_indexes) const
static constexpr unsigned NOT_FOUND_INDEX
static HB_INTERNAL int cmp(const IntType *a, const IntType *b)
Definition: hb-open-type.hh:79
bool sanitize(hb_sanitize_context_t *c) const
bool compare(const LangSys &o, const hb_map_t *feature_index_map) const
DEFINE_SIZE_ARRAY_SIZED(6, featureIndex)
IndexArray featureIndex
bool subset(hb_subset_context_t *c, hb_subset_layout_context_t *l, const Tag *tag=nullptr) const
unsigned int get_feature_indexes(unsigned int start_offset, unsigned int *feature_count, unsigned int *feature_indexes) const
void collect_features(hb_prune_langsys_context_t *c) const
void add_feature_indexes_to(hb_set_t *feature_indexes) const
unsigned int get_feature_count() const
LangSys * copy(hb_serialize_context_t *c) const
hb_tag_t get_feature_index(unsigned int i) const
HBUINT16 reqFeatureIndex
bool sanitize(hb_sanitize_context_t *c, const Record_sanitize_closure_t *=nullptr) const
unsigned int get_required_feature_index() const
bool has_required_feature() const
bool serialize(hb_serialize_context_t *c, unsigned int lookup_type, uint32_t lookup_props, unsigned int num_subtables)
TSubTable & get_subtable(unsigned int i)
uint32_t get_props() const
bool sanitize(hb_sanitize_context_t *c) const
unsigned int get_type() const
bool subset(hb_subset_context_t *c) const
unsigned int get_size() const
const TSubTable & get_subtable(unsigned int i) const
const Array16OfOffset16To< TSubTable > & get_subtables() const
Array16OfOffset16To< TSubTable > & get_subtables()
context_t::return_t dispatch(context_t *c, Ts &&... ds) const
unsigned int get_subtable_count() const
DEFINE_SIZE_ARRAY(6, subTable)
bool sanitize(hb_sanitize_context_t *c) const
bool subset(hb_subset_context_t *c, hb_subset_layout_context_t *l) const
bool intersects(const hb_set_t *glyphs) const
int cmp(hb_codepoint_t g) const
bool collect_coverage(set_t *glyphs) const
bool sanitize(hb_sanitize_context_t *c) const
unsigned int get_tags(unsigned int start_offset, unsigned int *record_count, hb_tag_t *record_tags) const
Offset16To< Type > & get_offset(unsigned int i)
bool find_index(hb_tag_t tag, unsigned int *index) const
const Tag & get_tag(unsigned int i) const
const Offset16To< Type > & get_offset(unsigned int i) const
DEFINE_SIZE_STATIC(6)
bool subset(hb_subset_layout_context_t *c, const void *base) const
Offset16To< Type > offset
int cmp(hb_tag_t a) const
bool sanitize(hb_sanitize_context_t *c, const void *base) const
bool subset(hb_subset_context_t *c, hb_subset_layout_context_t *l) const
bool subset(hb_subset_context_t *c, hb_subset_layout_context_t *l) const
bool sanitize(hb_sanitize_context_t *c) const
const Type & operator[](unsigned int i) const
bool subset(hb_subset_context_t *c, hb_subset_layout_context_t *l) const
bool subset(hb_subset_context_t *c, hb_subset_layout_context_t *l, const Tag *tag) const
bool has_default_lang_sys() const
bool find_lang_sys_index(hb_tag_t tag, unsigned int *index) const
const LangSys & get_lang_sys(unsigned int i) const
DEFINE_SIZE_ARRAY_SIZED(4, langSys)
const Tag & get_lang_sys_tag(unsigned int i) const
unsigned int get_lang_sys_tags(unsigned int start_offset, unsigned int *lang_sys_count, hb_tag_t *lang_sys_tags) const
bool sanitize(hb_sanitize_context_t *c, const Record_sanitize_closure_t *=nullptr) const
Offset16To< LangSys > defaultLangSys
const LangSys & get_default_lang_sys() const
void prune_langsys(hb_prune_langsys_context_t *c, unsigned script_index) const
RecordArrayOf< LangSys > langSys
unsigned int get_lang_sys_count() const
bool bfind(const T &x, unsigned int *i=nullptr, hb_not_found_t not_found=HB_NOT_FOUND_DONT_STORE, unsigned int to_store=(unsigned int) -1) const
hb_sorted_array_t< const Type > sub_array(unsigned int start_offset, unsigned int count) const
iter_t iter() const
Array16Of< HBUINT16 > regionIndices
void collect_region_refs(hb_set_t &region_indices, const hb_inc_bimap_t &inner_map) const
int16_t get_item_delta(unsigned int item, unsigned int region) const
bool serialize(hb_serialize_context_t *c, const VarData *src, const hb_inc_bimap_t &inner_map, const hb_bimap_t &region_map)
void get_region_scalars(const int *coords, unsigned int coord_count, const VarRegionList &regions, float *scalars, unsigned int num_scalars) const
bool sanitize(hb_sanitize_context_t *c) const
unsigned int get_size() const
DEFINE_SIZE_ARRAY(6, regionIndices)
unsigned int get_row_size() const
HBUINT8 * get_delta_bytes()
float get_delta(unsigned int inner, const int *coords, unsigned int coord_count, const VarRegionList &regions) const
unsigned int get_region_index_count() const
void set_item_delta(unsigned int item, unsigned int region, int16_t delta)
const HBUINT8 * get_delta_bytes() const
bool sanitize(hb_sanitize_context_t *c) const
float evaluate(int coord) const
DEFINE_SIZE_ARRAY(4, axesZ)
bool serialize(hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t &region_map)
unsigned int get_size() const
float evaluate(unsigned int region_index, const int *coords, unsigned int coord_len) const
UnsizedArrayOf< VarRegionAxis > axesZ
bool sanitize(hb_sanitize_context_t *c) const
bool serialize(hb_serialize_context_t *c, const VariationStore *src, const hb_array_t< hb_inc_bimap_t > &inner_maps)
float get_delta(unsigned int index, const int *coords, unsigned int coord_count) const
void get_region_scalars(unsigned int major, const int *coords, unsigned int coord_count, float *scalars, unsigned int num_scalars) const
bool sanitize(hb_sanitize_context_t *c) const
unsigned int get_sub_table_count() const
DEFINE_SIZE_ARRAY_SIZED(8, dataSets)
Offset32To< VarRegionList > regions
bool subset(hb_subset_context_t *c) const
Array16OfOffset32To< VarData > dataSets
unsigned int get_region_index_count(unsigned int major) const
hb_collect_variation_indices_context_t(hb_set_t *layout_variation_indices_, const hb_set_t *glyph_set_, const hb_map_t *gpos_lookups_)
hb_hashmap_t< unsigned, hb_set_t * > * script_langsys_map
bool visitLangsys(unsigned feature_count)
hb_prune_langsys_context_t(const void *table_, hb_hashmap_t< unsigned, hb_set_t * > *script_langsys_map_, const hb_map_t *duplicate_feature_map_, hb_set_t *new_collected_feature_indexes_)
hb_subset_context_t * subset_context
hb_subset_layout_context_t(hb_subset_context_t *c_, hb_tag_t tag_, hb_map_t *lookup_map_, hb_hashmap_t< unsigned, hb_set_t * > *script_langsys_map_, hb_map_t *feature_index_map_)
const hb_hashmap_t< unsigned, hb_set_t * > * script_langsys_map
serialize_math_record_array_t(hb_serialize_context_t *serialize_context_, OutputArray &out_, const void *base_)
subset_offset_array_arg_t(hb_subset_context_t *subset_context_, OutputArray &out_, const void *base_, Arg &&arg_)
subset_offset_array_t(hb_subset_context_t *subset_context_, OutputArray &out_, const void *base_)
subset_record_array_t(hb_subset_layout_context_t *c_, OutputArray *out_, const void *base_)
Definition: main.cpp:38
Definition: moc.h:48
unsigned int length
Definition: hb-array.hh:291
hb_codepoint_t backward(hb_codepoint_t rhs) const
Definition: hb-bimap.hh:58
unsigned int get_population() const
Definition: hb-bimap.hh:77
bool in_error() const
Definition: hb-bimap.hh:42
V get(K key) const
Definition: hb-map.hh:202
unsigned int get_population() const
Definition: hb-map.hh:238
bool is_empty() const
Definition: hb-map.hh:235
bool set(K key, const V &value)
Definition: hb-map.hh:199
bool has(K k, V *vp=nullptr) const
Definition: hb-map.hh:214
hb_codepoint_t add(hb_codepoint_t lhs)
Definition: hb-bimap.hh:90
hb_codepoint_t get_next_value() const
Definition: hb-bimap.hh:104
void revert(snapshot_t snap)
auto src copy(this, std::forward< Ts >(ds)...)) template< typename Type > auto _copy(const Type &src
void del_range(hb_codepoint_t a, hb_codepoint_t b)
Definition: hb-set.hh:103
iter_t iter() const
Definition: hb-set.hh:155
hb_codepoint_t get_max() const
Definition: hb-set.hh:147
bool has(hb_codepoint_t k) const
Definition: hb-set.hh:111
bool intersects(hb_codepoint_t first, hb_codepoint_t last) const
Definition: hb-set.hh:122
bool is_empty() const
Definition: hb-set.hh:83
static constexpr hb_codepoint_t INVALID
Definition: hb-set.hh:149
void add_array(const T *array, unsigned int count, unsigned int stride=sizeof(T))
Definition: hb-set.hh:89
bool in_error() const
Definition: hb-set.hh:78
void add(hb_codepoint_t g)
Definition: hb-set.hh:85
hb_codepoint_t get_min() const
Definition: hb-set.hh:146
Type * push()
Definition: hb-vector.hh:183
array_t as_array()
Definition: hb-vector.hh:152
unsigned int length
Definition: hb-vector.hh:76
bool resize(int size_)
Definition: hb-vector.hh:326
iter_t iter() const
Definition: hb-vector.hh:158
XmlOutput::xml_output tag(const QString &name)
Definition: xmloutput.h:154