hb-ot-color-colr-table.hh
  1. /*
  2. * Copyright © 2018 Ebrahim Byagowi
  3. * Copyright © 2020 Google, Inc.
  4. *
  5. * This is part of HarfBuzz, a text shaping library.
  6. *
  7. * Permission is hereby granted, without written agreement and without
  8. * license or royalty fees, to use, copy, modify, and distribute this
  9. * software and its documentation for any purpose, provided that the
  10. * above copyright notice and the following two paragraphs appear in
  11. * all copies of this software.
  12. *
  13. * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14. * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15. * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16. * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17. * DAMAGE.
  18. *
  19. * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20. * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21. * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
  22. * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23. * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24. *
  25. * Google Author(s): Calder Kitagawa
  26. */
  27. #ifndef HB_OT_COLOR_COLR_TABLE_HH
  28. #define HB_OT_COLOR_COLR_TABLE_HH
  29. #include "hb-open-type.hh"
  30. #include "hb-ot-layout-common.hh"
  31. #include "hb-ot-var-common.hh"
  32. /*
  33. * COLR -- Color
  34. * https://docs.microsoft.com/en-us/typography/opentype/spec/colr
  35. */
  36. #define HB_OT_TAG_COLR HB_TAG('C','O','L','R')
  37. #ifndef HB_COLRV1_MAX_NESTING_LEVEL
  38. #define HB_COLRV1_MAX_NESTING_LEVEL 16
  39. #endif
  40. namespace OT {
  41. struct COLR;
/* Dispatch context that computes the transitive closure of a COLRv1 paint
 * graph: it collects the glyph IDs, layer indices, and palette indices a
 * paint tree references, while guarding against reference cycles and
 * unbounded nesting depth. */
struct hb_colrv1_closure_context_t :
       hb_dispatch_context_t<hb_colrv1_closure_context_t>
{
  template <typename T>
  return_t dispatch (const T &obj)
  {
    /* Depth guard: stop descending once the nesting budget is exhausted. */
    if (unlikely (nesting_level_left == 0))
      return hb_empty_t ();
    /* Cycle guard: each paint table is closed over at most once. */
    if (paint_visited (&obj))
      return hb_empty_t ();
    nesting_level_left--;
    obj.closurev1 (this);
    nesting_level_left++;
    return hb_empty_t ();
  }
  static return_t default_return_value () { return hb_empty_t (); }

  /* Marks `paint` as visited, keyed by its byte offset from `base`.
   * Returns true if it was already visited (or the set is in error),
   * i.e. the caller should not recurse into it. */
  bool paint_visited (const void *paint)
  {
    hb_codepoint_t delta = (hb_codepoint_t) ((uintptr_t) paint - (uintptr_t) base);
    if (visited_paint.in_error() || visited_paint.has (delta))
      return true;

    visited_paint.add (delta);
    return false;
  }

  /* `base` is the start of the COLR table; see callers for the cast contract. */
  const COLR* get_colr_table () const
  { return reinterpret_cast<const COLR *> (base); }

  void add_glyph (unsigned glyph_id)
  { glyphs->add (glyph_id); }

  /* Adds the inclusive range of layer indices used by a PaintColrLayers. */
  void add_layer_indices (unsigned first_layer_index, unsigned num_of_layers)
  { layer_indices->add_range (first_layer_index, first_layer_index + num_of_layers - 1); }

  void add_palette_index (unsigned palette_index)
  { palette_indices->add (palette_index); }

  public:
  const void *base;          /* Start of the COLR table; offsets key visited_paint. */
  hb_set_t visited_paint;    /* Byte offsets of paints already closed over. */
  hb_set_t *glyphs;          /* Out: glyph IDs referenced by the paint graph. */
  hb_set_t *layer_indices;   /* Out: layerList indices referenced. */
  hb_set_t *palette_indices; /* Out: CPAL palette entry indices referenced. */
  unsigned nesting_level_left; /* Remaining recursion budget. */

  hb_colrv1_closure_context_t (const void *base_,
                               hb_set_t *glyphs_,
                               hb_set_t *layer_indices_,
                               hb_set_t *palette_indices_,
                               unsigned nesting_level_left_ = HB_COLRV1_MAX_NESTING_LEVEL) :
                          base (base_),
                          glyphs (glyphs_),
                          layer_indices (layer_indices_),
                          palette_indices (palette_indices_),
                          nesting_level_left (nesting_level_left_)
  {}
};
/* COLRv0 layer record: one layer glyph drawn with one palette color. */
struct LayerRecord
{
  /* Conversion to the public-API layer representation. */
  operator hb_ot_color_layer_t () const { return {glyphId, colorIdx}; }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  public:
  HBGlyphID16 glyphId;  /* Glyph ID of layer glyph */
  Index       colorIdx; /* Index value to use with a
                         * selected color palette.
                         * An index value of 0xFFFF
                         * is a special case indicating
                         * that the text foreground
                         * color (defined by a
                         * higher-level client) should
                         * be used and shall not be
                         * treated as actual index
                         * into CPAL ColorRecord array. */
  public:
  DEFINE_SIZE_STATIC (4);
};
/* COLRv0 base glyph record: maps a base glyph to its run of layer records. */
struct BaseGlyphRecord
{
  /* Comparator for binary search by glyph ID (records are sorted by glyphId). */
  int cmp (hb_codepoint_t g) const
  { return g < glyphId ? -1 : g > glyphId ? 1 : 0; }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  public:
  HBGlyphID16 glyphId;       /* Glyph ID of reference glyph */
  HBUINT16    firstLayerIdx; /* Index (from beginning of
                              * the Layer Records) to the
                              * layer record. There will be
                              * numLayers consecutive entries
                              * for this base glyph. */
  HBUINT16    numLayers;     /* Number of color layers
                              * associated with this glyph */
  public:
  DEFINE_SIZE_STATIC (6);
};
/* Wraps a value type T with a trailing variation-index base, forming the
 * "Var" flavor of a COLRv1 record (e.g. VarColorStop, VarAffine2x3). */
template <typename T>
struct Variable
{
  Variable<T>* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (this));
  }

  void closurev1 (hb_colrv1_closure_context_t* c) const
  { value.closurev1 (c); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    /* Subset the wrapped value first, then copy varIdxBase verbatim
     * (it immediately follows `value` in the serialized layout). */
    if (!value.subset (c)) return_trace (false);
    return_trace (c->serializer->embed (varIdxBase));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && value.sanitize (c));
  }

  protected:
  T      value;
  public:
  VarIdx varIdxBase;
  public:
  DEFINE_SIZE_STATIC (4 + T::static_size);
};
/* Non-variable counterpart of Variable<T>: same interface, but no
 * serialized variation index; varIdxBase is the NO_VARIATION sentinel. */
template <typename T>
struct NoVariable
{
  static constexpr uint32_t varIdxBase = VarIdx::NO_VARIATION;

  NoVariable<T>* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (this));
  }

  void closurev1 (hb_colrv1_closure_context_t* c) const
  { value.closurev1 (c); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace (value.subset (c));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && value.sanitize (c));
  }

  T value;
  public:
  DEFINE_SIZE_STATIC (T::static_size);
};
// Color structures

/* One gradient color stop: offset along the color line, palette entry, alpha. */
struct ColorStop
{
  void closurev1 (hb_colrv1_closure_context_t* c) const
  { c->add_palette_index (paletteIndex); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (*this);
    if (unlikely (!out)) return_trace (false);

    /* Remap the palette index through the subset plan's palette map. */
    return_trace (c->serializer->check_assign (out->paletteIndex, c->plan->colr_palettes->get (paletteIndex),
                                               HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  F2DOT14  stopOffset;
  HBUINT16 paletteIndex;
  F2DOT14  alpha;
  public:
  DEFINE_SIZE_STATIC (2 + 2 * F2DOT14::static_size);
};
/* Gradient extend mode: how a color line behaves outside [0,1]. */
struct Extend : HBUINT8
{
  enum {
    EXTEND_PAD     = 0,
    EXTEND_REPEAT  = 1,
    EXTEND_REFLECT = 2,
  };
  public:
  DEFINE_SIZE_STATIC (1);
};
/* A gradient color line: an extend mode plus an array of (Var/NoVar)
 * color stops. `Var` selects the Variable or NoVariable stop wrapper. */
template <template<typename> class Var>
struct ColorLine
{
  void closurev1 (hb_colrv1_closure_context_t* c) const
  {
    for (const auto &stop : stops.iter ())
      stop.closurev1 (c);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out)) return_trace (false);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    /* Copy header fields, then serialize each stop in order immediately
     * after — the stops array is inline, so ordering matters. */
    if (!c->serializer->check_assign (out->extend, extend, HB_SERIALIZE_ERROR_INT_OVERFLOW)) return_trace (false);
    if (!c->serializer->check_assign (out->stops.len, stops.len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW)) return_trace (false);

    for (const auto& stop : stops.iter ())
    {
      if (!stop.subset (c)) return_trace (false);
    }
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  stops.sanitize (c));
  }

  Extend                    extend;
  Array16Of<Var<ColorStop>> stops;
  public:
  DEFINE_SIZE_ARRAY_SIZED (3, stops);
};
// Composition modes

// Compositing modes are taken from https://www.w3.org/TR/compositing-1/
// NOTE: a brief audit of major implementations suggests most support most
// or all of the specified modes.
struct CompositeMode : HBUINT8
{
  enum {
    // Porter-Duff modes
    // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators
    COMPOSITE_CLEAR          =  0,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_clear
    COMPOSITE_SRC            =  1,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_src
    COMPOSITE_DEST           =  2,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dst
    COMPOSITE_SRC_OVER       =  3,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcover
    COMPOSITE_DEST_OVER      =  4,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstover
    COMPOSITE_SRC_IN         =  5,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcin
    COMPOSITE_DEST_IN        =  6,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstin
    COMPOSITE_SRC_OUT        =  7,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcout
    COMPOSITE_DEST_OUT       =  8,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstout
    COMPOSITE_SRC_ATOP       =  9,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcatop
    COMPOSITE_DEST_ATOP      = 10,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstatop
    COMPOSITE_XOR            = 11,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_xor
    COMPOSITE_PLUS           = 12,  // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_plus

    // Blend modes
    // https://www.w3.org/TR/compositing-1/#blending
    COMPOSITE_SCREEN         = 13,  // https://www.w3.org/TR/compositing-1/#blendingscreen
    COMPOSITE_OVERLAY        = 14,  // https://www.w3.org/TR/compositing-1/#blendingoverlay
    COMPOSITE_DARKEN         = 15,  // https://www.w3.org/TR/compositing-1/#blendingdarken
    COMPOSITE_LIGHTEN        = 16,  // https://www.w3.org/TR/compositing-1/#blendinglighten
    COMPOSITE_COLOR_DODGE    = 17,  // https://www.w3.org/TR/compositing-1/#blendingcolordodge
    COMPOSITE_COLOR_BURN     = 18,  // https://www.w3.org/TR/compositing-1/#blendingcolorburn
    COMPOSITE_HARD_LIGHT     = 19,  // https://www.w3.org/TR/compositing-1/#blendinghardlight
    COMPOSITE_SOFT_LIGHT     = 20,  // https://www.w3.org/TR/compositing-1/#blendingsoftlight
    COMPOSITE_DIFFERENCE     = 21,  // https://www.w3.org/TR/compositing-1/#blendingdifference
    COMPOSITE_EXCLUSION      = 22,  // https://www.w3.org/TR/compositing-1/#blendingexclusion
    COMPOSITE_MULTIPLY       = 23,  // https://www.w3.org/TR/compositing-1/#blendingmultiply

    // Modes that, uniquely, do not operate on components
    // https://www.w3.org/TR/compositing-1/#blendingnonseparable
    COMPOSITE_HSL_HUE        = 24,  // https://www.w3.org/TR/compositing-1/#blendinghue
    COMPOSITE_HSL_SATURATION = 25,  // https://www.w3.org/TR/compositing-1/#blendingsaturation
    COMPOSITE_HSL_COLOR      = 26,  // https://www.w3.org/TR/compositing-1/#blendingcolor
    COMPOSITE_HSL_LUMINOSITY = 27,  // https://www.w3.org/TR/compositing-1/#blendingluminosity
  };
  public:
  DEFINE_SIZE_STATIC (1);
};
/* 2x3 affine transformation matrix in 16.16 fixed point:
 *   | xx xy dx |
 *   | yx yy dy |  */
struct Affine2x3
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  F16DOT16 xx;
  F16DOT16 yx;
  F16DOT16 xy;
  F16DOT16 yy;
  F16DOT16 dx;
  F16DOT16 dy;
  public:
  DEFINE_SIZE_STATIC (6 * F16DOT16::static_size);
};
  318. struct PaintColrLayers
  319. {
  320. void closurev1 (hb_colrv1_closure_context_t* c) const;
  321. bool subset (hb_subset_context_t *c) const
  322. {
  323. TRACE_SUBSET (this);
  324. auto *out = c->serializer->embed (this);
  325. if (unlikely (!out)) return_trace (false);
  326. return_trace (c->serializer->check_assign (out->firstLayerIndex, c->plan->colrv1_layers->get (firstLayerIndex),
  327. HB_SERIALIZE_ERROR_INT_OVERFLOW));
  328. return_trace (true);
  329. }
  330. bool sanitize (hb_sanitize_context_t *c) const
  331. {
  332. TRACE_SANITIZE (this);
  333. return_trace (c->check_struct (this));
  334. }
  335. HBUINT8 format; /* format = 1 */
  336. HBUINT8 numLayers;
  337. HBUINT32 firstLayerIndex; /* index into COLRv1::layerList */
  338. public:
  339. DEFINE_SIZE_STATIC (6);
  340. };
/* Paint formats 2 (NoVariable) / 3 (Variable): solid fill with a CPAL
 * palette color scaled by `alpha`. */
struct PaintSolid
{
  void closurev1 (hb_colrv1_closure_context_t* c) const
  { c->add_palette_index (paletteIndex); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (*this);
    if (unlikely (!out)) return_trace (false);

    /* Remap the palette index through the subset plan's palette map. */
    return_trace (c->serializer->check_assign (out->paletteIndex, c->plan->colr_palettes->get (paletteIndex),
                                               HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  HBUINT8  format; /* format = 2(noVar) or 3(Var)*/
  HBUINT16 paletteIndex;
  F2DOT14  alpha;
  public:
  DEFINE_SIZE_STATIC (3 + F2DOT14::static_size);
};
/* Paint formats 4 (NoVariable) / 5 (Variable): linear gradient defined by
 * points p0 (start), p1 (end) and p2 (rotation vector), filled along the
 * referenced color line. */
template <template<typename> class Var>
struct PaintLinearGradient
{
  void closurev1 (hb_colrv1_closure_context_t* c) const
  { (this+colorLine).closurev1 (c); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Recursively subset the color line; fixes up out->colorLine. */
    return_trace (out->colorLine.serialize_subset (c, colorLine, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && colorLine.sanitize (c, this));
  }

  HBUINT8                    format;    /* format = 4(noVar) or 5 (Var) */
  Offset24To<ColorLine<Var>> colorLine; /* Offset (from beginning of PaintLinearGradient
                                         * table) to ColorLine subtable. */
  FWORD x0;
  FWORD y0;
  FWORD x1;
  FWORD y1;
  FWORD x2;
  FWORD y2;
  public:
  DEFINE_SIZE_STATIC (4 + 6 * FWORD::static_size);
};
/* Paint formats 6 (NoVariable) / 7 (Variable): radial gradient between two
 * circles (c0, radius0) and (c1, radius1), filled along the referenced
 * color line. */
template <template<typename> class Var>
struct PaintRadialGradient
{
  void closurev1 (hb_colrv1_closure_context_t* c) const
  { (this+colorLine).closurev1 (c); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Recursively subset the color line; fixes up out->colorLine. */
    return_trace (out->colorLine.serialize_subset (c, colorLine, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && colorLine.sanitize (c, this));
  }

  HBUINT8                    format;    /* format = 6(noVar) or 7 (Var) */
  Offset24To<ColorLine<Var>> colorLine; /* Offset (from beginning of PaintRadialGradient
                                         * table) to ColorLine subtable. */
  FWORD  x0;
  FWORD  y0;
  UFWORD radius0;
  FWORD  x1;
  FWORD  y1;
  UFWORD radius1;
  public:
  DEFINE_SIZE_STATIC (4 + 6 * FWORD::static_size);
};
/* Paint formats 8 (NoVariable) / 9 (Variable): sweep (angular) gradient
 * around a center point, between startAngle and endAngle, filled along
 * the referenced color line. */
template <template<typename> class Var>
struct PaintSweepGradient
{
  void closurev1 (hb_colrv1_closure_context_t* c) const
  { (this+colorLine).closurev1 (c); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Recursively subset the color line; fixes up out->colorLine. */
    return_trace (out->colorLine.serialize_subset (c, colorLine, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && colorLine.sanitize (c, this));
  }

  HBUINT8                    format;    /* format = 8(noVar) or 9 (Var) */
  Offset24To<ColorLine<Var>> colorLine; /* Offset (from beginning of PaintSweepGradient
                                         * table) to ColorLine subtable. */
  FWORD   centerX;
  FWORD   centerY;
  F2DOT14 startAngle;
  F2DOT14 endAngle;
  public:
  DEFINE_SIZE_STATIC (4 + 2 * FWORD::static_size + 2 * F2DOT14::static_size);
};
  449. struct Paint;
// Paint a non-COLR glyph, filled as indicated by paint.
struct PaintGlyph
{
  void closurev1 (hb_colrv1_closure_context_t* c) const;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Remap the clip glyph through the subset plan's glyph map... */
    if (! c->serializer->check_assign (out->gid, c->plan->glyph_map->get (gid),
                                       HB_SERIALIZE_ERROR_INT_OVERFLOW))
      return_trace (false);

    /* ...then recursively subset the fill paint. */
    return_trace (out->paint.serialize_subset (c, paint, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && paint.sanitize (c, this));
  }

  HBUINT8           format; /* format = 10 */
  Offset24To<Paint> paint;  /* Offset (from beginning of PaintGlyph table) to Paint subtable. */
  HBUINT16          gid;
  public:
  DEFINE_SIZE_STATIC (6);
};
  475. struct PaintColrGlyph
  476. {
  477. void closurev1 (hb_colrv1_closure_context_t* c) const;
  478. bool subset (hb_subset_context_t *c) const
  479. {
  480. TRACE_SUBSET (this);
  481. auto *out = c->serializer->embed (this);
  482. if (unlikely (!out)) return_trace (false);
  483. return_trace (c->serializer->check_assign (out->gid, c->plan->glyph_map->get (gid),
  484. HB_SERIALIZE_ERROR_INT_OVERFLOW));
  485. }
  486. bool sanitize (hb_sanitize_context_t *c) const
  487. {
  488. TRACE_SANITIZE (this);
  489. return_trace (c->check_struct (this));
  490. }
  491. HBUINT8 format; /* format = 11 */
  492. HBUINT16 gid;
  493. public:
  494. DEFINE_SIZE_STATIC (3);
  495. };
/* Paint formats 12 (NoVariable) / 13 (Variable): apply a 2x3 affine
 * transform to the sub-paint `src`. */
template <template<typename> class Var>
struct PaintTransform
{
  HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* The transform matrix is copied verbatim; only the sub-paint is
     * recursively subset. */
    if (!out->transform.serialize_copy (c->serializer, transform, this)) return_trace (false);
    return_trace (out->src.serialize_subset (c, src, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  src.sanitize (c, this) &&
                  transform.sanitize (c, this));
  }

  HBUINT8                    format;    /* format = 12(noVar) or 13 (Var) */
  Offset24To<Paint>          src;       /* Offset (from beginning of PaintTransform table) to Paint subtable. */
  Offset24To<Var<Affine2x3>> transform;
  public:
  DEFINE_SIZE_STATIC (7);
};
  521. struct PaintTranslate
  522. {
  523. HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
  524. bool subset (hb_subset_context_t *c) const
  525. {
  526. TRACE_SUBSET (this);
  527. auto *out = c->serializer->embed (this);
  528. if (unlikely (!out)) return_trace (false);
  529. return_trace (out->src.serialize_subset (c, src, this));
  530. }
  531. bool sanitize (hb_sanitize_context_t *c) const
  532. {
  533. TRACE_SANITIZE (this);
  534. return_trace (c->check_struct (this) && src.sanitize (c, this));
  535. }
  536. HBUINT8 format; /* format = 14(noVar) or 15 (Var) */
  537. Offset24To<Paint> src; /* Offset (from beginning of PaintTranslate table) to Paint subtable. */
  538. FWORD dx;
  539. FWORD dy;
  540. public:
  541. DEFINE_SIZE_STATIC (4 + 2 * FWORD::static_size);
  542. };
  543. struct PaintScale
  544. {
  545. HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
  546. bool subset (hb_subset_context_t *c) const
  547. {
  548. TRACE_SUBSET (this);
  549. auto *out = c->serializer->embed (this);
  550. if (unlikely (!out)) return_trace (false);
  551. return_trace (out->src.serialize_subset (c, src, this));
  552. }
  553. bool sanitize (hb_sanitize_context_t *c) const
  554. {
  555. TRACE_SANITIZE (this);
  556. return_trace (c->check_struct (this) && src.sanitize (c, this));
  557. }
  558. HBUINT8 format; /* format = 16 (noVar) or 17(Var) */
  559. Offset24To<Paint> src; /* Offset (from beginning of PaintScale table) to Paint subtable. */
  560. F2DOT14 scaleX;
  561. F2DOT14 scaleY;
  562. public:
  563. DEFINE_SIZE_STATIC (4 + 2 * F2DOT14::static_size);
  564. };
/* Paint formats 18 (NoVariable) / 19 (Variable): scale the sub-paint by
 * (scaleX, scaleY) about (centerX, centerY). */
struct PaintScaleAroundCenter
{
  HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Recursively subset the sub-paint; fixes up out->src. */
    return_trace (out->src.serialize_subset (c, src, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && src.sanitize (c, this));
  }

  HBUINT8           format; /* format = 18 (noVar) or 19(Var) */
  Offset24To<Paint> src;    /* Offset (from beginning of PaintScaleAroundCenter table) to Paint subtable. */
  F2DOT14           scaleX;
  F2DOT14           scaleY;
  FWORD             centerX;
  FWORD             centerY;
  public:
  DEFINE_SIZE_STATIC (4 + 2 * F2DOT14::static_size + 2 * FWORD::static_size);
};
  589. struct PaintScaleUniform
  590. {
  591. HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
  592. bool subset (hb_subset_context_t *c) const
  593. {
  594. TRACE_SUBSET (this);
  595. auto *out = c->serializer->embed (this);
  596. if (unlikely (!out)) return_trace (false);
  597. return_trace (out->src.serialize_subset (c, src, this));
  598. }
  599. bool sanitize (hb_sanitize_context_t *c) const
  600. {
  601. TRACE_SANITIZE (this);
  602. return_trace (c->check_struct (this) && src.sanitize (c, this));
  603. }
  604. HBUINT8 format; /* format = 20 (noVar) or 21(Var) */
  605. Offset24To<Paint> src; /* Offset (from beginning of PaintScaleUniform table) to Paint subtable. */
  606. F2DOT14 scale;
  607. public:
  608. DEFINE_SIZE_STATIC (4 + F2DOT14::static_size);
  609. };
/* Paint formats 22 (NoVariable) / 23 (Variable): uniformly scale the
 * sub-paint about (centerX, centerY). */
struct PaintScaleUniformAroundCenter
{
  HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Recursively subset the sub-paint; fixes up out->src. */
    return_trace (out->src.serialize_subset (c, src, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && src.sanitize (c, this));
  }

  HBUINT8           format; /* format = 22 (noVar) or 23(Var) */
  Offset24To<Paint> src;    /* Offset (from beginning of PaintScaleUniformAroundCenter table) to Paint subtable. */
  F2DOT14           scale;
  FWORD             centerX;
  FWORD             centerY;
  public:
  DEFINE_SIZE_STATIC (4 + F2DOT14::static_size + 2 * FWORD::static_size);
};
  633. struct PaintRotate
  634. {
  635. HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
  636. bool subset (hb_subset_context_t *c) const
  637. {
  638. TRACE_SUBSET (this);
  639. auto *out = c->serializer->embed (this);
  640. if (unlikely (!out)) return_trace (false);
  641. return_trace (out->src.serialize_subset (c, src, this));
  642. }
  643. bool sanitize (hb_sanitize_context_t *c) const
  644. {
  645. TRACE_SANITIZE (this);
  646. return_trace (c->check_struct (this) && src.sanitize (c, this));
  647. }
  648. HBUINT8 format; /* format = 24 (noVar) or 25(Var) */
  649. Offset24To<Paint> src; /* Offset (from beginning of PaintRotate table) to Paint subtable. */
  650. F2DOT14 angle;
  651. public:
  652. DEFINE_SIZE_STATIC (4 + F2DOT14::static_size);
  653. };
/* Paint formats 26 (NoVariable) / 27 (Variable): rotate the sub-paint by
 * `angle` about (centerX, centerY). */
struct PaintRotateAroundCenter
{
  HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Recursively subset the sub-paint; fixes up out->src. */
    return_trace (out->src.serialize_subset (c, src, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && src.sanitize (c, this));
  }

  HBUINT8           format; /* format = 26 (noVar) or 27(Var) */
  Offset24To<Paint> src;    /* Offset (from beginning of PaintRotateAroundCenter table) to Paint subtable. */
  F2DOT14           angle;
  FWORD             centerX;
  FWORD             centerY;
  public:
  DEFINE_SIZE_STATIC (4 + F2DOT14::static_size + 2 * FWORD::static_size);
};
/* Paint formats 28 (NoVariable) / 29 (Variable): skew the sub-paint by the
 * given x/y skew angles about the origin. */
struct PaintSkew
{
  HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Recursively subset the sub-paint; fixes up out->src. */
    return_trace (out->src.serialize_subset (c, src, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && src.sanitize (c, this));
  }

  HBUINT8           format; /* format = 28(noVar) or 29 (Var) */
  Offset24To<Paint> src;    /* Offset (from beginning of PaintSkew table) to Paint subtable. */
  F2DOT14           xSkewAngle;
  F2DOT14           ySkewAngle;
  public:
  DEFINE_SIZE_STATIC (4 + 2 * F2DOT14::static_size);
};
/* PaintSkewAroundCenter: COLRv1 paint format 30 (static) / 31 (variable).
 * Skews the child paint about an explicit center point. */
struct PaintSkewAroundCenter
{
  HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

  /* Subset: embed this table verbatim, then recursively subset the child
   * paint graph reached through `src`. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    return_trace (out->src.serialize_subset (c, src, this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && src.sanitize (c, this));
  }

  HBUINT8 format; /* format = 30(noVar) or 31 (Var) */
  Offset24To<Paint> src; /* Offset (from beginning of PaintSkewAroundCenter table) to Paint subtable. */
  F2DOT14 xSkewAngle; /* Skew angle in the x direction. */
  F2DOT14 ySkewAngle; /* Skew angle in the y direction. */
  FWORD centerX;      /* x coordinate of the center of skew. */
  FWORD centerY;      /* y coordinate of the center of skew. */
  public:
  DEFINE_SIZE_STATIC (4 + 2 * F2DOT14::static_size + 2 * FWORD::static_size);
};
  723. struct PaintComposite
  724. {
  725. void closurev1 (hb_colrv1_closure_context_t* c) const;
  726. bool subset (hb_subset_context_t *c) const
  727. {
  728. TRACE_SUBSET (this);
  729. auto *out = c->serializer->embed (this);
  730. if (unlikely (!out)) return_trace (false);
  731. if (!out->src.serialize_subset (c, src, this)) return_trace (false);
  732. return_trace (out->backdrop.serialize_subset (c, backdrop, this));
  733. }
  734. bool sanitize (hb_sanitize_context_t *c) const
  735. {
  736. TRACE_SANITIZE (this);
  737. return_trace (c->check_struct (this) &&
  738. src.sanitize (c, this) &&
  739. backdrop.sanitize (c, this));
  740. }
  741. HBUINT8 format; /* format = 32 */
  742. Offset24To<Paint> src; /* Offset (from beginning of PaintComposite table) to source Paint subtable. */
  743. CompositeMode mode; /* If mode is unrecognized use COMPOSITE_CLEAR */
  744. Offset24To<Paint> backdrop; /* Offset (from beginning of PaintComposite table) to backdrop Paint subtable. */
  745. public:
  746. DEFINE_SIZE_STATIC (8);
  747. };
/* Decoded clip-box bounds in font units; scratch value filled in by the
 * ClipBoxFormat1/ClipBoxFormat2 accessors below. */
struct ClipBoxData
{
  int xMin, yMin, xMax, yMax;
};
/* ClipBoxFormat1: static clip box (four FWORD bounds, no variations). */
struct ClipBoxFormat1
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  /* Copy the static bounds out; the instancer is unused in this format. */
  void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer HB_UNUSED) const
  {
    clip_box.xMin = xMin;
    clip_box.yMin = yMin;
    clip_box.xMax = xMax;
    clip_box.yMax = yMax;
  }

  public:
  HBUINT8 format; /* format = 1(noVar) or 2(Var)*/
  FWORD xMin;     /* Minimum x of clip box. */
  FWORD yMin;     /* Minimum y of clip box. */
  FWORD xMax;     /* Maximum x of clip box. */
  FWORD yMax;     /* Maximum y of clip box. */
  public:
  DEFINE_SIZE_STATIC (1 + 4 * FWORD::static_size);
};
/* ClipBoxFormat2: variable clip box.  Starts from the static bounds and,
 * when a live instancer is available, adds rounded deltas fetched via
 * varIdxBase (columns 0..3 correspond to xMin, yMin, xMax, yMax). */
struct ClipBoxFormat2 : Variable<ClipBoxFormat1>
{
  void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer) const
  {
    value.get_clip_box(clip_box, instancer);
    if (instancer)
    {
      clip_box.xMin += _hb_roundf (instancer (varIdxBase, 0));
      clip_box.yMin += _hb_roundf (instancer (varIdxBase, 1));
      clip_box.xMax += _hb_roundf (instancer (varIdxBase, 2));
      clip_box.yMax += _hb_roundf (instancer (varIdxBase, 3));
    }
  }
};
/* ClipBox: format-dispatching wrapper over ClipBoxFormat1/2. */
struct ClipBox
{
  /* Serialize-time copy; returns nullptr on unknown format or OOM. */
  ClipBox* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    switch (u.format) {
    case 1: return_trace (reinterpret_cast<ClipBox *> (c->embed (u.format1)));
    case 2: return_trace (reinterpret_cast<ClipBox *> (c->embed (u.format2)));
    default:return_trace (nullptr);
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  /* Convert the clip box into glyph extents.  Note hb extents convention:
   * y_bearing is the top (yMax) and height is negative (yMin - yMax). */
  bool get_extents (hb_glyph_extents_t *extents,
                    const VarStoreInstancer &instancer) const
  {
    ClipBoxData clip_box;
    switch (u.format) {
    case 1:
      u.format1.get_clip_box (clip_box, instancer);
      break;
    case 2:
      u.format2.get_clip_box (clip_box, instancer);
      break;
    default:
      return false;
    }

    extents->x_bearing = clip_box.xMin;
    extents->y_bearing = clip_box.yMax;
    extents->width = clip_box.xMax - clip_box.xMin;
    extents->height = clip_box.yMin - clip_box.yMax;
    return true;
  }

  protected:
  union {
  HBUINT8 format; /* Format identifier */
  ClipBoxFormat1 format1;
  ClipBoxFormat2 format2;
  } u;
};
/* ClipRecord: associates an inclusive glyph-id range with a ClipBox. */
struct ClipRecord
{
  /* Range comparison for bsearch over sorted records: 0 when `g` falls
   * inside [startGlyphID, endGlyphID]. */
  int cmp (hb_codepoint_t g) const
  { return g < startGlyphID ? -1 : g <= endGlyphID ? 0 : +1; }

  /* Serialize-time copy; also deep-copies the referenced ClipBox. */
  ClipRecord* copy (hb_serialize_context_t *c, const void *base) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->embed (this);
    if (unlikely (!out)) return_trace (nullptr);
    if (!out->clipBox.serialize_copy (c, clipBox, base)) return_trace (nullptr);
    return_trace (out);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && clipBox.sanitize (c, base));
  }

  bool get_extents (hb_glyph_extents_t *extents,
                    const void *base,
                    const VarStoreInstancer &instancer) const
  {
    return (base+clipBox).get_extents (extents, instancer);
  }

  public:
  HBUINT16 startGlyphID;  // first gid clip applies to
  HBUINT16 endGlyphID;    // last gid clip applies to, inclusive
  Offset24To<ClipBox> clipBox;  // Box or VarBox
  public:
  DEFINE_SIZE_STATIC (7);
};
  868. DECLARE_NULL_NAMESPACE_BYTES (OT, ClipRecord);
/* ClipList: maps glyph-id ranges to clip boxes (COLRv1). */
struct ClipList
{
  /* Write ClipRecords for the given (already remapped) gids, merging runs
   * of consecutive gids that share the same clip-box offset into a single
   * record.  `gid_offset_map` must have exactly one entry per gid.
   * Returns the number of records written, or 0 on failure/empty input. */
  unsigned serialize_clip_records (hb_serialize_context_t *c,
                                   const hb_set_t& gids,
                                   const hb_map_t& gid_offset_map) const
  {
    TRACE_SERIALIZE (this);
    if (gids.is_empty () ||
        gid_offset_map.get_population () != gids.get_population ())
      return_trace (0);

    unsigned count = 0;

    hb_codepoint_t start_gid= gids.get_min ();
    hb_codepoint_t prev_gid = start_gid;

    unsigned offset = gid_offset_map.get (start_gid);
    unsigned prev_offset = offset;
    for (const hb_codepoint_t _ : gids.iter ())
    {
      if (_ == start_gid) continue;

      offset = gid_offset_map.get (_);
      /* Extend the current run while gids stay consecutive and keep
       * pointing at the same clip box. */
      if (_ == prev_gid + 1 &&  offset == prev_offset)
      {
        prev_gid = _;
        continue;
      }
      /* Run broken: flush the accumulated range as one record. */
      ClipRecord record;
      record.startGlyphID = start_gid;
      record.endGlyphID = prev_gid;
      record.clipBox = prev_offset;

      if (!c->copy (record, this)) return_trace (0);
      count++;

      start_gid = _;
      prev_gid = _;
      prev_offset = offset;
    }

    //last one
    {
      ClipRecord record;
      record.startGlyphID = start_gid;
      record.endGlyphID = prev_gid;
      record.clipBox = prev_offset;

      if (!c->copy (record, this)) return_trace (0);
      count++;
    }
    return_trace (count);
  }

  /* Subset: remap every covered old gid to its new gid (keeping its
   * clip-box offset), then re-serialize merged records. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    if (!c->serializer->check_assign (out->format, format, HB_SERIALIZE_ERROR_INT_OVERFLOW)) return_trace (false);

    const hb_set_t& glyphset = *c->plan->_glyphset_colred;
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    hb_map_t new_gid_offset_map;
    hb_set_t new_gids;
    for (const ClipRecord& record : clips.iter ())
    {
      unsigned start_gid = record.startGlyphID;
      unsigned end_gid = record.endGlyphID;
      for (unsigned gid = start_gid; gid <= end_gid; gid++)
      {
        if (!glyphset.has (gid) || !glyph_map.has (gid)) continue;
        unsigned new_gid = glyph_map.get (gid);
        new_gid_offset_map.set (new_gid, record.clipBox);
        new_gids.add (new_gid);
      }
    }

    unsigned count = serialize_clip_records (c->serializer, new_gids, new_gid_offset_map);
    if (!count) return_trace (false);
    return_trace (c->serializer->check_assign (out->clips.len, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    // TODO Make a formatted struct!
    return_trace (c->check_struct (this) && clips.sanitize (c, this));
  }

  /* Look up the clip box for `gid` via binary search and convert it to
   * glyph extents; returns false if no record covers the gid. */
  bool
  get_extents (hb_codepoint_t gid,
               hb_glyph_extents_t *extents,
               const VarStoreInstancer &instancer) const
  {
    auto *rec = clips.as_array ().bsearch (gid);
    if (rec)
    {
      rec->get_extents (extents, this, instancer);
      return true;
    }
    return false;
  }

  HBUINT8 format;  // Set to 1.
  SortedArray32Of<ClipRecord> clips;  // Clip records, sorted by startGlyphID
  public:
  DEFINE_SIZE_ARRAY_SIZED (5, clips);
};
/* Paint: format-dispatching union over all 32 COLRv1 paint table formats.
 * Odd/even format pairs are typically the variable/static flavors of the
 * same paint (see the union members below). */
struct Paint
{
  /* Sanitize with recursion-depth guarding, since paint graphs can nest
   * (and a malicious font could try to recurse unboundedly). */
  template <typename ...Ts>
  bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
  {
    TRACE_SANITIZE (this);

    if (unlikely (!c->check_start_recursion (HB_COLRV1_MAX_NESTING_LEVEL)))
      return_trace (c->no_dispatch_return_value ());

    return_trace (c->end_recursion (this->dispatch (c, std::forward<Ts> (ds)...)));
  }

  /* Dispatch on the leading format byte to the concrete paint type. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.paintformat1, std::forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.paintformat2, std::forward<Ts> (ds)...));
    case 3: return_trace (c->dispatch (u.paintformat3, std::forward<Ts> (ds)...));
    case 4: return_trace (c->dispatch (u.paintformat4, std::forward<Ts> (ds)...));
    case 5: return_trace (c->dispatch (u.paintformat5, std::forward<Ts> (ds)...));
    case 6: return_trace (c->dispatch (u.paintformat6, std::forward<Ts> (ds)...));
    case 7: return_trace (c->dispatch (u.paintformat7, std::forward<Ts> (ds)...));
    case 8: return_trace (c->dispatch (u.paintformat8, std::forward<Ts> (ds)...));
    case 9: return_trace (c->dispatch (u.paintformat9, std::forward<Ts> (ds)...));
    case 10: return_trace (c->dispatch (u.paintformat10, std::forward<Ts> (ds)...));
    case 11: return_trace (c->dispatch (u.paintformat11, std::forward<Ts> (ds)...));
    case 12: return_trace (c->dispatch (u.paintformat12, std::forward<Ts> (ds)...));
    case 13: return_trace (c->dispatch (u.paintformat13, std::forward<Ts> (ds)...));
    case 14: return_trace (c->dispatch (u.paintformat14, std::forward<Ts> (ds)...));
    case 15: return_trace (c->dispatch (u.paintformat15, std::forward<Ts> (ds)...));
    case 16: return_trace (c->dispatch (u.paintformat16, std::forward<Ts> (ds)...));
    case 17: return_trace (c->dispatch (u.paintformat17, std::forward<Ts> (ds)...));
    case 18: return_trace (c->dispatch (u.paintformat18, std::forward<Ts> (ds)...));
    case 19: return_trace (c->dispatch (u.paintformat19, std::forward<Ts> (ds)...));
    case 20: return_trace (c->dispatch (u.paintformat20, std::forward<Ts> (ds)...));
    case 21: return_trace (c->dispatch (u.paintformat21, std::forward<Ts> (ds)...));
    case 22: return_trace (c->dispatch (u.paintformat22, std::forward<Ts> (ds)...));
    case 23: return_trace (c->dispatch (u.paintformat23, std::forward<Ts> (ds)...));
    case 24: return_trace (c->dispatch (u.paintformat24, std::forward<Ts> (ds)...));
    case 25: return_trace (c->dispatch (u.paintformat25, std::forward<Ts> (ds)...));
    case 26: return_trace (c->dispatch (u.paintformat26, std::forward<Ts> (ds)...));
    case 27: return_trace (c->dispatch (u.paintformat27, std::forward<Ts> (ds)...));
    case 28: return_trace (c->dispatch (u.paintformat28, std::forward<Ts> (ds)...));
    case 29: return_trace (c->dispatch (u.paintformat29, std::forward<Ts> (ds)...));
    case 30: return_trace (c->dispatch (u.paintformat30, std::forward<Ts> (ds)...));
    case 31: return_trace (c->dispatch (u.paintformat31, std::forward<Ts> (ds)...));
    case 32: return_trace (c->dispatch (u.paintformat32, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT8 format;
  PaintColrLayers paintformat1;
  PaintSolid paintformat2;
  Variable<PaintSolid> paintformat3;
  PaintLinearGradient<NoVariable> paintformat4;
  Variable<PaintLinearGradient<Variable>> paintformat5;
  PaintRadialGradient<NoVariable> paintformat6;
  Variable<PaintRadialGradient<Variable>> paintformat7;
  PaintSweepGradient<NoVariable> paintformat8;
  Variable<PaintSweepGradient<Variable>> paintformat9;
  PaintGlyph paintformat10;
  PaintColrGlyph paintformat11;
  PaintTransform<NoVariable> paintformat12;
  PaintTransform<Variable> paintformat13;
  PaintTranslate paintformat14;
  Variable<PaintTranslate> paintformat15;
  PaintScale paintformat16;
  Variable<PaintScale> paintformat17;
  PaintScaleAroundCenter paintformat18;
  Variable<PaintScaleAroundCenter> paintformat19;
  PaintScaleUniform paintformat20;
  Variable<PaintScaleUniform> paintformat21;
  PaintScaleUniformAroundCenter paintformat22;
  Variable<PaintScaleUniformAroundCenter> paintformat23;
  PaintRotate paintformat24;
  Variable<PaintRotate> paintformat25;
  PaintRotateAroundCenter paintformat26;
  Variable<PaintRotateAroundCenter> paintformat27;
  PaintSkew paintformat28;
  Variable<PaintSkew> paintformat29;
  PaintSkewAroundCenter paintformat30;
  Variable<PaintSkewAroundCenter> paintformat31;
  PaintComposite paintformat32;
  } u;
  public:
  DEFINE_SIZE_MIN (2);
};
/* BaseGlyphPaintRecord: maps a base glyph id to the root Paint of its
 * COLRv1 paint graph. */
struct BaseGlyphPaintRecord
{
  /* Exact-match comparison for bsearch over records sorted by glyphId. */
  int cmp (hb_codepoint_t g) const
  { return g < glyphId ? -1 : g > glyphId ? 1 : 0; }

  /* Serialize a copy with glyphId remapped through `glyph_map`, then
   * recursively subset the paint graph rooted at `paint`. */
  bool serialize (hb_serialize_context_t *s, const hb_map_t* glyph_map,
                  const void* src_base, hb_subset_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    auto *out = s->embed (this);
    if (unlikely (!out)) return_trace (false);
    if (!s->check_assign (out->glyphId, glyph_map->get (glyphId),
                          HB_SERIALIZE_ERROR_INT_OVERFLOW))
      return_trace (false);

    return_trace (out->paint.serialize_subset (c, paint, src_base));
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this) && paint.sanitize (c, base)));
  }

  public:
  HBGlyphID16 glyphId;    /* Glyph ID of reference glyph */
  Offset32To<Paint> paint;  /* Offset (from beginning of BaseGlyphPaintRecord array) to Paint,
                             * Typically PaintColrLayers */
  public:
  DEFINE_SIZE_STATIC (6);
};
  1081. struct BaseGlyphList : SortedArray32Of<BaseGlyphPaintRecord>
  1082. {
  1083. bool subset (hb_subset_context_t *c) const
  1084. {
  1085. TRACE_SUBSET (this);
  1086. auto *out = c->serializer->start_embed (this);
  1087. if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
  1088. const hb_set_t* glyphset = c->plan->_glyphset_colred;
  1089. for (const auto& _ : as_array ())
  1090. {
  1091. unsigned gid = _.glyphId;
  1092. if (!glyphset->has (gid)) continue;
  1093. if (_.serialize (c->serializer, c->plan->glyph_map, this, c)) out->len++;
  1094. else return_trace (false);
  1095. }
  1096. return_trace (out->len != 0);
  1097. }
  1098. bool sanitize (hb_sanitize_context_t *c) const
  1099. {
  1100. TRACE_SANITIZE (this);
  1101. return_trace (SortedArray32Of<BaseGlyphPaintRecord>::sanitize (c, this));
  1102. }
  1103. };
/* LayerList: array of offsets to the Paints referenced by PaintColrLayers. */
struct LayerList : Array32OfOffset32To<Paint>
{
  const Paint& get_paint (unsigned i) const
  { return this+(*this)[i]; }

  /* Subset: keep only the layers recorded in the plan's colrv1_layers
   * (collected during closure), subsetting each referenced paint graph. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (const auto& _ : + hb_enumerate (*this)
                         | hb_filter (c->plan->colrv1_layers, hb_first))

    {
      auto *o = out->serialize_append (c->serializer);
      if (unlikely (!o) || !o->serialize_subset (c, _.second, this))
        return_trace (false);
    }
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (Array32OfOffset32To<Paint>::sanitize (c, this));
  }
};
  1128. struct COLR
  1129. {
  1130. static constexpr hb_tag_t tableTag = HB_OT_TAG_COLR;
  1131. bool has_data () const { return numBaseGlyphs; }
  1132. unsigned int get_glyph_layers (hb_codepoint_t glyph,
  1133. unsigned int start_offset,
  1134. unsigned int *count, /* IN/OUT. May be NULL. */
  1135. hb_ot_color_layer_t *layers /* OUT. May be NULL. */) const
  1136. {
  1137. const BaseGlyphRecord &record = (this+baseGlyphsZ).bsearch (numBaseGlyphs, glyph);
  1138. hb_array_t<const LayerRecord> all_layers = (this+layersZ).as_array (numLayers);
  1139. hb_array_t<const LayerRecord> glyph_layers = all_layers.sub_array (record.firstLayerIdx,
  1140. record.numLayers);
  1141. if (count)
  1142. {
  1143. + glyph_layers.sub_array (start_offset, count)
  1144. | hb_sink (hb_array (layers, *count))
  1145. ;
  1146. }
  1147. return glyph_layers.length;
  1148. }
  1149. struct accelerator_t
  1150. {
  1151. accelerator_t (hb_face_t *face)
  1152. { colr = hb_sanitize_context_t ().reference_table<COLR> (face); }
  1153. ~accelerator_t () { this->colr.destroy (); }
  1154. bool is_valid () { return colr.get_blob ()->length; }
  1155. void closure_glyphs (hb_codepoint_t glyph,
  1156. hb_set_t *related_ids /* OUT */) const
  1157. { colr->closure_glyphs (glyph, related_ids); }
  1158. void closure_V0palette_indices (const hb_set_t *glyphs,
  1159. hb_set_t *palettes /* OUT */) const
  1160. { colr->closure_V0palette_indices (glyphs, palettes); }
  1161. void closure_forV1 (hb_set_t *glyphset,
  1162. hb_set_t *layer_indices,
  1163. hb_set_t *palette_indices) const
  1164. { colr->closure_forV1 (glyphset, layer_indices, palette_indices); }
  1165. private:
  1166. hb_blob_ptr_t<COLR> colr;
  1167. };
  1168. void closure_glyphs (hb_codepoint_t glyph,
  1169. hb_set_t *related_ids /* OUT */) const
  1170. {
  1171. const BaseGlyphRecord *record = get_base_glyph_record (glyph);
  1172. if (!record) return;
  1173. auto glyph_layers = (this+layersZ).as_array (numLayers).sub_array (record->firstLayerIdx,
  1174. record->numLayers);
  1175. if (!glyph_layers.length) return;
  1176. related_ids->add_array (&glyph_layers[0].glyphId, glyph_layers.length, LayerRecord::min_size);
  1177. }
  1178. void closure_V0palette_indices (const hb_set_t *glyphs,
  1179. hb_set_t *palettes /* OUT */) const
  1180. {
  1181. if (!numBaseGlyphs || !numLayers) return;
  1182. hb_array_t<const BaseGlyphRecord> baseGlyphs = (this+baseGlyphsZ).as_array (numBaseGlyphs);
  1183. hb_array_t<const LayerRecord> all_layers = (this+layersZ).as_array (numLayers);
  1184. for (const BaseGlyphRecord record : baseGlyphs)
  1185. {
  1186. if (!glyphs->has (record.glyphId)) continue;
  1187. hb_array_t<const LayerRecord> glyph_layers = all_layers.sub_array (record.firstLayerIdx,
  1188. record.numLayers);
  1189. for (const LayerRecord layer : glyph_layers)
  1190. palettes->add (layer.colorIdx);
  1191. }
  1192. }
  1193. void closure_forV1 (hb_set_t *glyphset,
  1194. hb_set_t *layer_indices,
  1195. hb_set_t *palette_indices) const
  1196. {
  1197. if (version != 1) return;
  1198. hb_set_t visited_glyphs;
  1199. hb_colrv1_closure_context_t c (this, &visited_glyphs, layer_indices, palette_indices);
  1200. const BaseGlyphList &baseglyph_paintrecords = this+baseGlyphList;
  1201. for (const BaseGlyphPaintRecord &baseglyph_paintrecord: baseglyph_paintrecords.iter ())
  1202. {
  1203. unsigned gid = baseglyph_paintrecord.glyphId;
  1204. if (!glyphset->has (gid)) continue;
  1205. const Paint &paint = &baseglyph_paintrecords+baseglyph_paintrecord.paint;
  1206. paint.dispatch (&c);
  1207. }
  1208. hb_set_union (glyphset, &visited_glyphs);
  1209. }
  1210. const LayerList& get_layerList () const
  1211. { return (this+layerList); }
  1212. const BaseGlyphList& get_baseglyphList () const
  1213. { return (this+baseGlyphList); }
  1214. bool sanitize (hb_sanitize_context_t *c) const
  1215. {
  1216. TRACE_SANITIZE (this);
  1217. return_trace (c->check_struct (this) &&
  1218. (this+baseGlyphsZ).sanitize (c, numBaseGlyphs) &&
  1219. (this+layersZ).sanitize (c, numLayers) &&
  1220. (version == 0 ||
  1221. (version == 1 &&
  1222. baseGlyphList.sanitize (c, this) &&
  1223. layerList.sanitize (c, this) &&
  1224. clipList.sanitize (c, this) &&
  1225. varIdxMap.sanitize (c, this) &&
  1226. varStore.sanitize (c, this))));
  1227. }
  1228. template<typename BaseIterator, typename LayerIterator,
  1229. hb_requires (hb_is_iterator (BaseIterator)),
  1230. hb_requires (hb_is_iterator (LayerIterator))>
  1231. bool serialize_V0 (hb_serialize_context_t *c,
  1232. unsigned version,
  1233. BaseIterator base_it,
  1234. LayerIterator layer_it)
  1235. {
  1236. TRACE_SERIALIZE (this);
  1237. if (unlikely (base_it.len () != layer_it.len ()))
  1238. return_trace (false);
  1239. this->version = version;
  1240. numLayers = 0;
  1241. numBaseGlyphs = base_it.len ();
  1242. if (numBaseGlyphs == 0)
  1243. {
  1244. baseGlyphsZ = 0;
  1245. layersZ = 0;
  1246. return_trace (true);
  1247. }
  1248. c->push ();
  1249. for (const hb_item_type<BaseIterator> _ : + base_it.iter ())
  1250. {
  1251. auto* record = c->embed (_);
  1252. if (unlikely (!record)) return_trace (false);
  1253. record->firstLayerIdx = numLayers;
  1254. numLayers += record->numLayers;
  1255. }
  1256. c->add_link (baseGlyphsZ, c->pop_pack ());
  1257. c->push ();
  1258. for (const hb_item_type<LayerIterator>& _ : + layer_it.iter ())
  1259. _.as_array ().copy (c);
  1260. c->add_link (layersZ, c->pop_pack ());
  1261. return_trace (true);
  1262. }
  1263. const BaseGlyphRecord* get_base_glyph_record (hb_codepoint_t gid) const
  1264. {
  1265. const BaseGlyphRecord* record = &(this+baseGlyphsZ).bsearch (numBaseGlyphs, (unsigned int) gid);
  1266. if (record == &Null (BaseGlyphRecord) ||
  1267. (record && (hb_codepoint_t) record->glyphId != gid))
  1268. record = nullptr;
  1269. return record;
  1270. }
  1271. const BaseGlyphPaintRecord* get_base_glyph_paintrecord (hb_codepoint_t gid) const
  1272. {
  1273. const BaseGlyphPaintRecord* record = &(this+baseGlyphList).bsearch ((unsigned) gid);
  1274. if ((record && (hb_codepoint_t) record->glyphId != gid))
  1275. record = nullptr;
  1276. return record;
  1277. }
  1278. bool subset (hb_subset_context_t *c) const
  1279. {
  1280. TRACE_SUBSET (this);
  1281. const hb_map_t &reverse_glyph_map = *c->plan->reverse_glyph_map;
  1282. const hb_set_t& glyphset = *c->plan->_glyphset_colred;
  1283. auto base_it =
  1284. + hb_range (c->plan->num_output_glyphs ())
  1285. | hb_filter ([&](hb_codepoint_t new_gid)
  1286. {
  1287. hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
  1288. if (glyphset.has (old_gid)) return true;
  1289. return false;
  1290. })
  1291. | hb_map_retains_sorting ([&](hb_codepoint_t new_gid)
  1292. {
  1293. hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
  1294. const BaseGlyphRecord* old_record = get_base_glyph_record (old_gid);
  1295. if (unlikely (!old_record))
  1296. return hb_pair_t<bool, BaseGlyphRecord> (false, Null (BaseGlyphRecord));
  1297. BaseGlyphRecord new_record = {};
  1298. new_record.glyphId = new_gid;
  1299. new_record.numLayers = old_record->numLayers;
  1300. return hb_pair_t<bool, BaseGlyphRecord> (true, new_record);
  1301. })
  1302. | hb_filter (hb_first)
  1303. | hb_map_retains_sorting (hb_second)
  1304. ;
  1305. auto layer_it =
  1306. + hb_range (c->plan->num_output_glyphs ())
  1307. | hb_map (reverse_glyph_map)
  1308. | hb_filter (glyphset)
  1309. | hb_map_retains_sorting ([&](hb_codepoint_t old_gid)
  1310. {
  1311. const BaseGlyphRecord* old_record = get_base_glyph_record (old_gid);
  1312. hb_vector_t<LayerRecord> out_layers;
  1313. if (unlikely (!old_record ||
  1314. old_record->firstLayerIdx >= numLayers ||
  1315. old_record->firstLayerIdx + old_record->numLayers > numLayers))
  1316. return hb_pair_t<bool, hb_vector_t<LayerRecord>> (false, out_layers);
  1317. auto layers = (this+layersZ).as_array (numLayers).sub_array (old_record->firstLayerIdx,
  1318. old_record->numLayers);
  1319. out_layers.resize (layers.length);
  1320. for (unsigned int i = 0; i < layers.length; i++) {
  1321. out_layers[i] = layers[i];
  1322. hb_codepoint_t new_gid = 0;
  1323. if (unlikely (!c->plan->new_gid_for_old_gid (out_layers[i].glyphId, &new_gid)))
  1324. return hb_pair_t<bool, hb_vector_t<LayerRecord>> (false, out_layers);
  1325. out_layers[i].glyphId = new_gid;
  1326. out_layers[i].colorIdx = c->plan->colr_palettes->get (layers[i].colorIdx);
  1327. }
  1328. return hb_pair_t<bool, hb_vector_t<LayerRecord>> (true, out_layers);
  1329. })
  1330. | hb_filter (hb_first)
  1331. | hb_map_retains_sorting (hb_second)
  1332. ;
  1333. if (version == 0 && (!base_it || !layer_it))
  1334. return_trace (false);
  1335. COLR *colr_prime = c->serializer->start_embed<COLR> ();
  1336. if (unlikely (!c->serializer->extend_min (colr_prime))) return_trace (false);
  1337. if (version == 0)
  1338. return_trace (colr_prime->serialize_V0 (c->serializer, version, base_it, layer_it));
  1339. auto snap = c->serializer->snapshot ();
  1340. if (!c->serializer->allocate_size<void> (5 * HBUINT32::static_size)) return_trace (false);
  1341. if (!colr_prime->baseGlyphList.serialize_subset (c, baseGlyphList, this))
  1342. {
  1343. if (c->serializer->in_error ()) return_trace (false);
  1344. //no more COLRv1 glyphs: downgrade to version 0
  1345. c->serializer->revert (snap);
  1346. return_trace (colr_prime->serialize_V0 (c->serializer, 0, base_it, layer_it));
  1347. }
  1348. if (!colr_prime->serialize_V0 (c->serializer, version, base_it, layer_it)) return_trace (false);
  1349. colr_prime->layerList.serialize_subset (c, layerList, this);
  1350. colr_prime->clipList.serialize_subset (c, clipList, this);
  1351. colr_prime->varIdxMap.serialize_copy (c->serializer, varIdxMap, this);
  1352. //TODO: subset varStore once it's implemented in fonttools
  1353. return_trace (true);
  1354. }
  1355. bool
  1356. get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const
  1357. {
  1358. if (version != 1)
  1359. return false;
  1360. VarStoreInstancer instancer (this+varStore,
  1361. this+varIdxMap,
  1362. hb_array (font->coords, font->num_coords));
  1363. if ((this+clipList).get_extents (glyph,
  1364. extents,
  1365. instancer))
  1366. {
  1367. extents->x_bearing = font->em_scale_x (extents->x_bearing);
  1368. extents->y_bearing = font->em_scale_x (extents->y_bearing);
  1369. extents->width = font->em_scale_x (extents->width);
  1370. extents->height = font->em_scale_x (extents->height);
  1371. return true;
  1372. }
  1373. return false;
  1374. }
  1375. protected:
  1376. HBUINT16 version; /* Table version number (starts at 0). */
  1377. HBUINT16 numBaseGlyphs; /* Number of Base Glyph Records. */
  1378. NNOffset32To<SortedUnsizedArrayOf<BaseGlyphRecord>>
  1379. baseGlyphsZ; /* Offset to Base Glyph records. */
  1380. NNOffset32To<UnsizedArrayOf<LayerRecord>>
  1381. layersZ; /* Offset to Layer Records. */
  1382. HBUINT16 numLayers; /* Number of Layer Records. */
  1383. // Version-1 additions
  1384. Offset32To<BaseGlyphList> baseGlyphList;
  1385. Offset32To<LayerList> layerList;
  1386. Offset32To<ClipList> clipList; // Offset to ClipList table (may be NULL)
  1387. Offset32To<DeltaSetIndexMap> varIdxMap; // Offset to DeltaSetIndexMap table (may be NULL)
  1388. Offset32To<VariationStore> varStore;
  1389. public:
  1390. DEFINE_SIZE_MIN (14);
  1391. };
/* Public-facing accelerator type for the face-table machinery; simply
 * forwards construction to COLR::accelerator_t. */
struct COLR_accelerator_t : COLR::accelerator_t {
  COLR_accelerator_t (hb_face_t *face) : COLR::accelerator_t (face) {}
};
  1395. } /* namespace OT */
  1396. #endif /* HB_OT_COLOR_COLR_TABLE_HH */