
Merge pull request #84080 from bruvzg/hb822

HarfBuzz: Update to version 8.2.2
Rémi Verschelde 1 year ago
parent commit 78ac4b9d96
76 changed files with 4569 additions and 2424 deletions
  1. thirdparty/README.md (+1 -1)
  2. thirdparty/harfbuzz/src/OT/Color/COLR/COLR.hh (+59 -7)
  3. thirdparty/harfbuzz/src/OT/Layout/GDEF/GDEF.hh (+66 -16)
  4. thirdparty/harfbuzz/src/OT/Layout/GPOS/AnchorFormat3.hh (+18 -5)
  5. thirdparty/harfbuzz/src/OT/Layout/GPOS/AnchorMatrix.hh (+3 -2)
  6. thirdparty/harfbuzz/src/OT/Layout/GPOS/CursivePosFormat1.hh (+7 -6)
  7. thirdparty/harfbuzz/src/OT/Layout/GPOS/LigatureArray.hh (+7 -6)
  8. thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkArray.hh (+3 -3)
  9. thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkBasePosFormat1.hh (+7 -8)
  10. thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkLigPosFormat1.hh (+10 -9)
  11. thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkMarkPosFormat1.hh (+7 -5)
  12. thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkRecord.hh (+5 -6)
  13. thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh (+1 -1)
  14. thirdparty/harfbuzz/src/OT/glyf/Glyph.hh (+95 -2)
  15. thirdparty/harfbuzz/src/graph/graph.hh (+23 -4)
  16. thirdparty/harfbuzz/src/graph/gsubgpos-context.hh (+1 -1)
  17. thirdparty/harfbuzz/src/graph/gsubgpos-graph.hh (+15 -4)
  18. thirdparty/harfbuzz/src/graph/markbasepos-graph.hh (+5 -3)
  19. thirdparty/harfbuzz/src/hb-aat-layout-common.hh (+35 -37)
  20. thirdparty/harfbuzz/src/hb-algs.hh (+4 -0)
  21. thirdparty/harfbuzz/src/hb-bimap.hh (+1 -1)
  22. thirdparty/harfbuzz/src/hb-bit-page.hh (+3 -0)
  23. thirdparty/harfbuzz/src/hb-bit-set.hh (+2 -2)
  24. thirdparty/harfbuzz/src/hb-buffer-deserialize-json.hh (+4 -4)
  25. thirdparty/harfbuzz/src/hb-buffer-deserialize-text-glyphs.hh (+5 -5)
  26. thirdparty/harfbuzz/src/hb-buffer-deserialize-text-unicode.hh (+5 -5)
  27. thirdparty/harfbuzz/src/hb-buffer.cc (+2 -2)
  28. thirdparty/harfbuzz/src/hb-buffer.h (+1 -1)
  29. thirdparty/harfbuzz/src/hb-cairo.cc (+27 -0)
  30. thirdparty/harfbuzz/src/hb-common.cc (+1 -1)
  31. thirdparty/harfbuzz/src/hb-debug.hh (+17 -2)
  32. thirdparty/harfbuzz/src/hb-font.cc (+1 -1)
  33. thirdparty/harfbuzz/src/hb-ft-colr.hh (+35 -1)
  34. thirdparty/harfbuzz/src/hb-limits.hh (+1 -1)
  35. thirdparty/harfbuzz/src/hb-map.cc (+1 -1)
  36. thirdparty/harfbuzz/src/hb-map.hh (+17 -12)
  37. thirdparty/harfbuzz/src/hb-null.hh (+1 -1)
  38. thirdparty/harfbuzz/src/hb-number-parser.hh (+4 -4)
  39. thirdparty/harfbuzz/src/hb-ot-layout-common.hh (+373 -30)
  40. thirdparty/harfbuzz/src/hb-ot-layout.cc (+7 -2)
  41. thirdparty/harfbuzz/src/hb-ot-os2-table.hh (+9 -14)
  42. thirdparty/harfbuzz/src/hb-ot-post-table.hh (+5 -5)
  43. thirdparty/harfbuzz/src/hb-ot-shape.cc (+1 -1)
  44. thirdparty/harfbuzz/src/hb-ot-shaper-arabic-joining-list.hh (+4 -4)
  45. thirdparty/harfbuzz/src/hb-ot-shaper-arabic-table.hh (+4 -4)
  46. thirdparty/harfbuzz/src/hb-ot-shaper-arabic.cc (+25 -6)
  47. thirdparty/harfbuzz/src/hb-ot-shaper-indic-machine.hh (+7 -7)
  48. thirdparty/harfbuzz/src/hb-ot-shaper-indic-table.cc (+6 -6)
  49. thirdparty/harfbuzz/src/hb-ot-shaper-khmer-machine.hh (+7 -7)
  50. thirdparty/harfbuzz/src/hb-ot-shaper-myanmar-machine.hh (+7 -7)
  51. thirdparty/harfbuzz/src/hb-ot-shaper-use-machine.hh (+784 -660)
  52. thirdparty/harfbuzz/src/hb-ot-shaper-use-table.hh (+487 -473)
  53. thirdparty/harfbuzz/src/hb-ot-shaper-vowel-constraints.cc (+2 -2)
  54. thirdparty/harfbuzz/src/hb-ot-tag-table.hh (+10 -7)
  55. thirdparty/harfbuzz/src/hb-ot-var-common.hh (+776 -31)
  56. thirdparty/harfbuzz/src/hb-ot-var-cvar-table.hh (+10 -18)
  57. thirdparty/harfbuzz/src/hb-ot-var-gvar-table.hh (+331 -26)
  58. thirdparty/harfbuzz/src/hb-ot-var-hvar-table.hh (+73 -4)
  59. thirdparty/harfbuzz/src/hb-ot-var-mvar-table.hh (+55 -1)
  60. thirdparty/harfbuzz/src/hb-paint.cc (+25 -0)
  61. thirdparty/harfbuzz/src/hb-paint.h (+42 -0)
  62. thirdparty/harfbuzz/src/hb-paint.hh (+8 -0)
  63. thirdparty/harfbuzz/src/hb-priority-queue.hh (+3 -2)
  64. thirdparty/harfbuzz/src/hb-repacker.hh (+15 -5)
  65. thirdparty/harfbuzz/src/hb-sanitize.hh (+1 -1)
  66. thirdparty/harfbuzz/src/hb-set.cc (+12 -12)
  67. thirdparty/harfbuzz/src/hb-subset-input.cc (+13 -8)
  68. thirdparty/harfbuzz/src/hb-subset-plan-member-list.hh (+6 -1)
  69. thirdparty/harfbuzz/src/hb-subset-plan.cc (+39 -20)
  70. thirdparty/harfbuzz/src/hb-subset-plan.hh (+34 -0)
  71. thirdparty/harfbuzz/src/hb-subset.cc (+13 -0)
  72. thirdparty/harfbuzz/src/hb-subset.h (+2 -1)
  73. thirdparty/harfbuzz/src/hb-ucd-table.hh (+863 -861)
  74. thirdparty/harfbuzz/src/hb-unicode-emoji-table.hh (+3 -3)
  75. thirdparty/harfbuzz/src/hb-vector.hh (+4 -22)
  76. thirdparty/harfbuzz/src/hb-version.h (+3 -3)

+ 1 - 1
thirdparty/README.md

@@ -339,7 +339,7 @@ Files extracted from upstream source:
 ## harfbuzz
 
 - Upstream: https://github.com/harfbuzz/harfbuzz
-- Version: 8.1.1 (1d665c2b521512cdd56964138fc601debd1f1177, 2023)
+- Version: 8.2.2 (18a6e78549e8e04a281129ea8ca784ce85f111b8, 2023)
 - License: MIT
 
 Files extracted from upstream source:

+ 59 - 7
thirdparty/harfbuzz/src/OT/Color/COLR/COLR.hh

@@ -53,6 +53,7 @@ struct Paint;
 struct hb_paint_context_t :
        hb_dispatch_context_t<hb_paint_context_t>
 {
+  const char *get_name () { return "PAINT"; }
   template <typename T>
   return_t dispatch (const T &obj) { obj.paint_glyph (this); return hb_empty_t (); }
   static return_t default_return_value () { return hb_empty_t (); }
@@ -68,6 +69,8 @@ public:
   unsigned int palette_index;
   hb_color_t foreground;
   VarStoreInstancer &instancer;
+  hb_map_t current_glyphs;
+  hb_map_t current_layers;
   int depth_left = HB_MAX_NESTING_LEVEL;
   int edge_count = HB_COLRV1_MAX_EDGE_COUNT;
 
@@ -261,6 +264,7 @@ struct Variable
 
   void paint_glyph (hb_paint_context_t *c) const
   {
+    TRACE_PAINT (this);
     value.paint_glyph (c, varIdxBase);
   }
 
@@ -281,7 +285,7 @@ struct Variable
   public:
   VarIdx varIdxBase;
   public:
-  DEFINE_SIZE_STATIC (4 + T::static_size);
+  DEFINE_SIZE_MIN (VarIdx::static_size + T::min_size);
 };
 
 template <typename T>
@@ -315,6 +319,7 @@ struct NoVariable
 
   void paint_glyph (hb_paint_context_t *c) const
   {
+    TRACE_PAINT (this);
     value.paint_glyph (c, varIdxBase);
   }
 
@@ -332,7 +337,7 @@ struct NoVariable
 
   T      value;
   public:
-  DEFINE_SIZE_STATIC (T::static_size);
+  DEFINE_SIZE_MIN (T::min_size);
 };
 
 // Color structures
@@ -558,6 +563,7 @@ struct Affine2x3
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     c->funcs->push_transform (c->data,
 			      xx.to_float (c->instancer (varIdxBase, 0)),
 			      yx.to_float (c->instancer (varIdxBase, 1)),
@@ -639,6 +645,7 @@ struct PaintSolid
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     hb_bool_t is_foreground;
     hb_color_t color;
 
@@ -693,6 +700,7 @@ struct PaintLinearGradient
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     hb_color_line_t cl = {
       (void *) &(this+colorLine),
       (this+colorLine).static_get_color_stops, c,
@@ -759,6 +767,7 @@ struct PaintRadialGradient
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     hb_color_line_t cl = {
       (void *) &(this+colorLine),
       (this+colorLine).static_get_color_stops, c,
@@ -823,6 +832,7 @@ struct PaintSweepGradient
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     hb_color_line_t cl = {
       (void *) &(this+colorLine),
       (this+colorLine).static_get_color_stops, c,
@@ -874,6 +884,7 @@ struct PaintGlyph
 
   void paint_glyph (hb_paint_context_t *c) const
   {
+    TRACE_PAINT (this);
     c->funcs->push_inverse_root_transform (c->data, c->font);
     c->funcs->push_clip_glyph (c->data, gid, c->font);
     c->funcs->push_root_transform (c->data, c->font);
@@ -946,6 +957,7 @@ struct PaintTransform
 
   void paint_glyph (hb_paint_context_t *c) const
   {
+    TRACE_PAINT (this);
     (this+transform).paint_glyph (c);
     c->recurse (this+src);
     c->funcs->pop_transform (c->data);
@@ -990,6 +1002,7 @@ struct PaintTranslate
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     float ddx = dx + c->instancer (varIdxBase, 0);
     float ddy = dy + c->instancer (varIdxBase, 1);
 
@@ -1038,6 +1051,7 @@ struct PaintScale
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     float sx = scaleX.to_float (c->instancer (varIdxBase, 0));
     float sy = scaleY.to_float (c->instancer (varIdxBase, 1));
 
@@ -1088,6 +1102,7 @@ struct PaintScaleAroundCenter
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     float sx = scaleX.to_float (c->instancer (varIdxBase, 0));
     float sy = scaleY.to_float (c->instancer (varIdxBase, 1));
     float tCenterX = centerX + c->instancer (varIdxBase, 2);
@@ -1141,6 +1156,7 @@ struct PaintScaleUniform
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     float s = scale.to_float (c->instancer (varIdxBase, 0));
 
     bool p1 = c->funcs->push_scale (c->data, s, s);
@@ -1188,6 +1204,7 @@ struct PaintScaleUniformAroundCenter
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     float s = scale.to_float (c->instancer (varIdxBase, 0));
     float tCenterX = centerX + c->instancer (varIdxBase, 1);
     float tCenterY = centerY + c->instancer (varIdxBase, 2);
@@ -1239,6 +1256,7 @@ struct PaintRotate
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     float a = angle.to_float (c->instancer (varIdxBase, 0));
 
     bool p1 = c->funcs->push_rotate (c->data, a);
@@ -1286,6 +1304,7 @@ struct PaintRotateAroundCenter
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     float a = angle.to_float (c->instancer (varIdxBase, 0));
     float tCenterX = centerX + c->instancer (varIdxBase, 1);
     float tCenterY = centerY + c->instancer (varIdxBase, 2);
@@ -1340,6 +1359,7 @@ struct PaintSkew
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     float sx = xSkewAngle.to_float(c->instancer (varIdxBase, 0));
     float sy = ySkewAngle.to_float(c->instancer (varIdxBase, 1));
 
@@ -1390,6 +1410,7 @@ struct PaintSkewAroundCenter
 
   void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
   {
+    TRACE_PAINT (this);
     float sx = xSkewAngle.to_float(c->instancer (varIdxBase, 0));
     float sy = ySkewAngle.to_float(c->instancer (varIdxBase, 1));
     float tCenterX = centerX + c->instancer (varIdxBase, 2);
@@ -1425,8 +1446,10 @@ struct PaintComposite
     auto *out = c->serializer->embed (this);
     if (unlikely (!out)) return_trace (false);
 
-    if (!out->src.serialize_subset (c, src, this, instancer)) return_trace (false);
-    return_trace (out->backdrop.serialize_subset (c, backdrop, this, instancer));
+    bool ret = false;
+    ret |= out->src.serialize_subset (c, src, this, instancer);
+    ret |= out->backdrop.serialize_subset (c, backdrop, this, instancer);
+    return_trace (ret);
   }
 
   bool sanitize (hb_sanitize_context_t *c) const
@@ -1440,6 +1463,7 @@ struct PaintComposite
 
   void paint_glyph (hb_paint_context_t *c) const
   {
+    TRACE_PAINT (this);
     c->recurse (this+backdrop);
     c->funcs->push_group (c->data);
     c->recurse (this+src);
@@ -1898,15 +1922,16 @@ struct LayerList : Array32OfOffset32To<Paint>
     auto *out = c->serializer->start_embed (this);
     if (unlikely (!c->serializer->extend_min (out)))  return_trace (false);
 
+    bool ret = false;
     for (const auto& _ : + hb_enumerate (*this)
                          | hb_filter (c->plan->colrv1_layers, hb_first))
 
     {
       auto *o = out->serialize_append (c->serializer);
-      if (unlikely (!o) || !o->serialize_subset (c, _.second, this, instancer))
-        return_trace (false);
+      if (unlikely (!o)) return_trace (false);
+      ret |= o->serialize_subset (c, _.second, this, instancer);
     }
-    return_trace (true);
+    return_trace (ret);
   }
 
   bool sanitize (hb_sanitize_context_t *c) const
@@ -2284,6 +2309,7 @@ struct COLR
 	                         &(this+varIdxMap),
 	                         hb_array (font->coords, font->num_coords));
     hb_paint_context_t c (this, funcs, data, font, palette_index, foreground, instancer);
+    c.current_glyphs.add (glyph);
 
     if (version == 1)
     {
@@ -2399,18 +2425,42 @@ hb_paint_context_t::recurse (const Paint &paint)
 
 void PaintColrLayers::paint_glyph (hb_paint_context_t *c) const
 {
+  TRACE_PAINT (this);
   const LayerList &paint_offset_lists = c->get_colr_table ()->get_layerList ();
   for (unsigned i = firstLayerIndex; i < firstLayerIndex + numLayers; i++)
   {
+    if (unlikely (c->current_layers.has (i)))
+      continue;
+
+    c->current_layers.add (i);
+
     const Paint &paint = paint_offset_lists.get_paint (i);
     c->funcs->push_group (c->data);
     c->recurse (paint);
     c->funcs->pop_group (c->data, HB_PAINT_COMPOSITE_MODE_SRC_OVER);
+
+    c->current_layers.del (i);
   }
 }
 
 void PaintColrGlyph::paint_glyph (hb_paint_context_t *c) const
 {
+  TRACE_PAINT (this);
+
+  if (unlikely (c->current_glyphs.has (gid)))
+    return;
+
+  c->current_glyphs.add (gid);
+
+  c->funcs->push_inverse_root_transform (c->data, c->font);
+  if (c->funcs->color_glyph (c->data, gid, c->font))
+  {
+    c->funcs->pop_transform (c->data);
+    c->current_glyphs.del (gid);
+    return;
+  }
+  c->funcs->pop_transform (c->data);
+
   const COLR *colr_table = c->get_colr_table ();
   const Paint *paint = colr_table->get_base_glyph_paint (gid);
 
@@ -2429,6 +2479,8 @@ void PaintColrGlyph::paint_glyph (hb_paint_context_t *c) const
 
   if (has_clip_box)
     c->funcs->pop_clip (c->data);
+
+  c->current_glyphs.del (gid);
 }
 
 } /* namespace OT */
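
The COLR.hh hunks above do two things: they add TRACE_PAINT instrumentation to every paint_glyph implementation, and they guard the PaintColrLayers / PaintColrGlyph recursion with the new current_glyphs / current_layers sets so that a malformed font whose paint graph contains a cycle can no longer recurse indefinitely. Below is a minimal, self-contained sketch of that visited-set guard pattern; the types and names are illustrative only, not HarfBuzz API.

// Sketch: guard a recursive paint-graph walk with a "currently visiting" set,
// mirroring the current_glyphs/current_layers guards added in COLR.hh.
#include <cstdio>
#include <unordered_set>
#include <vector>

struct PaintNode {
  unsigned gid;
  std::vector<const PaintNode *> children;
};

static void paint (const PaintNode &node, std::unordered_set<unsigned> &active)
{
  if (active.count (node.gid))
    return;                      // cycle detected: skip instead of recursing forever
  active.insert (node.gid);
  std::printf ("paint glyph %u\n", node.gid);
  for (const PaintNode *child : node.children)
    paint (*child, active);
  active.erase (node.gid);       // pop on the way out, like current_glyphs.del (gid)
}

int main ()
{
  PaintNode a {1, {}}, b {2, {}};
  a.children = {&b};
  b.children = {&a};             // deliberate cycle: a -> b -> a
  std::unordered_set<unsigned> active;
  paint (a, active);             // terminates; the second visit of 'a' is skipped
  return 0;
}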

+ 66 - 16
thirdparty/harfbuzz/src/OT/Layout/GDEF/GDEF.hh

@@ -29,7 +29,7 @@
 #ifndef OT_LAYOUT_GDEF_GDEF_HH
 #define OT_LAYOUT_GDEF_GDEF_HH
 
-#include "../../../hb-ot-layout-common.hh"
+#include "../../../hb-ot-var-common.hh"
 
 #include "../../../hb-font.hh"
 #include "../../../hb-cache.hh"
@@ -204,17 +204,19 @@ struct CaretValueFormat3
     if (!c->serializer->embed (coordinate)) return_trace (false);
 
     unsigned varidx = (this+deviceTable).get_variation_index ();
-    if (c->plan->layout_variation_idx_delta_map.has (varidx))
+    hb_pair_t<unsigned, int> *new_varidx_delta;
+    if (!c->plan->layout_variation_idx_delta_map.has (varidx, &new_varidx_delta))
+      return_trace (false);
+
+    uint32_t new_varidx = hb_first (*new_varidx_delta);
+    int delta = hb_second (*new_varidx_delta);
+    if (delta != 0)
     {
-      int delta = hb_second (c->plan->layout_variation_idx_delta_map.get (varidx));
-      if (delta != 0)
-      {
-        if (!c->serializer->check_assign (out->coordinate, coordinate + delta, HB_SERIALIZE_ERROR_INT_OVERFLOW))
-          return_trace (false);
-      }
+      if (!c->serializer->check_assign (out->coordinate, coordinate + delta, HB_SERIALIZE_ERROR_INT_OVERFLOW))
+        return_trace (false);
     }
 
-    if (c->plan->all_axes_pinned)
+    if (new_varidx == HB_OT_LAYOUT_NO_VARIATIONS_INDEX)
       return_trace (c->serializer->check_assign (out->caretValueFormat, 1, HB_SERIALIZE_ERROR_INT_OVERFLOW));
 
     if (!c->serializer->embed (deviceTable))
@@ -602,6 +604,26 @@ struct GDEFVersion1_2
 		  (version.to_int () < 0x00010003u || varStore.sanitize (c, this)));
   }
 
+  static void remap_varidx_after_instantiation (const hb_map_t& varidx_map,
+                                                hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>>& layout_variation_idx_delta_map /* IN/OUT */)
+  {
+    /* varidx_map is empty which means varstore is empty after instantiation,
+     * no variations, map all varidx to HB_OT_LAYOUT_NO_VARIATIONS_INDEX.
+     * varidx_map doesn't have original varidx, indicating delta row is all
+     * zeros, map varidx to HB_OT_LAYOUT_NO_VARIATIONS_INDEX */
+    for (auto _ : layout_variation_idx_delta_map.iter_ref ())
+    {
+      /* old_varidx->(varidx, delta) mapping generated for subsetting, then this
+       * varidx is used as key of varidx_map during instantiation */
+      uint32_t varidx = _.second.first;
+      uint32_t *new_varidx;
+      if (varidx_map.has (varidx, &new_varidx))
+        _.second.first = *new_varidx;
+      else
+        _.second.first = HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
+    }
+  }
+
   bool subset (hb_subset_context_t *c) const
   {
     TRACE_SUBSET (this);
@@ -624,6 +646,22 @@ struct GDEFVersion1_2
     {
       if (c->plan->all_axes_pinned)
         out->varStore = 0;
+      else if (c->plan->normalized_coords)
+      {
+        if (varStore)
+        {
+          item_variations_t item_vars;
+          if (item_vars.instantiate (this+varStore, c->plan, true, true,
+                                     c->plan->gdef_varstore_inner_maps.as_array ()))
+            subset_varstore = out->varStore.serialize_serialize (c->serializer,
+                                                                 item_vars.has_long_word (),
+                                                                 c->plan->axis_tags,
+                                                                 item_vars.get_region_list (),
+                                                                 item_vars.get_vardata_encodings ());
+          remap_varidx_after_instantiation (item_vars.get_varidx_map (),
+                                            c->plan->layout_variation_idx_delta_map);
+        }
+      }
       else
         subset_varstore = out->varStore.serialize_subset (c, varStore, this, c->plan->gdef_varstore_inner_maps.as_array ());
     }
@@ -922,17 +960,32 @@ struct GDEF
   { get_lig_caret_list ().collect_variation_indices (c); }
 
   void remap_layout_variation_indices (const hb_set_t *layout_variation_indices,
+				       const hb_vector_t<int>& normalized_coords,
+				       bool calculate_delta, /* not pinned at default */
+				       bool no_variations, /* all axes pinned */
 				       hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map /* OUT */) const
   {
     if (!has_var_store ()) return;
-    if (layout_variation_indices->is_empty ()) return;
-
+    const VariationStore &var_store = get_var_store ();
+    float *store_cache = var_store.create_cache ();
+    
     unsigned new_major = 0, new_minor = 0;
     unsigned last_major = (layout_variation_indices->get_min ()) >> 16;
     for (unsigned idx : layout_variation_indices->iter ())
     {
+      int delta = 0;
+      if (calculate_delta)
+        delta = roundf (var_store.get_delta (idx, normalized_coords.arrayZ,
+                                             normalized_coords.length, store_cache));
+
+      if (no_variations)
+      {
+        layout_variation_idx_delta_map->set (idx, hb_pair_t<unsigned, int> (HB_OT_LAYOUT_NO_VARIATIONS_INDEX, delta));
+        continue;
+      }
+
       uint16_t major = idx >> 16;
-      if (major >= get_var_store ().get_sub_table_count ()) break;
+      if (major >= var_store.get_sub_table_count ()) break;
       if (major != last_major)
       {
 	new_minor = 0;
@@ -940,14 +993,11 @@ struct GDEF
       }
 
       unsigned new_idx = (new_major << 16) + new_minor;
-      if (!layout_variation_idx_delta_map->has (idx))
-        continue;
-      int delta = hb_second (layout_variation_idx_delta_map->get (idx));
-
       layout_variation_idx_delta_map->set (idx, hb_pair_t<unsigned, int> (new_idx, delta));
       ++new_minor;
       last_major = major;
     }
+    var_store.destroy_cache (store_cache);
   }
 
   protected:
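
For GDEF, when the subset plan carries normalized_coords the variation store is now partially instantiated (item_variations_t), and every (old varidx -> (new varidx, delta)) entry in layout_variation_idx_delta_map is remapped through the instantiation's varidx_map, falling back to HB_OT_LAYOUT_NO_VARIATIONS_INDEX for rows that disappeared. A minimal sketch of that remapping step, using standard containers instead of the HarfBuzz hashmaps (names are illustrative):

// Sketch of remap_varidx_after_instantiation's core idea.
#include <cstdint>
#include <cstdio>
#include <unordered_map>
#include <utility>

static constexpr uint32_t NO_VARIATIONS_INDEX = 0xFFFFFFFFu;

static void remap_after_instantiation (
    const std::unordered_map<uint32_t, uint32_t> &varidx_map,
    std::unordered_map<uint32_t, std::pair<uint32_t, int>> &idx_delta_map)
{
  for (auto &entry : idx_delta_map)
  {
    auto it = varidx_map.find (entry.second.first);
    entry.second.first = (it != varidx_map.end ())
                         ? it->second            // row survived instantiation
                         : NO_VARIATIONS_INDEX;  // all-zero delta row: drop it
  }
}

int main ()
{
  std::unordered_map<uint32_t, uint32_t> varidx_map {{0x00010002u, 0x00000001u}};
  std::unordered_map<uint32_t, std::pair<uint32_t, int>> idx_delta_map {
    {7u, {0x00010002u, 5}},   // remapped to the new index, delta kept
    {9u, {0x00030004u, 0}},   // not in varidx_map: mapped to NO_VARIATIONS_INDEX
  };
  remap_after_instantiation (varidx_map, idx_delta_map);
  std::printf ("%08x %08x\n",
               (unsigned) idx_delta_map[7].first,
               (unsigned) idx_delta_map[9].first);
  return 0;
}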

+ 18 - 5
thirdparty/harfbuzz/src/OT/Layout/GPOS/AnchorFormat3.hh

@@ -52,9 +52,14 @@ struct AnchorFormat3
     if (unlikely (!c->serializer->embed (yCoordinate))) return_trace (false);
 
     unsigned x_varidx = xDeviceTable ? (this+xDeviceTable).get_variation_index () : HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
-    if (c->plan->layout_variation_idx_delta_map.has (x_varidx))
+    if (x_varidx != HB_OT_LAYOUT_NO_VARIATIONS_INDEX)
     {
-      int delta = hb_second (c->plan->layout_variation_idx_delta_map.get (x_varidx));
+      hb_pair_t<unsigned, int> *new_varidx_delta;
+      if (!c->plan->layout_variation_idx_delta_map.has (x_varidx, &new_varidx_delta))
+        return_trace (false);
+     
+      x_varidx = hb_first (*new_varidx_delta);
+      int delta = hb_second (*new_varidx_delta);
       if (delta != 0)
       {
         if (!c->serializer->check_assign (out->xCoordinate, xCoordinate + delta,
@@ -64,9 +69,14 @@ struct AnchorFormat3
     }
 
     unsigned y_varidx = yDeviceTable ? (this+yDeviceTable).get_variation_index () : HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
-    if (c->plan->layout_variation_idx_delta_map.has (y_varidx))
+    if (y_varidx != HB_OT_LAYOUT_NO_VARIATIONS_INDEX)
     {
-      int delta = hb_second (c->plan->layout_variation_idx_delta_map.get (y_varidx));
+      hb_pair_t<unsigned, int> *new_varidx_delta;
+      if (!c->plan->layout_variation_idx_delta_map.has (y_varidx, &new_varidx_delta))
+        return_trace (false);
+
+      y_varidx = hb_first (*new_varidx_delta);
+      int delta = hb_second (*new_varidx_delta);
       if (delta != 0)
       {
         if (!c->serializer->check_assign (out->yCoordinate, yCoordinate + delta,
@@ -75,7 +85,10 @@ struct AnchorFormat3
       }
     }
 
-    if (c->plan->all_axes_pinned)
+    /* in case that all axes are pinned or no variations after instantiation,
+     * both var_idxes will be mapped to HB_OT_LAYOUT_NO_VARIATIONS_INDEX */
+    if (x_varidx == HB_OT_LAYOUT_NO_VARIATIONS_INDEX &&
+        y_varidx == HB_OT_LAYOUT_NO_VARIATIONS_INDEX)
       return_trace (c->serializer->check_assign (out->format, 1, HB_SERIALIZE_ERROR_INT_OVERFLOW));
 
     if (!c->serializer->embed (xDeviceTable)) return_trace (false);

+ 3 - 2
thirdparty/harfbuzz/src/OT/Layout/GPOS/AnchorMatrix.hh

@@ -65,14 +65,15 @@ struct AnchorMatrix
     if (unlikely (!c->serializer->extend_min (out)))  return_trace (false);
 
     out->rows = num_rows;
+    bool ret = false;
     for (const unsigned i : index_iter)
     {
       auto *offset = c->serializer->embed (matrixZ[i]);
       if (!offset) return_trace (false);
-      offset->serialize_subset (c, matrixZ[i], this);
+      ret |= offset->serialize_subset (c, matrixZ[i], this);
     }
 
-    return_trace (true);
+    return_trace (ret);
   }
 };
 

+ 7 - 6
thirdparty/harfbuzz/src/OT/Layout/GPOS/CursivePosFormat1.hh

@@ -24,16 +24,17 @@ struct EntryExitRecord
     (src_base+exitAnchor).collect_variation_indices (c);
   }
 
-  EntryExitRecord* subset (hb_subset_context_t *c,
-                           const void *src_base) const
+  bool subset (hb_subset_context_t *c,
+	       const void *src_base) const
   {
     TRACE_SERIALIZE (this);
     auto *out = c->serializer->embed (this);
-    if (unlikely (!out)) return_trace (nullptr);
+    if (unlikely (!out)) return_trace (false);
 
-    out->entryAnchor.serialize_subset (c, entryAnchor, src_base);
-    out->exitAnchor.serialize_subset (c, exitAnchor, src_base);
-    return_trace (out);
+    bool ret = false;
+    ret |= out->entryAnchor.serialize_subset (c, entryAnchor, src_base);
+    ret |= out->exitAnchor.serialize_subset (c, exitAnchor, src_base);
+    return_trace (ret);
   }
 
   protected:

+ 7 - 6
thirdparty/harfbuzz/src/OT/Layout/GPOS/LigatureArray.hh

@@ -27,6 +27,7 @@ struct LigatureArray : List16OfOffset16To<LigatureAttach>
     auto *out = c->serializer->start_embed (this);
     if (unlikely (!c->serializer->extend_min (out)))  return_trace (false);
 
+    bool ret = false;
     for (const auto _ : + hb_zip (coverage, *this)
                   | hb_filter (glyphset, hb_first))
     {
@@ -38,13 +39,13 @@ struct LigatureArray : List16OfOffset16To<LigatureAttach>
           + hb_range (src.rows * class_count)
           | hb_filter ([=] (unsigned index) { return klass_mapping->has (index % class_count); })
           ;
-      matrix->serialize_subset (c,
-                                _.second,
-                                this,
-                                src.rows,
-                                indexes);
+      ret |= matrix->serialize_subset (c,
+				       _.second,
+				       this,
+				       src.rows,
+				       indexes);
     }
-    return_trace (this->len);
+    return_trace (ret);
   }
 };
 

+ 3 - 3
thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkArray.hh

@@ -82,10 +82,10 @@ struct MarkArray : Array16Of<MarkRecord>        /* Array of MarkRecords--in Cove
     | hb_map (hb_second)
     ;
 
+    bool ret = false;
     unsigned new_length = 0;
     for (const auto& mark_record : mark_iter) {
-      if (unlikely (!mark_record.subset (c, this, klass_mapping)))
-        return_trace (false);
+      ret |= mark_record.subset (c, this, klass_mapping);
       new_length++;
     }
 
@@ -93,7 +93,7 @@ struct MarkArray : Array16Of<MarkRecord>        /* Array of MarkRecords--in Cove
                                                 HB_SERIALIZE_ERROR_ARRAY_OVERFLOW)))
       return_trace (false);
 
-    return_trace (true);
+    return_trace (ret);
   }
 };
 

+ 7 - 8
thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkBasePosFormat1.hh

@@ -197,9 +197,10 @@ struct MarkBasePosFormat1_2
     if (!out->markCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
       return_trace (false);
 
-    out->markArray.serialize_subset (c, markArray, this,
-                                     (this+markCoverage).iter (),
-                                     &klass_mapping);
+    if (unlikely (!out->markArray.serialize_subset (c, markArray, this,
+						    (this+markCoverage).iter (),
+						    &klass_mapping)))
+      return_trace (false);
 
     unsigned basecount = (this+baseArray).rows;
     auto base_iter =
@@ -228,11 +229,9 @@ struct MarkBasePosFormat1_2
       ;
     }
 
-    out->baseArray.serialize_subset (c, baseArray, this,
-                                     base_iter.len (),
-                                     base_indexes.iter ());
-
-    return_trace (true);
+    return_trace (out->baseArray.serialize_subset (c, baseArray, this,
+						   base_iter.len (),
+						   base_indexes.iter ()));
   }
 };
 

+ 10 - 9
thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkLigPosFormat1.hh

@@ -169,7 +169,7 @@ struct MarkLigPosFormat1_2
   {
     TRACE_SUBSET (this);
     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
-    const hb_map_t &glyph_map = *c->plan->glyph_map;
+    const hb_map_t &glyph_map = c->plan->glyph_map_gsub;
 
     auto *out = c->serializer->start_embed (*this);
     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
@@ -195,23 +195,24 @@ struct MarkLigPosFormat1_2
     if (!out->markCoverage.serialize_serialize (c->serializer, new_mark_coverage))
       return_trace (false);
 
-    out->markArray.serialize_subset (c, markArray, this,
-                                     (this+markCoverage).iter (),
-                                     &klass_mapping);
+    if (unlikely (!out->markArray.serialize_subset (c, markArray, this,
+						    (this+markCoverage).iter (),
+						    &klass_mapping)))
+      return_trace (false);
 
     auto new_ligature_coverage =
     + hb_iter (this + ligatureCoverage)
-    | hb_filter (glyphset)
+    | hb_take ((this + ligatureArray).len)
     | hb_map_retains_sorting (glyph_map)
+    | hb_filter ([] (hb_codepoint_t glyph) { return glyph != HB_MAP_VALUE_INVALID; })
     ;
 
     if (!out->ligatureCoverage.serialize_serialize (c->serializer, new_ligature_coverage))
       return_trace (false);
 
-    out->ligatureArray.serialize_subset (c, ligatureArray, this,
-                                         hb_iter (this+ligatureCoverage), classCount, &klass_mapping);
-
-    return_trace (true);
+    return_trace (out->ligatureArray.serialize_subset (c, ligatureArray, this,
+						       hb_iter (this+ligatureCoverage),
+						       classCount, &klass_mapping));
   }
 
 };

+ 7 - 5
thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkMarkPosFormat1.hh

@@ -183,9 +183,10 @@ struct MarkMarkPosFormat1_2
     if (!out->mark1Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
       return_trace (false);
 
-    out->mark1Array.serialize_subset (c, mark1Array, this,
-                                      (this+mark1Coverage).iter (),
-                                      &klass_mapping);
+    if (unlikely (!out->mark1Array.serialize_subset (c, mark1Array, this,
+						     (this+mark1Coverage).iter (),
+						     &klass_mapping)))
+      return_trace (false);
 
     unsigned mark2count = (this+mark2Array).rows;
     auto mark2_iter =
@@ -214,9 +215,10 @@ struct MarkMarkPosFormat1_2
       ;
     }
 
-    out->mark2Array.serialize_subset (c, mark2Array, this, mark2_iter.len (), mark2_indexes.iter ());
+    return_trace (out->mark2Array.serialize_subset (c, mark2Array, this,
+						    mark2_iter.len (),
+						    mark2_indexes.iter ()));
 
-    return_trace (true);
   }
 };
 

+ 5 - 6
thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkRecord.hh

@@ -24,17 +24,16 @@ struct MarkRecord
     return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
   }
 
-  MarkRecord *subset (hb_subset_context_t    *c,
-                      const void             *src_base,
-                      const hb_map_t         *klass_mapping) const
+  bool subset (hb_subset_context_t    *c,
+	       const void             *src_base,
+	       const hb_map_t         *klass_mapping) const
   {
     TRACE_SUBSET (this);
     auto *out = c->serializer->embed (this);
-    if (unlikely (!out)) return_trace (nullptr);
+    if (unlikely (!out)) return_trace (false);
 
     out->klass = klass_mapping->get (klass);
-    out->markAnchor.serialize_subset (c, markAnchor, src_base);
-    return_trace (out);
+    return_trace (out->markAnchor.serialize_subset (c, markAnchor, src_base));
   }
 
   void collect_variation_indices (hb_collect_variation_indices_context_t *c,

+ 1 - 1
thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh

@@ -163,7 +163,7 @@ struct PairPosFormat2_4
 
 
     /* Isolate simple kerning and apply it half to each side.
-     * Results in better cursor positinoing / underline drawing.
+     * Results in better cursor positioning / underline drawing.
      *
      * Disabled, because causes issues... :-(
      * https://github.com/harfbuzz/harfbuzz/issues/3408

+ 95 - 2
thirdparty/harfbuzz/src/OT/glyf/Glyph.hh

@@ -103,6 +103,63 @@ struct Glyph
     }
   }
 
+  bool get_all_points_without_var (const hb_face_t *face,
+                                   contour_point_vector_t &points /* OUT */) const
+  {
+    switch (type) {
+    case SIMPLE:
+      if (unlikely (!SimpleGlyph (*header, bytes).get_contour_points (points)))
+        return false;
+      break;
+    case COMPOSITE:
+    {
+      for (auto &item : get_composite_iterator ())
+        if (unlikely (!item.get_points (points))) return false;
+      break;
+    }
+#ifndef HB_NO_VAR_COMPOSITES
+    case VAR_COMPOSITE:
+    {
+      for (auto &item : get_var_composite_iterator ())
+        if (unlikely (!item.get_points (points))) return false;
+      break;
+    }
+#endif
+    case EMPTY:
+      break;
+    }
+
+    /* Init phantom points */
+    if (unlikely (!points.resize (points.length + PHANTOM_COUNT))) return false;
+    hb_array_t<contour_point_t> phantoms = points.as_array ().sub_array (points.length - PHANTOM_COUNT, PHANTOM_COUNT);
+    {
+      int lsb = 0;
+      int h_delta = face->table.hmtx->get_leading_bearing_without_var_unscaled (gid, &lsb) ?
+                    (int) header->xMin - lsb : 0;
+      HB_UNUSED int tsb = 0;
+      int v_orig  = (int) header->yMax +
+#ifndef HB_NO_VERTICAL
+                    ((void) face->table.vmtx->get_leading_bearing_without_var_unscaled (gid, &tsb), tsb)
+#else
+                    0
+#endif
+                    ;
+      unsigned h_adv = face->table.hmtx->get_advance_without_var_unscaled (gid);
+      unsigned v_adv =
+#ifndef HB_NO_VERTICAL
+                       face->table.vmtx->get_advance_without_var_unscaled (gid)
+#else
+                       - face->get_upem ()
+#endif
+                       ;
+      phantoms[PHANTOM_LEFT].x = h_delta;
+      phantoms[PHANTOM_RIGHT].x = (int) h_adv + h_delta;
+      phantoms[PHANTOM_TOP].y = v_orig;
+      phantoms[PHANTOM_BOTTOM].y = v_orig - (int) v_adv;
+    }
+    return true;
+  }
+
   void update_mtx (const hb_subset_plan_t *plan,
                    int xMin, int xMax,
                    int yMin, int yMax,
@@ -293,6 +350,7 @@ struct Glyph
 		   bool use_my_metrics = true,
 		   bool phantom_only = false,
 		   hb_array_t<int> coords = hb_array_t<int> (),
+		   hb_map_t *current_glyphs = nullptr,
 		   unsigned int depth = 0,
 		   unsigned *edge_count = nullptr) const
   {
@@ -302,6 +360,10 @@ struct Glyph
     if (unlikely (*edge_count > HB_GLYF_MAX_EDGE_COUNT)) return false;
     (*edge_count)++;
 
+    hb_map_t current_glyphs_stack;
+    if (current_glyphs == nullptr)
+      current_glyphs = &current_glyphs_stack;
+
     if (head_maxp_info)
     {
       head_maxp_info->maxComponentDepth = hb_max (head_maxp_info->maxComponentDepth, depth);
@@ -396,10 +458,17 @@ struct Glyph
       unsigned int comp_index = 0;
       for (auto &item : get_composite_iterator ())
       {
+	hb_codepoint_t item_gid = item.get_gid ();
+
+        if (unlikely (current_glyphs->has (item_gid)))
+	  continue;
+
+	current_glyphs->add (item_gid);
+
 	unsigned old_count = all_points.length;
 
 	if (unlikely ((!phantom_only || (use_my_metrics && item.is_use_my_metrics ())) &&
-		      !glyf_accelerator.glyph_for_gid (item.get_gid ())
+		      !glyf_accelerator.glyph_for_gid (item_gid)
 				       .get_points (font,
 						    glyf_accelerator,
 						    all_points,
@@ -410,9 +479,13 @@ struct Glyph
 						    use_my_metrics,
 						    phantom_only,
 						    coords,
+						    current_glyphs,
 						    depth + 1,
 						    edge_count)))
+	{
+	  current_glyphs->del (item_gid);
 	  return false;
+	}
 
 	auto comp_points = all_points.as_array ().sub_array (old_count);
 
@@ -448,9 +521,13 @@ struct Glyph
 	all_points.resize (all_points.length - PHANTOM_COUNT);
 
 	if (all_points.length > HB_GLYF_MAX_POINTS)
+	{
+	  current_glyphs->del (item_gid);
 	  return false;
+	}
 
 	comp_index++;
+        current_glyphs->del (item_gid);
       }
 
       if (head_maxp_info && depth == 0)
@@ -468,6 +545,13 @@ struct Glyph
       hb_array_t<contour_point_t> points_left = points.as_array ();
       for (auto &item : get_var_composite_iterator ())
       {
+	hb_codepoint_t item_gid = item.get_gid ();
+
+        if (unlikely (current_glyphs->has (item_gid)))
+	  continue;
+
+	current_glyphs->add (item_gid);
+
 	unsigned item_num_points = item.get_num_points ();
 	hb_array_t<contour_point_t> record_points = points_left.sub_array (0, item_num_points);
 	assert (record_points.length == item_num_points);
@@ -485,7 +569,7 @@ struct Glyph
 	unsigned old_count = all_points.length;
 
 	if (unlikely ((!phantom_only || (use_my_metrics && item.is_use_my_metrics ())) &&
-		      !glyf_accelerator.glyph_for_gid (item.get_gid ())
+		      !glyf_accelerator.glyph_for_gid (item_gid)
 				       .get_points (font,
 						    glyf_accelerator,
 						    all_points,
@@ -496,9 +580,13 @@ struct Glyph
 						    use_my_metrics,
 						    phantom_only,
 						    coord_setter.get_coords (),
+						    current_glyphs,
 						    depth + 1,
 						    edge_count)))
+	{
+	  current_glyphs->del (item_gid);
 	  return false;
+	}
 
 	auto comp_points = all_points.as_array ().sub_array (old_count);
 
@@ -514,9 +602,14 @@ struct Glyph
 	all_points.resize (all_points.length - PHANTOM_COUNT);
 
 	if (all_points.length > HB_GLYF_MAX_POINTS)
+	{
+	  current_glyphs->del (item_gid);
 	  return false;
+	}
 
 	points_left += item_num_points;
+
+        current_glyphs->del (item_gid);
       }
       all_points.extend (phantoms);
     } break;
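
Besides applying the same kind of cycle guard to composite components (current_glyphs), Glyph.hh gains get_all_points_without_var, which appends the four phantom points derived from the glyph's bounding box and its horizontal/vertical metrics. A small sketch of that phantom-point arithmetic with made-up example metrics (the real code reads hmtx/vmtx; these values are assumptions for illustration):

// Sketch of the phantom-point initialization in get_all_points_without_var.
#include <cstdio>

struct Point { int x = 0, y = 0; };

int main ()
{
  // Assumed example metrics for one glyph.
  int x_min = 50, y_max = 700;     // glyph bounding box edges
  int lsb = 50, tsb = 100;         // left / top side bearings
  int h_adv = 600, v_adv = 1000;   // horizontal / vertical advances

  int h_delta = x_min - lsb;       // horizontal shift implied by the left bearing
  int v_orig  = y_max + tsb;       // vertical origin sits above the bbox by the top bearing

  Point phantoms[4];
  phantoms[0].x = h_delta;             // PHANTOM_LEFT
  phantoms[1].x = h_adv + h_delta;     // PHANTOM_RIGHT
  phantoms[2].y = v_orig;              // PHANTOM_TOP
  phantoms[3].y = v_orig - v_adv;      // PHANTOM_BOTTOM

  for (const Point &p : phantoms)
    std::printf ("(%d, %d)\n", p.x, p.y);
  return 0;
}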

+ 23 - 4
thirdparty/harfbuzz/src/graph/graph.hh

@@ -290,7 +290,7 @@ struct graph_t
 	new_parents.set (id_map[_.first], _.second);
       }
 
-      if (new_parents.in_error ())
+      if (parents.in_error() || new_parents.in_error ())
         return false;
 
       parents = std::move (new_parents);
@@ -310,8 +310,15 @@ struct graph_t
       if (parents.has (old_index, &pv))
       {
         unsigned v = *pv;
-	parents.set (new_index, v);
+	if (!parents.set (new_index, v))
+          incoming_edges_ -= v;
 	parents.del (old_index);
+
+        if (incoming_edges_ == 1)
+	{
+	  single_parent = *parents.keys ();
+	  parents.reset ();
+	}
       }
     }
 
@@ -467,6 +474,18 @@ struct graph_t
     return root ().equals (other.root (), *this, other, 0);
   }
 
+  void print () const {
+    for (int i = vertices_.length - 1; i >= 0; i--)
+    {
+      const auto& v = vertices_[i];
+      printf("%d: %u [", i, (unsigned int)v.table_size());
+      for (const auto &l : v.obj.real_links) {
+        printf("%u, ", l.objidx);
+      }
+      printf("]\n");
+    }
+  }
+
   // Sorts links of all objects in a consistent manner and zeroes all offsets.
   void normalize ()
   {
@@ -547,7 +566,7 @@ struct graph_t
 
     update_distances ();
 
-    hb_priority_queue_t queue;
+    hb_priority_queue_t<int64_t> queue;
     hb_vector_t<vertex_t> &sorted_graph = vertices_scratch_;
     if (unlikely (!check_success (sorted_graph.resize (vertices_.length)))) return;
     hb_vector_t<unsigned> id_map;
@@ -1350,7 +1369,7 @@ struct graph_t
       vertices_.arrayZ[i].distance = hb_int_max (int64_t);
     vertices_.tail ().distance = 0;
 
-    hb_priority_queue_t queue;
+    hb_priority_queue_t<int64_t> queue;
     queue.insert (0, vertices_.length - 1);
 
     hb_vector_t<bool> visited;

+ 1 - 1
thirdparty/harfbuzz/src/graph/gsubgpos-context.hh

@@ -40,7 +40,7 @@ struct gsubgpos_graph_context_t
   graph_t& graph;
   unsigned lookup_list_index;
   hb_hashmap_t<unsigned, graph::Lookup*> lookups;
-
+  hb_hashmap_t<unsigned, unsigned> subtable_to_extension;
 
   HB_INTERNAL gsubgpos_graph_context_t (hb_tag_t table_tag_,
                                         graph_t& graph_);

+ 15 - 4
thirdparty/harfbuzz/src/graph/gsubgpos-graph.hh

@@ -299,24 +299,35 @@ struct Lookup : public OT::Lookup
                                 unsigned subtable_index)
   {
     unsigned type = lookupType;
+    unsigned ext_index = -1;
+    unsigned* existing_ext_index = nullptr;
+    if (c.subtable_to_extension.has(subtable_index, &existing_ext_index)) {
+      ext_index = *existing_ext_index;
+    } else {    
+      ext_index = create_extension_subtable(c, subtable_index, type);
+      c.subtable_to_extension.set(subtable_index, ext_index);
+    }
 
-    unsigned ext_index = create_extension_subtable(c, subtable_index, type);
     if (ext_index == (unsigned) -1)
       return false;
 
+    auto& subtable_vertex = c.graph.vertices_[subtable_index];
     auto& lookup_vertex = c.graph.vertices_[lookup_index];
     for (auto& l : lookup_vertex.obj.real_links.writer ())
     {
-      if (l.objidx == subtable_index)
+      if (l.objidx == subtable_index) {
         // Change lookup to point at the extension.
         l.objidx = ext_index;
+        if (existing_ext_index)
+          subtable_vertex.remove_parent(lookup_index);
+      }
     }
 
     // Make extension point at the subtable.
     auto& ext_vertex = c.graph.vertices_[ext_index];
-    auto& subtable_vertex = c.graph.vertices_[subtable_index];
     ext_vertex.add_parent (lookup_index);
-    subtable_vertex.remap_parent (lookup_index, ext_index);
+    if (!existing_ext_index)
+      subtable_vertex.remap_parent (lookup_index, ext_index);
 
     return true;
   }
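
In the repacker, the lookup-to-extension promotion now caches the extension subtable it creates per subtable index in the new subtable_to_extension map, so two lookups that share a subtable reuse one extension record instead of duplicating it. A minimal memoization sketch of that idea (illustrative names, not the repacker's graph types):

// Sketch: one extension object per subtable index, reused on later requests.
#include <cstdio>
#include <unordered_map>

static unsigned create_extension (unsigned subtable_index)
{
  static unsigned next_id = 100;   // stand-in for allocating a new graph vertex
  std::printf ("creating extension for subtable %u\n", subtable_index);
  return next_id++;
}

static unsigned get_or_create_extension (
    unsigned subtable_index,
    std::unordered_map<unsigned, unsigned> &subtable_to_extension)
{
  auto it = subtable_to_extension.find (subtable_index);
  if (it != subtable_to_extension.end ())
    return it->second;             // shared subtable: reuse the existing extension
  unsigned ext = create_extension (subtable_index);
  subtable_to_extension.emplace (subtable_index, ext);
  return ext;
}

int main ()
{
  std::unordered_map<unsigned, unsigned> cache;
  unsigned a = get_or_create_extension (7, cache);
  unsigned b = get_or_create_extension (7, cache);   // second call hits the cache
  std::printf ("%u %u\n", a, b);                     // same id printed twice
  return 0;
}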

+ 5 - 3
thirdparty/harfbuzz/src/graph/markbasepos-graph.hh

@@ -217,7 +217,7 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
 
     const unsigned base_coverage_id = c.graph.index_for_offset (this_index, &baseCoverage);
     const unsigned base_size =
-        OT::Layout::GPOS_impl::PairPosFormat1_3<SmallTypes>::min_size +
+        OT::Layout::GPOS_impl::MarkBasePosFormat1_2<SmallTypes>::min_size +
         MarkArray::min_size +
         AnchorMatrix::min_size +
         c.graph.vertices_[base_coverage_id].table_size ();
@@ -318,7 +318,9 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
   {
     hb_vector_t<class_info_t> class_to_info;
 
-    unsigned class_count= classCount;
+    unsigned class_count = classCount;
+    if (!class_count) return class_to_info;
+
     if (!class_to_info.resize (class_count))
       return hb_vector_t<class_info_t>();
 
@@ -482,7 +484,7 @@ struct MarkBasePos : public OT::Layout::GPOS_impl::MarkBasePos
       return ((MarkBasePosFormat1*)(&u.format1))->split_subtables (c, parent_index, this_index);
 #ifndef HB_NO_BEYOND_64K
     case 2: HB_FALLTHROUGH;
-      // Don't split 24bit PairPos's.
+      // Don't split 24bit MarkBasePos's.
 #endif
     default:
       return hb_vector_t<unsigned> ();

+ 35 - 37
thirdparty/harfbuzz/src/hb-aat-layout-common.hh

@@ -851,43 +851,41 @@ struct StateTableDriver
        *
        *   https://github.com/harfbuzz/harfbuzz/issues/2860
        */
-      const EntryT *wouldbe_entry;
-      bool safe_to_break =
-	/* 1. */
-	!c->is_actionable (this, entry)
-      &&
-	/* 2. */
-	(
-	  /* 2a. */
-	  state == StateTableT::STATE_START_OF_TEXT
-	||
-	  /* 2b. */
-	  (
-	    (entry.flags & context_t::DontAdvance) &&
-	    next_state == StateTableT::STATE_START_OF_TEXT
-	  )
-	||
-	  /* 2c. */
-	  (
-	    wouldbe_entry = &machine.get_entry (StateTableT::STATE_START_OF_TEXT, klass)
-	  ,
-	    /* 2c'. */
-	    !c->is_actionable (this, *wouldbe_entry)
-	  &&
-	    /* 2c". */
-	    (
-	      next_state == machine.new_state (wouldbe_entry->newState)
-	    &&
-	      (entry.flags & context_t::DontAdvance) == (wouldbe_entry->flags & context_t::DontAdvance)
-	    )
-	  )
-	)
-      &&
-	/* 3. */
-	!c->is_actionable (this, machine.get_entry (state, StateTableT::CLASS_END_OF_TEXT))
-      ;
-
-      if (!safe_to_break && buffer->backtrack_len () && buffer->idx < buffer->len)
+
+      const auto is_safe_to_break_extra = [&]()
+      {
+          /* 2c. */
+          const auto wouldbe_entry = machine.get_entry(StateTableT::STATE_START_OF_TEXT, klass);
+      
+          /* 2c'. */
+          if (c->is_actionable (this, wouldbe_entry))
+              return false;
+      
+          /* 2c". */
+          return next_state == machine.new_state(wouldbe_entry.newState)
+              && (entry.flags & context_t::DontAdvance) == (wouldbe_entry.flags & context_t::DontAdvance);
+      };
+      
+      const auto is_safe_to_break = [&]()
+      {
+          /* 1. */
+          if (c->is_actionable (this, entry))
+              return false;
+      
+          /* 2. */
+          // This one is meh, I know...
+          const auto ok =
+                 state == StateTableT::STATE_START_OF_TEXT
+              || ((entry.flags & context_t::DontAdvance) && next_state == StateTableT::STATE_START_OF_TEXT)
+              || is_safe_to_break_extra();
+          if (!ok)
+              return false;
+      
+          /* 3. */
+          return !c->is_actionable (this, machine.get_entry (state, StateTableT::CLASS_END_OF_TEXT));
+      };
+
+      if (!is_safe_to_break () && buffer->backtrack_len () && buffer->idx < buffer->len)
 	buffer->unsafe_to_break_from_outbuffer (buffer->backtrack_len () - 1, buffer->idx + 1);
 
       c->transition (this, entry);

+ 4 - 0
thirdparty/harfbuzz/src/hb-algs.hh

@@ -367,6 +367,10 @@ struct
 	    hb_enable_if (std::is_integral<T>::value && sizeof (T) > sizeof (uint32_t))> constexpr auto
   impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, (uint32_t) (v ^ (v >> 32)) * 2654435761u /* Knuth's multiplicative hash */)
 
+  template <typename T,
+	    hb_enable_if (std::is_floating_point<T>::value)> constexpr auto
+  impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, fasthash32 (std::addressof (v), sizeof (T), 0xf437ffe6))
+
   template <typename T> constexpr auto
   impl (const T& v, hb_priority<0>) const HB_RETURN (uint32_t, std::hash<hb_decay<decltype (hb_deref (v))>>{} (hb_deref (v)))
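
The hb-algs.hh hunk adds a hash overload for floating-point types that hashes the value's object representation (via HarfBuzz's internal fasthash32) instead of going through std::hash. A sketch of byte-wise float hashing; FNV-1a is used here purely as a stand-in for fasthash32:

// Sketch: hash a float/double by its bytes rather than its numeric value.
#include <cstdint>
#include <cstdio>
#include <cstring>

static uint32_t hash_bytes (const void *data, size_t len)
{
  const unsigned char *p = static_cast<const unsigned char *> (data);
  uint32_t h = 2166136261u;                // FNV offset basis
  for (size_t i = 0; i < len; i++)
  {
    h ^= p[i];
    h *= 16777619u;                        // FNV prime
  }
  return h;
}

template <typename T>
static uint32_t hash_float (T v)
{
  unsigned char buf[sizeof (T)];
  std::memcpy (buf, &v, sizeof (T));       // well-defined way to read the bytes
  return hash_bytes (buf, sizeof (T));
}

int main ()
{
  std::printf ("%08x %08x\n",
               (unsigned) hash_float (1.5f),
               (unsigned) hash_float (2.5));
  return 0;
}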
 

+ 1 - 1
thirdparty/harfbuzz/src/hb-bimap.hh

@@ -93,7 +93,7 @@ struct hb_bimap_t
   auto iter () const HB_AUTO_RETURN (+ forw_map.iter())
 };
 
-/* Inremental bimap: only lhs is given, rhs is incrementally assigned */
+/* Incremental bimap: only lhs is given, rhs is incrementally assigned */
 struct hb_inc_bimap_t
 {
   bool in_error () const { return forw_map.in_error () || back_map.in_error (); }

+ 3 - 0
thirdparty/harfbuzz/src/hb-bit-page.hh

@@ -97,6 +97,7 @@ struct hb_bit_page_t
   static inline constexpr unsigned len ()
   { return ARRAY_LENGTH_CONST (v); }
 
+  operator bool () const { return !is_empty (); }
   bool is_empty () const
   {
     if (has_population ()) return !population;
@@ -218,6 +219,7 @@ struct hb_bit_page_t
     return count;
   }
 
+  bool operator == (const hb_bit_page_t &other) const { return is_equal (other); }
   bool is_equal (const hb_bit_page_t &other) const
   {
     for (unsigned i = 0; i < len (); i++)
@@ -225,6 +227,7 @@ struct hb_bit_page_t
 	return false;
     return true;
   }
+  bool operator <= (const hb_bit_page_t &larger_page) const { return is_subset (larger_page); }
   bool is_subset (const hb_bit_page_t &larger_page) const
   {
     if (has_population () && larger_page.has_population () &&

+ 2 - 2
thirdparty/harfbuzz/src/hb-bit-set.hh

@@ -913,7 +913,7 @@ struct hb_bit_set_t
 
     /* The extra page_map length is necessary; can't just rely on vector here,
      * since the next check would be tricked because a null page also has
-     * major==0, which we can't distinguish from an actualy major==0 page... */
+     * major==0, which we can't distinguish from an actually major==0 page... */
     unsigned i = last_page_lookup;
     if (likely (i < page_map.length))
     {
@@ -947,7 +947,7 @@ struct hb_bit_set_t
 
     /* The extra page_map length is necessary; can't just rely on vector here,
      * since the next check would be tricked because a null page also has
-     * major==0, which we can't distinguish from an actualy major==0 page... */
+     * major==0, which we can't distinguish from an actually major==0 page... */
     unsigned i = last_page_lookup;
     if (likely (i < page_map.length))
     {

+ 4 - 4
thirdparty/harfbuzz/src/hb-buffer-deserialize-json.hh

@@ -32,7 +32,7 @@
 #include "hb.hh"
 
 
-#line 33 "hb-buffer-deserialize-json.hh"
+#line 36 "hb-buffer-deserialize-json.hh"
 static const unsigned char _deserialize_json_trans_keys[] = {
 	0u, 0u, 9u, 123u, 9u, 34u, 97u, 117u, 120u, 121u, 34u, 34u, 9u, 58u, 9u, 57u, 
 	48u, 57u, 9u, 125u, 9u, 125u, 9u, 93u, 9u, 125u, 34u, 34u, 9u, 58u, 9u, 57u, 
@@ -555,12 +555,12 @@ _hb_buffer_deserialize_json (hb_buffer_t *buffer,
   hb_glyph_info_t info = {0};
   hb_glyph_position_t pos = {0};
   
-#line 552 "hb-buffer-deserialize-json.hh"
+#line 559 "hb-buffer-deserialize-json.hh"
 	{
 	cs = deserialize_json_start;
 	}
 
-#line 555 "hb-buffer-deserialize-json.hh"
+#line 564 "hb-buffer-deserialize-json.hh"
 	{
 	int _slen;
 	int _trans;
@@ -772,7 +772,7 @@ _resume:
 	*end_ptr = p;
 }
 	break;
-#line 733 "hb-buffer-deserialize-json.hh"
+#line 776 "hb-buffer-deserialize-json.hh"
 	}
 
 _again:

+ 5 - 5
thirdparty/harfbuzz/src/hb-buffer-deserialize-text-glyphs.hh

@@ -32,7 +32,7 @@
 #include "hb.hh"
 
 
-#line 33 "hb-buffer-deserialize-text-glyphs.hh"
+#line 36 "hb-buffer-deserialize-text-glyphs.hh"
 static const unsigned char _deserialize_text_glyphs_trans_keys[] = {
 	0u, 0u, 48u, 57u, 45u, 57u, 48u, 57u, 45u, 57u, 48u, 57u, 48u, 57u, 45u, 57u, 
 	48u, 57u, 44u, 44u, 45u, 57u, 48u, 57u, 44u, 57u, 43u, 124u, 9u, 124u, 9u, 124u, 
@@ -349,12 +349,12 @@ _hb_buffer_deserialize_text_glyphs (hb_buffer_t *buffer,
   hb_glyph_info_t info = {0};
   hb_glyph_position_t pos = {0};
   
-#line 346 "hb-buffer-deserialize-text-glyphs.hh"
+#line 353 "hb-buffer-deserialize-text-glyphs.hh"
 	{
 	cs = deserialize_text_glyphs_start;
 	}
 
-#line 349 "hb-buffer-deserialize-text-glyphs.hh"
+#line 358 "hb-buffer-deserialize-text-glyphs.hh"
 	{
 	int _slen;
 	int _trans;
@@ -550,7 +550,7 @@ _resume:
 	*end_ptr = p;
 }
 	break;
-#line 516 "hb-buffer-deserialize-text-glyphs.hh"
+#line 554 "hb-buffer-deserialize-text-glyphs.hh"
 	}
 
 _again:
@@ -667,7 +667,7 @@ _again:
 	*end_ptr = p;
 }
 	break;
-#line 616 "hb-buffer-deserialize-text-glyphs.hh"
+#line 671 "hb-buffer-deserialize-text-glyphs.hh"
 	}
 	}
 

+ 5 - 5
thirdparty/harfbuzz/src/hb-buffer-deserialize-text-unicode.hh

@@ -32,7 +32,7 @@
 #include "hb.hh"
 
 
-#line 33 "hb-buffer-deserialize-text-unicode.hh"
+#line 36 "hb-buffer-deserialize-text-unicode.hh"
 static const unsigned char _deserialize_text_unicode_trans_keys[] = {
 	0u, 0u, 9u, 117u, 43u, 102u, 48u, 102u, 48u, 57u, 9u, 124u, 9u, 124u, 9u, 124u, 
 	9u, 124u, 0
@@ -197,12 +197,12 @@ _hb_buffer_deserialize_text_unicode (hb_buffer_t *buffer,
   hb_glyph_info_t info = {0};
   const hb_glyph_position_t pos = {0};
   
-#line 194 "hb-buffer-deserialize-text-unicode.hh"
+#line 201 "hb-buffer-deserialize-text-unicode.hh"
 	{
 	cs = deserialize_text_unicode_start;
 	}
 
-#line 197 "hb-buffer-deserialize-text-unicode.hh"
+#line 206 "hb-buffer-deserialize-text-unicode.hh"
 	{
 	int _slen;
 	int _trans;
@@ -269,7 +269,7 @@ _resume:
 	*end_ptr = p;
 }
 	break;
-#line 256 "hb-buffer-deserialize-text-unicode.hh"
+#line 273 "hb-buffer-deserialize-text-unicode.hh"
 	}
 
 _again:
@@ -307,7 +307,7 @@ _again:
 	*end_ptr = p;
 }
 	break;
-#line 289 "hb-buffer-deserialize-text-unicode.hh"
+#line 311 "hb-buffer-deserialize-text-unicode.hh"
 	}
 	}
 

+ 2 - 2
thirdparty/harfbuzz/src/hb-buffer.cc

@@ -1327,7 +1327,7 @@ hb_buffer_get_invisible_glyph (const hb_buffer_t *buffer)
  * Sets the #hb_codepoint_t that replaces characters not found in
  * the font during shaping.
  *
- * The not-found glyph defaults to zero, sometimes knows as the
+ * The not-found glyph defaults to zero, sometimes known as the
  * ".notdef" glyph.  This API allows for differentiating the two.
  *
  * Since: 3.1.0
@@ -2076,7 +2076,7 @@ hb_buffer_t::sort (unsigned int start, unsigned int end, int(*compar)(const hb_g
  * hb_buffer_diff:
  * @buffer: a buffer.
  * @reference: other buffer to compare to.
- * @dottedcircle_glyph: glyph id of U+25CC DOTTED CIRCLE, or (hb_codepont_t) -1.
+ * @dottedcircle_glyph: glyph id of U+25CC DOTTED CIRCLE, or (hb_codepoint_t) -1.
  * @position_fuzz: allowed absolute difference in position values.
  *
  * If dottedcircle_glyph is (hb_codepoint_t) -1 then #HB_BUFFER_DIFF_FLAG_DOTTED_CIRCLE_PRESENT

+ 1 - 1
thirdparty/harfbuzz/src/hb-buffer.h

@@ -99,7 +99,7 @@ typedef struct hb_glyph_info_t {
  * 				   layout, by avoiding re-shaping of each line
  * 				   after line-breaking, by limiting the
  * 				   reshaping to a small piece around the
- * 				   breaking positin only, even if the breaking
+ * 				   breaking position only, even if the breaking
  * 				   position carries the
  * 				   #HB_GLYPH_FLAG_UNSAFE_TO_BREAK or when
  * 				   hyphenation or other text transformation

+ 27 - 0
thirdparty/harfbuzz/src/hb-cairo.cc

@@ -166,6 +166,32 @@ hb_cairo_pop_transform (hb_paint_funcs_t *pfuncs HB_UNUSED,
   cairo_restore (cr);
 }
 
+static hb_bool_t
+hb_cairo_paint_color_glyph (hb_paint_funcs_t *pfuncs HB_UNUSED,
+			    void *paint_data,
+			    hb_codepoint_t glyph,
+			    hb_font_t *font,
+			    void *user_data HB_UNUSED)
+{
+  hb_cairo_context_t *c = (hb_cairo_context_t *) paint_data;
+  cairo_t *cr = c->cr;
+
+  cairo_save (cr);
+
+  hb_position_t x_scale, y_scale;
+  hb_font_get_scale (font, &x_scale, &y_scale);
+  cairo_scale (cr, x_scale, y_scale);
+
+  cairo_glyph_t cairo_glyph = { glyph, 0, 0 };
+  cairo_set_scaled_font (cr, c->scaled_font);
+  cairo_set_font_size (cr, 1);
+  cairo_show_glyphs (cr, &cairo_glyph, 1);
+
+  cairo_restore (cr);
+
+  return true;
+}
+
 static void
 hb_cairo_push_clip_glyph (hb_paint_funcs_t *pfuncs HB_UNUSED,
 			  void *paint_data,
@@ -397,6 +423,7 @@ static struct hb_cairo_paint_funcs_lazy_loader_t : hb_paint_funcs_lazy_loader_t<
 
     hb_paint_funcs_set_push_transform_func (funcs, hb_cairo_push_transform, nullptr, nullptr);
     hb_paint_funcs_set_pop_transform_func (funcs, hb_cairo_pop_transform, nullptr, nullptr);
+    hb_paint_funcs_set_color_glyph_func (funcs, hb_cairo_paint_color_glyph, nullptr, nullptr);
     hb_paint_funcs_set_push_clip_glyph_func (funcs, hb_cairo_push_clip_glyph, nullptr, nullptr);
     hb_paint_funcs_set_push_clip_rectangle_func (funcs, hb_cairo_push_clip_rectangle, nullptr, nullptr);
     hb_paint_funcs_set_pop_clip_func (funcs, hb_cairo_pop_clip, nullptr, nullptr);

+ 1 - 1
thirdparty/harfbuzz/src/hb-common.cc

@@ -815,7 +815,7 @@ parse_tag (const char **pp, const char *end, hb_tag_t *tag)
   }
 
   const char *p = *pp;
-  while (*pp < end && (ISALNUM(**pp) || **pp == '_'))
+  while (*pp < end && (**pp != ' ' && **pp != '=' && **pp != '[' && **pp != quote))
     (*pp)++;
 
   if (p == *pp || *pp - p > 4)

+ 17 - 2
thirdparty/harfbuzz/src/hb-debug.hh

@@ -265,8 +265,9 @@ static inline void _hb_warn_no_return (bool returned)
   }
 }
 template <>
-/*static*/ inline void _hb_warn_no_return<hb_empty_t> (bool returned HB_UNUSED)
-{}
+/*static*/ inline void _hb_warn_no_return<hb_empty_t> (bool returned HB_UNUSED) {}
+template <>
+/*static*/ inline void _hb_warn_no_return<void> (bool returned HB_UNUSED) {}
 
 template <int max_level, typename ret_t>
 struct hb_auto_trace_t
@@ -450,12 +451,26 @@ struct hb_no_trace_t {
 #define HB_DEBUG_SUBSET_REPACK (HB_DEBUG+0)
 #endif
 
+#ifndef HB_DEBUG_PAINT
+#define HB_DEBUG_PAINT (HB_DEBUG+0)
+#endif
+#if HB_DEBUG_PAINT
+#define TRACE_PAINT(this) \
+  HB_UNUSED hb_auto_trace_t<HB_DEBUG_PAINT, void> trace \
+  (&c->debug_depth, c->get_name (), this, HB_FUNC, \
+   " ")
+#else
+#define TRACE_PAINT(this) HB_UNUSED hb_no_trace_t<void> trace
+#endif
+
+
 #ifndef HB_DEBUG_DISPATCH
 #define HB_DEBUG_DISPATCH ( \
 	HB_DEBUG_APPLY + \
 	HB_DEBUG_SANITIZE + \
 	HB_DEBUG_SERIALIZE + \
 	HB_DEBUG_SUBSET + \
+	HB_DEBUG_PAINT + \
 	0)
 #endif
 #if HB_DEBUG_DISPATCH
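
The new TRACE_PAINT macro plugs paint dispatch into the existing depth-tracked hb_auto_trace_t machinery when HB_DEBUG_PAINT is enabled, and compiles to a no-op otherwise. A minimal RAII sketch of what such a scoped trace object does (names and output format are illustrative only):

// Sketch: a scope-bound tracer that indents by call depth, like hb_auto_trace_t.
#include <cstdio>

struct scoped_trace
{
  scoped_trace (int *depth, const char *what) : depth_ (depth)
  {
    std::printf ("%*sENTER %s\n", (*depth_)++ * 2, "", what);
  }
  ~scoped_trace ()
  {
    std::printf ("%*sLEAVE\n", --(*depth_) * 2, "");
  }
  int *depth_;
};

static void paint_child (int *depth) { scoped_trace t (depth, "child"); }

int main ()
{
  int depth = 0;
  {
    scoped_trace t (&depth, "PaintGlyph");   // like TRACE_PAINT (this)
    paint_child (&depth);                    // nested call indents one level deeper
  }                                          // destructors unwind the depth
  return 0;
}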

+ 1 - 1
thirdparty/harfbuzz/src/hb-font.cc

@@ -1066,7 +1066,7 @@ hb_font_get_nominal_glyph (hb_font_t      *font,
  * @glyph_stride: The stride between successive glyph IDs
  *
  * Fetches the nominal glyph IDs for a sequence of Unicode code points. Glyph
- * IDs must be returned in a #hb_codepoint_t output parameter. Stopes at the
+ * IDs must be returned in a #hb_codepoint_t output parameter. Stops at the
  * first unsupported glyph ID.
  *
  * Return value: the number of code points processed

+ 35 - 1
thirdparty/harfbuzz/src/hb-ft-colr.hh

@@ -105,6 +105,8 @@ struct hb_ft_paint_context_t
   FT_Color *palette;
   unsigned palette_index;
   hb_color_t foreground;
+  hb_map_t current_glyphs;
+  hb_map_t current_layers;
   int depth_left = HB_MAX_NESTING_LEVEL;
   int edge_count = HB_COLRV1_MAX_EDGE_COUNT;
 };
@@ -220,9 +222,18 @@ _hb_ft_paint (hb_ft_paint_context_t *c,
 				  &paint.u.colr_layers.layer_iterator,
 				  &other_paint))
       {
+        unsigned i = paint.u.colr_layers.layer_iterator.layer;
+
+	if (unlikely (c->current_layers.has (i)))
+	  continue;
+
+	c->current_layers.add (i);
+
 	c->funcs->push_group (c->data);
 	c->recurse (other_paint);
 	c->funcs->pop_group (c->data, HB_PAINT_COMPOSITE_MODE_SRC_OVER);
+
+	c->current_layers.del (i);
       }
     }
     break;
@@ -320,8 +331,27 @@ _hb_ft_paint (hb_ft_paint_context_t *c,
     break;
     case FT_COLR_PAINTFORMAT_COLR_GLYPH:
     {
+      hb_codepoint_t gid = paint.u.colr_glyph.glyphID;
+
+      if (unlikely (c->current_glyphs.has (gid)))
+	return;
+
+      c->current_glyphs.add (gid);
+
+      c->funcs->push_inverse_root_transform (c->data, c->font);
+      c->ft_font->lock.unlock ();
+      if (c->funcs->color_glyph (c->data, gid, c->font))
+      {
+	c->ft_font->lock.lock ();
+	c->funcs->pop_transform (c->data);
+	c->current_glyphs.del (gid);
+	return;
+      }
+      c->ft_font->lock.lock ();
+      c->funcs->pop_transform (c->data);
+
       FT_OpaquePaint other_paint = {0};
-      if (FT_Get_Color_Glyph_Paint (ft_face, paint.u.colr_glyph.glyphID,
+      if (FT_Get_Color_Glyph_Paint (ft_face, gid,
 				    FT_COLOR_NO_ROOT_TRANSFORM,
 				    &other_paint))
       {
@@ -350,6 +380,8 @@ _hb_ft_paint (hb_ft_paint_context_t *c,
 
         if (has_clip_box)
           c->funcs->pop_clip (c->data);
+
+	c->current_glyphs.del (gid);
       }
     }
     break;
@@ -474,6 +506,7 @@ hb_ft_paint_glyph_colr (hb_font_t *font,
     hb_ft_paint_context_t c (ft_font, font,
 			     paint_funcs, paint_data,
 			     palette, palette_index, foreground);
+    c.current_glyphs.add (gid);
 
     bool is_bounded = true;
     FT_ClipBox clip_box;
@@ -497,6 +530,7 @@ hb_ft_paint_glyph_colr (hb_font_t *font,
       hb_ft_paint_context_t ce (ft_font, font,
 			        extents_funcs, &extents_data,
 			        palette, palette_index, foreground);
+      ce.current_glyphs.add (gid);
       ce.funcs->push_root_transform (ce.data, font);
       ce.recurse (paint);
       ce.funcs->pop_transform (ce.data);

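The current_glyphs / current_layers maps added here guard the COLRv1 recursion against cycles in the font data: an id is recorded before recursing, a repeat visit on the same paint path is skipped, and the id is released afterwards so sibling branches may still reference it. A generic sketch of the pattern (visited and recurse_into are illustrative names, not HarfBuzz API):

    void visit (hb_codepoint_t gid)
    {
      if (visited.has (gid))
        return;               /* gid already on the active paint path: a cycle */
      visited.add (gid);
      recurse_into (gid);     /* may reach the same gid again via the font data */
      visited.del (gid);      /* allow gid on other branches */
    }
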
+ 1 - 1
thirdparty/harfbuzz/src/hb-limits.hh

@@ -106,7 +106,7 @@
 #endif
 
 #ifndef HB_COLRV1_MAX_EDGE_COUNT
-#define HB_COLRV1_MAX_EDGE_COUNT 1024
+#define HB_COLRV1_MAX_EDGE_COUNT 65536
 #endif
 
 

+ 1 - 1
thirdparty/harfbuzz/src/hb-map.cc

@@ -365,7 +365,7 @@ hb_map_update (hb_map_t *map,
  * @key: (out): Key retrieved
  * @value: (out): Value retrieved
  *
- * Fetches the next key/value paire in @map.
+ * Fetches the next key/value pair in @map.
  *
  * Set @idx to -1 to get started.
  *

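The corrected sentence documents the iteration protocol for hb_map_next (); a minimal loop over a populated map looks like this (map is assumed to be a valid hb_map_t *):

    int idx = -1;                     /* -1 starts the iteration */
    hb_codepoint_t key, value;
    while (hb_map_next (map, &idx, &key, &value))
    {
      /* use key / value; traversal order is unspecified */
    }
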
+ 17 - 12
thirdparty/harfbuzz/src/hb-map.hh

@@ -104,12 +104,12 @@ struct hb_hashmap_t
     hb_pair_t<const K &, V &> get_pair_ref() { return hb_pair_t<const K &, V &> (key, value); }
 
     uint32_t total_hash () const
-    { return (hash * 31) + hb_hash (value); }
+    { return (hash * 31u) + hb_hash (value); }
 
-    static constexpr bool is_trivial = std::is_trivially_constructible<K>::value &&
-				       std::is_trivially_destructible<K>::value &&
-				       std::is_trivially_constructible<V>::value &&
-				       std::is_trivially_destructible<V>::value;
+    static constexpr bool is_trivial = hb_is_trivially_constructible(K) &&
+				       hb_is_trivially_destructible(K) &&
+				       hb_is_trivially_constructible(V) &&
+				       hb_is_trivially_destructible(V);
   };
 
   hb_object_header_t header;
@@ -276,6 +276,11 @@ struct hb_hashmap_t
     uint32_t hash = hb_hash (key);
     return set_with_hash (std::move (key), hash, std::forward<VV> (value), overwrite);
   }
+  bool add (const K &key)
+  {
+    uint32_t hash = hb_hash (key);
+    return set_with_hash (key, hash, item_t::default_value ());
+  }
 
   const V& get_with_hash (const K &key, uint32_t hash) const
   {
@@ -393,37 +398,37 @@ struct hb_hashmap_t
 
   auto iter_items () const HB_AUTO_RETURN
   (
-    + hb_iter (items, size ())
+    + hb_iter (items, this->size ())
     | hb_filter (&item_t::is_real)
   )
   auto iter_ref () const HB_AUTO_RETURN
   (
-    + iter_items ()
+    + this->iter_items ()
     | hb_map (&item_t::get_pair_ref)
   )
   auto iter () const HB_AUTO_RETURN
   (
-    + iter_items ()
+    + this->iter_items ()
     | hb_map (&item_t::get_pair)
   )
   auto keys_ref () const HB_AUTO_RETURN
   (
-    + iter_items ()
+    + this->iter_items ()
     | hb_map (&item_t::get_key)
   )
   auto keys () const HB_AUTO_RETURN
   (
-    + keys_ref ()
+    + this->keys_ref ()
     | hb_map (hb_ridentity)
   )
   auto values_ref () const HB_AUTO_RETURN
   (
-    + iter_items ()
+    + this->iter_items ()
     | hb_map (&item_t::get_value)
   )
   auto values () const HB_AUTO_RETURN
   (
-    + values_ref ()
+    + this->values_ref ()
     | hb_map (hb_ridentity)
   )
 

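The new add () overload stores a key with the item's default value, which is what lets a plain map double as a membership set (as hb-ft-colr.hh does above with current_glyphs / current_layers). Sketch only, using internal types and an assumed gid:

    hb_map_t seen;
    seen.add (gid);                   /* key recorded, value left at its default */
    if (seen.has (gid)) { /* already recorded */ }
    seen.del (gid);
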
+ 1 - 1
thirdparty/harfbuzz/src/hb-null.hh

@@ -85,7 +85,7 @@ using hb_null_size = _hb_null_size<T, void>;
 template <typename T, typename>
 struct _hb_static_size : hb_integral_constant<unsigned, sizeof (T)> {};
 template <typename T>
-struct _hb_static_size<T, hb_void_t<decltype (T::min_size)>> : hb_integral_constant<unsigned, T::static_size> {};
+struct _hb_static_size<T, hb_void_t<decltype (T::static_size)>> : hb_integral_constant<unsigned, T::static_size> {};
 template <typename T>
 using hb_static_size = _hb_static_size<T, void>;
 #define hb_static_size(T) hb_static_size<T>::value

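The specialization now keys off the member it actually reads (static_size rather than min_size). The same detection idiom in self-contained standard C++ (std::void_t standing in for hb_void_t):

    #include <type_traits>

    template <typename T, typename = void>
    struct static_size_of : std::integral_constant<unsigned, sizeof (T)> {};

    /* Chosen only when T::static_size is a valid expression. */
    template <typename T>
    struct static_size_of<T, std::void_t<decltype (T::static_size)>>
      : std::integral_constant<unsigned, T::static_size> {};
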
+ 4 - 4
thirdparty/harfbuzz/src/hb-number-parser.hh

@@ -31,7 +31,7 @@
 #include "hb.hh"
 
 
-#line 32 "hb-number-parser.hh"
+#line 35 "hb-number-parser.hh"
 static const unsigned char _double_parser_trans_keys[] = {
 	0u, 0u, 43u, 57u, 46u, 57u, 48u, 57u, 43u, 57u, 48u, 57u, 48u, 101u, 48u, 57u, 
 	46u, 101u, 0
@@ -135,12 +135,12 @@ strtod_rl (const char *p, const char **end_ptr /* IN/OUT */)
 
   int cs;
   
-#line 132 "hb-number-parser.hh"
+#line 139 "hb-number-parser.hh"
 	{
 	cs = double_parser_start;
 	}
 
-#line 135 "hb-number-parser.hh"
+#line 144 "hb-number-parser.hh"
 	{
 	int _slen;
 	int _trans;
@@ -198,7 +198,7 @@ _resume:
 	  exp_overflow = true;
 }
 	break;
-#line 187 "hb-number-parser.hh"
+#line 202 "hb-number-parser.hh"
 	}
 
 _again:

+ 373 - 30
thirdparty/harfbuzz/src/hb-ot-layout-common.hh

@@ -191,27 +191,15 @@ struct hb_collect_variation_indices_context_t :
   static return_t default_return_value () { return hb_empty_t (); }
 
   hb_set_t *layout_variation_indices;
-  hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map;
-  hb_vector_t<int> *normalized_coords;
-  const VariationStore *var_store;
   const hb_set_t *glyph_set;
   const hb_map_t *gpos_lookups;
-  float *store_cache;
 
   hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
-					  hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map_,
-					  hb_vector_t<int> *normalized_coords_,
-					  const VariationStore *var_store_,
 					  const hb_set_t *glyph_set_,
-					  const hb_map_t *gpos_lookups_,
-					  float *store_cache_) :
+					  const hb_map_t *gpos_lookups_) :
 					layout_variation_indices (layout_variation_indices_),
-					varidx_delta_map (varidx_delta_map_),
-					normalized_coords (normalized_coords_),
-					var_store (var_store_),
 					glyph_set (glyph_set_),
-					gpos_lookups (gpos_lookups_),
-					store_cache (store_cache_) {}
+					gpos_lookups (gpos_lookups_) {}
 };
 
 template<typename OutputArray>
@@ -2300,6 +2288,158 @@ static inline bool ClassDef_serialize (hb_serialize_context_t *c,
  * Item Variation Store
  */
 
+/* ported from fonttools (class _Encoding) */
+struct delta_row_encoding_t
+{
+  /* each byte represents a region, value is one of 0/1/2/4, which means bytes
+   * needed for this region */
+  hb_vector_t<uint8_t> chars;
+  unsigned width = 0;
+  hb_vector_t<uint8_t> columns;
+  unsigned overhead = 0;
+  hb_vector_t<const hb_vector_t<int>*> items;
+
+  delta_row_encoding_t () = default;
+  delta_row_encoding_t (hb_vector_t<uint8_t>&& chars_,
+                        const hb_vector_t<int>* row = nullptr) :
+                        delta_row_encoding_t ()
+
+  {
+    chars = std::move (chars_);
+    width = get_width ();
+    columns = get_columns ();
+    overhead = get_chars_overhead (columns);
+    if (row) items.push (row);
+  }
+
+  bool is_empty () const
+  { return !items; }
+
+  static hb_vector_t<uint8_t> get_row_chars (const hb_vector_t<int>& row)
+  {
+    hb_vector_t<uint8_t> ret;
+    if (!ret.alloc (row.length)) return ret;
+
+    bool long_words = false;
+
+    /* 0/1/2 byte encoding */
+    for (int i = row.length - 1; i >= 0; i--)
+    {
+      int v =  row.arrayZ[i];
+      if (v == 0)
+        ret.push (0);
+      else if (v > 32767 || v < -32768)
+      {
+        long_words = true;
+        break;
+      }
+      else if (v > 127 || v < -128)
+        ret.push (2);
+      else
+        ret.push (1);
+    }
+
+    if (!long_words)
+      return ret;
+
+    /* redo, 0/2/4 bytes encoding */
+    ret.reset ();
+    for (int i = row.length - 1; i >= 0; i--)
+    {
+      int v =  row.arrayZ[i];
+      if (v == 0)
+        ret.push (0);
+      else if (v > 32767 || v < -32768)
+        ret.push (4);
+      else
+        ret.push (2);
+    }
+    return ret;
+  }
+
+  inline unsigned get_width ()
+  {
+    unsigned ret = + hb_iter (chars)
+                   | hb_reduce (hb_add, 0u)
+                   ;
+    return ret;
+  }
+
+  hb_vector_t<uint8_t> get_columns ()
+  {
+    hb_vector_t<uint8_t> cols;
+    cols.alloc (chars.length);
+    for (auto v : chars)
+    {
+      uint8_t flag = v ? 1 : 0;
+      cols.push (flag);
+    }
+    return cols;
+  }
+
+  static inline unsigned get_chars_overhead (const hb_vector_t<uint8_t>& cols)
+  {
+    unsigned c = 4 + 6; // 4 bytes for LOffset, 6 bytes for VarData header
+    unsigned cols_bit_count = 0;
+    for (auto v : cols)
+      if (v) cols_bit_count++;
+    return c + cols_bit_count * 2;
+  }
+
+  unsigned get_gain () const
+  {
+    int count = items.length;
+    return hb_max (0, (int) overhead - count);
+  }
+
+  int gain_from_merging (const delta_row_encoding_t& other_encoding) const
+  {
+    int combined_width = 0;
+    for (unsigned i = 0; i < chars.length; i++)
+      combined_width += hb_max (chars.arrayZ[i], other_encoding.chars.arrayZ[i]);
+   
+    hb_vector_t<uint8_t> combined_columns;
+    combined_columns.alloc (columns.length);
+    for (unsigned i = 0; i < columns.length; i++)
+      combined_columns.push (columns.arrayZ[i] | other_encoding.columns.arrayZ[i]);
+    
+    int combined_overhead = get_chars_overhead (combined_columns);
+    int combined_gain = (int) overhead + (int) other_encoding.overhead - combined_overhead
+                        - (combined_width - (int) width) * items.length
+                        - (combined_width - (int) other_encoding.width) * other_encoding.items.length;
+
+    return combined_gain;
+  }
+
+  static int cmp (const void *pa, const void *pb)
+  {
+    const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa;
+    const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb;
+
+    int gain_a = a->get_gain ();
+    int gain_b = b->get_gain ();
+
+    if (gain_a != gain_b)
+      return gain_a - gain_b;
+
+    return (b->chars).as_array ().cmp ((a->chars).as_array ());
+  }
+
+  static int cmp_width (const void *pa, const void *pb)
+  {
+    const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa;
+    const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb;
+
+    if (a->width != b->width)
+      return (int) a->width - (int) b->width;
+
+    return (b->chars).as_array ().cmp ((a->chars).as_array ());
+  }
+
+  bool add_row (const hb_vector_t<int>* row)
+  { return items.push (row); }
+};
+
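To make the cost model concrete (illustrative numbers, not taken from the diff): each encoding costs 4 + 6 = 10 bytes of fixed overhead plus 2 bytes per region column in use. Two encodings over three regions with chars {2,0,0} (width 2, overhead 12) and {0,1,1} (width 2, overhead 14) would merge into chars {2,1,1} (width 4, overhead 16), so gain_from_merging = 12 + 14 - 16 - (4 - 2) * rows_a - (4 - 2) * rows_b. With 2 and 1 rows the gain is +4 and merging wins; with 100 and 10 rows it is -210 and the encodings stay separate.
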
 struct VarRegionAxis
 {
   float evaluate (int coord) const
@@ -2334,6 +2474,12 @@ struct VarRegionAxis
      * have to do that at runtime. */
   }
 
+  bool serialize (hb_serialize_context_t *c) const
+  {
+    TRACE_SERIALIZE (this);
+    return_trace (c->embed (this));
+  }
+
   public:
   F2DOT14	startCoord;
   F2DOT14	peakCoord;
@@ -2391,6 +2537,47 @@ struct VarRegionList
     return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
   }
 
+  bool serialize (hb_serialize_context_t *c,
+                  const hb_vector_t<hb_tag_t>& axis_tags,
+                  const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& regions)
+  {
+    TRACE_SERIALIZE (this);
+    unsigned axis_count = axis_tags.length;
+    unsigned region_count = regions.length;
+    if (!axis_count || !region_count) return_trace (false);
+    if (unlikely (hb_unsigned_mul_overflows (axis_count * region_count,
+                                             VarRegionAxis::static_size))) return_trace (false);
+    if (unlikely (!c->extend_min (this))) return_trace (false);
+    axisCount = axis_count;
+    regionCount = region_count;
+
+    for (unsigned r = 0; r < region_count; r++)
+    {
+      const auto& region = regions[r];
+      for (unsigned i = 0; i < axis_count; i++)
+      {
+        hb_tag_t tag = axis_tags.arrayZ[i];
+        VarRegionAxis var_region_rec;
+        Triple *coords;
+        if (region->has (tag, &coords))
+        {
+          var_region_rec.startCoord.set_float (coords->minimum);
+          var_region_rec.peakCoord.set_float (coords->middle);
+          var_region_rec.endCoord.set_float (coords->maximum);
+        }
+        else
+        {
+          var_region_rec.startCoord.set_int (0);
+          var_region_rec.peakCoord.set_int (0);
+          var_region_rec.endCoord.set_int (0);
+        }
+        if (!var_region_rec.serialize (c))
+          return_trace (false);
+      }
+    }
+    return_trace (true);
+  }
+
   bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_inc_bimap_t &region_map)
   {
     TRACE_SERIALIZE (this);
@@ -2411,6 +2598,45 @@ struct VarRegionList
     return_trace (true);
   }
 
+  bool get_var_region (unsigned region_index,
+                       const hb_map_t& axes_old_index_tag_map,
+                       hb_hashmap_t<hb_tag_t, Triple>& axis_tuples /* OUT */) const
+  {
+    if (region_index >= regionCount) return false;
+    const VarRegionAxis* axis_region = axesZ.arrayZ + (region_index * axisCount);
+    for (unsigned i = 0; i < axisCount; i++)
+    {
+      hb_tag_t *axis_tag;
+      if (!axes_old_index_tag_map.has (i, &axis_tag))
+        return false;
+
+      float min_val = axis_region->startCoord.to_float ();
+      float def_val = axis_region->peakCoord.to_float ();
+      float max_val = axis_region->endCoord.to_float ();
+
+      if (def_val != 0.f)
+        axis_tuples.set (*axis_tag, Triple (min_val, def_val, max_val));
+      axis_region++;
+    }
+    return !axis_tuples.in_error ();
+  }
+
+  bool get_var_regions (const hb_map_t& axes_old_index_tag_map,
+                        hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions /* OUT */) const
+  {
+    if (!regions.alloc (regionCount))
+      return false;
+
+    for (unsigned i = 0; i < regionCount; i++)
+    {
+      hb_hashmap_t<hb_tag_t, Triple> axis_tuples;
+      if (!get_var_region (i, axes_old_index_tag_map, axis_tuples))
+        return false;
+      regions.push (std::move (axis_tuples));
+    }
+    return !regions.in_error ();
+  }
+
   unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
 
   public:
@@ -2430,6 +2656,9 @@ struct VarData
 
   unsigned int get_region_index_count () const
   { return regionIndices.len; }
+  
+  unsigned get_region_index (unsigned i) const
+  { return i >= regionIndices.len ? -1 : regionIndices[i]; }
 
   unsigned int get_row_size () const
   { return (wordCount () + regionIndices.len) * (longWords () ? 2 : 1); }
@@ -2505,6 +2734,81 @@ struct VarData
 				  get_row_size ()));
   }
 
+  bool serialize (hb_serialize_context_t *c,
+                  bool has_long,
+                  const hb_vector_t<const hb_vector_t<int>*>& rows)
+  {
+    TRACE_SERIALIZE (this);
+    if (unlikely (!c->extend_min (this))) return_trace (false);
+    unsigned row_count = rows.length;
+    itemCount = row_count;
+
+    int min_threshold = has_long ? -65536 : -128;
+    int max_threshold = has_long ? +65535 : +127;
+    enum delta_size_t { kZero=0, kNonWord, kWord };
+    hb_vector_t<delta_size_t> delta_sz;
+    unsigned num_regions = rows[0]->length;
+    if (!delta_sz.resize (num_regions))
+      return_trace (false);
+
+    unsigned word_count = 0;
+    for (unsigned r = 0; r < num_regions; r++)
+    {
+      for (unsigned i = 0; i < row_count; i++)
+      {
+        int delta = rows[i]->arrayZ[r];
+        if (delta < min_threshold || delta > max_threshold)
+        {
+          delta_sz[r] = kWord;
+          word_count++;
+          break;
+        }
+        else if (delta != 0)
+        {
+          delta_sz[r] = kNonWord;
+        }
+      }
+    }
+
+    /* reorder regions: words and then non-words*/
+    unsigned word_index = 0;
+    unsigned non_word_index = word_count;
+    hb_map_t ri_map;
+    for (unsigned r = 0; r < num_regions; r++)
+    {
+      if (!delta_sz[r]) continue;
+      unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
+      if (!ri_map.set (new_r, r))
+        return_trace (false);
+    }
+
+    wordSizeCount = word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);
+
+    unsigned ri_count = ri_map.get_population ();
+    regionIndices.len = ri_count;
+    if (unlikely (!c->extend (this))) return_trace (false);
+
+    for (unsigned r = 0; r < ri_count; r++)
+    {
+      hb_codepoint_t *idx;
+      if (!ri_map.has (r, &idx))
+        return_trace (false);
+      regionIndices[r] = *idx;
+    }
+
+    HBUINT8 *delta_bytes = get_delta_bytes ();
+    unsigned row_size = get_row_size ();
+    for (unsigned int i = 0; i < row_count; i++)
+    {
+      for (unsigned int r = 0; r < ri_count; r++)
+      {
+        int delta = rows[i]->arrayZ[ri_map[r]];
+        set_item_delta_fast (i, r, delta, delta_bytes, row_size);
+      }
+    }
+    return_trace (true);
+  }
+
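A concrete reading of the reordering above (illustrative deltas): if the four regions classify as {kNonWord, kWord, kZero, kWord}, the two word regions take new indices 0 and 1, the non-word region takes index 2, and the all-zero region is dropped. regionIndices then lists the original region indices in that order, and wordSizeCount stores the word count 2, with the 0x8000 LONG_WORDS bit set only when some delta needed 32 bits.
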
   bool serialize (hb_serialize_context_t *c,
 		  const VarData *src,
 		  const hb_inc_bimap_t &inner_map,
@@ -2625,13 +2929,15 @@ struct VarData
     }
   }
 
-  protected:
+  public:
   const HBUINT8 *get_delta_bytes () const
   { return &StructAfter<HBUINT8> (regionIndices); }
 
+  protected:
   HBUINT8 *get_delta_bytes ()
   { return &StructAfter<HBUINT8> (regionIndices); }
 
+  public:
   int32_t get_item_delta_fast (unsigned int item, unsigned int region,
 			       const HBUINT8 *delta_bytes, unsigned row_size) const
   {
@@ -2662,6 +2968,7 @@ struct VarData
 				 get_row_size ());
   }
 
+  protected:
   void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta,
 			    HBUINT8 *delta_bytes, unsigned row_size)
   {
@@ -2704,6 +3011,7 @@ struct VarData
 
 struct VariationStore
 {
+  friend struct item_variations_t;
   using cache_t = VarRegionList::cache_t;
 
   cache_t *create_cache () const
@@ -2774,6 +3082,36 @@ struct VariationStore
 		  dataSets.sanitize (c, this));
   }
 
+  bool serialize (hb_serialize_context_t *c,
+                  bool has_long,
+                  const hb_vector_t<hb_tag_t>& axis_tags,
+                  const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& region_list,
+                  const hb_vector_t<delta_row_encoding_t>& vardata_encodings)
+  {
+    TRACE_SERIALIZE (this);
+#ifdef HB_NO_VAR
+    return_trace (false);
+#endif
+    if (unlikely (!c->extend_min (this))) return_trace (false);
+    
+    format = 1;
+    if (!regions.serialize_serialize (c, axis_tags, region_list))
+      return_trace (false);
+
+    unsigned num_var_data = vardata_encodings.length;
+    if (!num_var_data) return_trace (false);
+    if (unlikely (!c->check_assign (dataSets.len, num_var_data,
+                                    HB_SERIALIZE_ERROR_INT_OVERFLOW)))
+      return_trace (false);
+
+    if (unlikely (!c->extend (dataSets))) return_trace (false);
+    for (unsigned i = 0; i < num_var_data; i++)
+      if (!dataSets[i].serialize_serialize (c, has_long, vardata_encodings[i].items))
+        return_trace (false);
+    
+    return_trace (true);
+  }
+
   bool serialize (hb_serialize_context_t *c,
 		  const VariationStore *src,
 		  const hb_array_t <const hb_inc_bimap_t> &inner_maps)
@@ -2903,6 +3241,22 @@ struct VariationStore
      return dataSets.len;
    }
 
+  const VarData& get_sub_table (unsigned i) const
+  {
+#ifdef HB_NO_VAR
+     return Null (VarData);
+#endif
+     return this+dataSets[i];
+  }
+
+  const VarRegionList& get_region_list () const
+  {
+#ifdef HB_NO_VAR
+     return Null (VarRegionList);
+#endif
+     return this+regions;
+  }
+
   protected:
   HBUINT16				format;
   Offset32To<VarRegionList>		regions;
@@ -3215,8 +3569,7 @@ struct FeatureTableSubstitutionRecord
     if (unlikely (!out)) return_trace (false);
 
     out->featureIndex = c->feature_index_map->get (featureIndex);
-    bool ret = out->feature.serialize_subset (c->subset_context, feature, base, c);
-    return_trace (ret);
+    return_trace (out->feature.serialize_subset (c->subset_context, feature, base, c));
   }
 
   bool sanitize (hb_sanitize_context_t *c, const void *base) const
@@ -3615,23 +3968,13 @@ struct VariationDevice
     auto *out = c->embed (this);
     if (unlikely (!out)) return_trace (nullptr);
 
-    unsigned new_idx = hb_first (*v);
-    out->varIdx = new_idx;
+    if (!c->check_assign (out->varIdx, hb_first (*v), HB_SERIALIZE_ERROR_INT_OVERFLOW))
+      return_trace (nullptr);
     return_trace (out);
   }
 
   void collect_variation_index (hb_collect_variation_indices_context_t *c) const
-  {
-    c->layout_variation_indices->add (varIdx);
-    int delta = 0;
-    if (c->normalized_coords && c->var_store)
-      delta = roundf (c->var_store->get_delta (varIdx, c->normalized_coords->arrayZ,
-                                               c->normalized_coords->length, c->store_cache));
-
-    /* set new varidx to HB_OT_LAYOUT_NO_VARIATIONS_INDEX here, will remap
-     * varidx later*/
-    c->varidx_delta_map->set (varIdx, hb_pair_t<unsigned, int> (HB_OT_LAYOUT_NO_VARIATIONS_INDEX, delta));
-  }
+  { c->layout_variation_indices->add (varIdx); }
 
   bool sanitize (hb_sanitize_context_t *c) const
   {

+ 7 - 2
thirdparty/harfbuzz/src/hb-ot-layout.cc

@@ -1308,11 +1308,16 @@ hb_ot_layout_collect_features_map (hb_face_t      *face,
   unsigned int count = l.get_feature_indexes (0, nullptr, nullptr);
   feature_map->alloc (count);
 
-  for (unsigned int i = 0; i < count; i++)
+  /* Loop in reverse, such that earlier entries win. That emulates
+   * a linear search, which seems to be what other implementations do.
+   * We found that with arialuni_t.ttf, the "ur" language system has
+   * duplicate features, and the earlier ones work but not later ones.
+   */
+  for (unsigned int i = count; i; i--)
   {
     unsigned feature_index = 0;
     unsigned feature_count = 1;
-    l.get_feature_indexes (i, &feature_count, &feature_index);
+    l.get_feature_indexes (i - 1, &feature_count, &feature_index);
     if (!feature_count)
       break;
     hb_tag_t feature_tag = g.get_feature_tag (feature_index);

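A generic illustration of why reverse iteration makes the earlier entry win when writes overwrite (std::map stands in for the feature map; the tag and index values are made up):

    #include <map>

    std::map<unsigned, unsigned> m;
    unsigned tags[]  = { 10, 10, 20 };   /* duplicate tag at indices 0 and 1 */
    unsigned feats[] = { 100, 101, 102 };
    for (unsigned i = 3; i; i--)
      m[tags[i - 1]] = feats[i - 1];     /* later writes overwrite earlier ones */
    /* m[10] == 100: the first occurrence survives. */
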
+ 9 - 14
thirdparty/harfbuzz/src/hb-ot-os2-table.hh

@@ -246,24 +246,19 @@ struct OS2
     }
 #endif
 
-    if (c->plan->user_axes_location.has (HB_TAG ('w','g','h','t')) &&
-        !c->plan->pinned_at_default)
+    Triple *axis_range;
+    if (c->plan->user_axes_location.has (HB_TAG ('w','g','h','t'), &axis_range))
     {
-      float weight_class = c->plan->user_axes_location.get (HB_TAG ('w','g','h','t')).middle;
-      if (!c->serializer->check_assign (os2_prime->usWeightClass,
-                                        roundf (hb_clamp (weight_class, 1.0f, 1000.0f)),
-                                        HB_SERIALIZE_ERROR_INT_OVERFLOW))
-        return_trace (false);
+      unsigned weight_class = static_cast<unsigned> (roundf (hb_clamp (axis_range->middle, 1.0f, 1000.0f)));
+      if (os2_prime->usWeightClass != weight_class)
+        os2_prime->usWeightClass = weight_class;
     }
 
-    if (c->plan->user_axes_location.has (HB_TAG ('w','d','t','h')) &&
-        !c->plan->pinned_at_default)
+    if (c->plan->user_axes_location.has (HB_TAG ('w','d','t','h'), &axis_range))
     {
-      float width = c->plan->user_axes_location.get (HB_TAG ('w','d','t','h')).middle;
-      if (!c->serializer->check_assign (os2_prime->usWidthClass,
-                                        roundf (map_wdth_to_widthclass (width)),
-                                        HB_SERIALIZE_ERROR_INT_OVERFLOW))
-        return_trace (false);
+      unsigned width_class = static_cast<unsigned> (roundf (map_wdth_to_widthclass (axis_range->middle)));
+      if (os2_prime->usWidthClass != width_class)
+        os2_prime->usWidthClass = width_class;
     }
 
     if (c->plan->flags & HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES)

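In practice (illustrative value): instancing wght to 352.4 yields usWeightClass = roundf (hb_clamp (352.4f, 1.0f, 1000.0f)) = 352, and the field is only assigned when the computed class differs from what the table already holds, so an instancing pass that lands on the existing value leaves the bytes untouched.
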
+ 5 - 5
thirdparty/harfbuzz/src/hb-ot-post-table.hh

@@ -113,12 +113,12 @@ struct post
     }
 #endif
 
-    if (c->plan->user_axes_location.has (HB_TAG ('s','l','n','t')) &&
-        !c->plan->pinned_at_default)
+    Triple *axis_range;
+    if (c->plan->user_axes_location.has (HB_TAG ('s','l','n','t'), &axis_range))
     {
-      float italic_angle = c->plan->user_axes_location.get (HB_TAG ('s','l','n','t')).middle;
-      italic_angle = hb_max (-90.f, hb_min (italic_angle, 90.f));
-      post_prime->italicAngle.set_float (italic_angle);
+      float italic_angle = hb_max (-90.f, hb_min (axis_range->middle, 90.f));
+      if (post_prime->italicAngle.to_float () != italic_angle)
+        post_prime->italicAngle.set_float (italic_angle);
     }
 
     if (glyph_names && version.major == 2)

+ 1 - 1
thirdparty/harfbuzz/src/hb-ot-shape.cc

@@ -1054,7 +1054,7 @@ hb_ot_position_plan (const hb_ot_shape_context_t *c)
    * direction is backward we don't shift and it will end up
    * hanging over the next glyph after the final reordering.
    *
-   * Note: If fallback positinoing happens, we don't care about
+   * Note: If fallback positioning happens, we don't care about
    * this as it will be overridden.
    */
   bool adjust_offsets_when_zeroing = c->plan->adjust_mark_positioning_when_zeroing &&

+ 4 - 4
thirdparty/harfbuzz/src/hb-ot-shaper-arabic-joining-list.hh

@@ -6,10 +6,10 @@
  *
  * on files with these headers:
  *
- * # ArabicShaping-15.0.0.txt
- * # Date: 2022-02-14, 18:50:00 GMT [KW, RP]
- * # Scripts-15.0.0.txt
- * # Date: 2022-04-26, 23:15:02 GMT
+ * # ArabicShaping-15.1.0.txt
+ * # Date: 2023-01-05
+ * # Scripts-15.1.0.txt
+ * # Date: 2023-07-28, 16:01:07 GMT
  */
 
 #ifndef HB_OT_SHAPER_ARABIC_JOINING_LIST_HH

+ 4 - 4
thirdparty/harfbuzz/src/hb-ot-shaper-arabic-table.hh

@@ -6,10 +6,10 @@
  *
  * on files with these headers:
  *
- * # ArabicShaping-15.0.0.txt
- * # Date: 2022-02-14, 18:50:00 GMT [KW, RP]
- * # Blocks-15.0.0.txt
- * # Date: 2022-01-28, 20:58:00 GMT [KW]
+ * # ArabicShaping-15.1.0.txt
+ * # Date: 2023-01-05
+ * # Blocks-15.1.0.txt
+ * # Date: 2023-07-28, 15:47:20 GMT
  * UnicodeData.txt does not have a header.
  */
 

+ 25 - 6
thirdparty/harfbuzz/src/hb-ot-shaper-arabic.cc

@@ -486,8 +486,10 @@ apply_stch (const hb_ot_shape_plan_t *plan HB_UNUSED,
   if (likely (!(buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_ARABIC_HAS_STCH)))
     return;
 
-  /* The Arabic shaper currently always processes in RTL mode, so we should
-   * stretch / position the stretched pieces to the left / preceding glyphs. */
+  bool rtl = buffer->props.direction == HB_DIRECTION_RTL;
+
+  if (!rtl)
+    buffer->reverse ();
 
   /* We do a two pass implementation:
    * First pass calculates the exact number of extra glyphs we need,
@@ -577,7 +579,10 @@ apply_stch (const hb_ot_shape_plan_t *plan HB_UNUSED,
 	++n_copies;
 	hb_position_t excess = (n_copies + 1) * sign * w_repeating - sign * w_remaining;
 	if (excess > 0)
+	{
 	  extra_repeat_overlap = excess / (n_copies * n_repeating);
+	  w_remaining = 0;
+	}
       }
 
       if (step == MEASURE)
@@ -588,7 +593,7 @@ apply_stch (const hb_ot_shape_plan_t *plan HB_UNUSED,
       else
       {
 	buffer->unsafe_to_break (context, end);
-	hb_position_t x_offset = 0;
+	hb_position_t x_offset = w_remaining / 2;
 	for (unsigned int k = end; k > start; k--)
 	{
 	  hb_position_t width = font->get_glyph_h_advance (info[k - 1].codepoint);
@@ -599,16 +604,27 @@ apply_stch (const hb_ot_shape_plan_t *plan HB_UNUSED,
 
 	  DEBUG_MSG (ARABIC, nullptr, "appending %u copies of glyph %u; j=%u",
 		     repeat, info[k - 1].codepoint, j);
+	  pos[k - 1].x_advance = 0;
 	  for (unsigned int n = 0; n < repeat; n++)
 	  {
-	    x_offset -= width;
-	    if (n > 0)
-	      x_offset += extra_repeat_overlap;
+	    if (rtl)
+	    {
+	      x_offset -= width;
+	      if (n > 0)
+		x_offset += extra_repeat_overlap;
+	    }
 	    pos[k - 1].x_offset = x_offset;
 	    /* Append copy. */
 	    --j;
 	    info[j] = info[k - 1];
 	    pos[j] = pos[k - 1];
+
+	    if (!rtl)
+	    {
+	      x_offset += width;
+	      if (n > 0)
+		x_offset -= extra_repeat_overlap;
+	    }
 	  }
 	}
       }
@@ -625,6 +641,9 @@ apply_stch (const hb_ot_shape_plan_t *plan HB_UNUSED,
       buffer->len = new_len;
     }
   }
+
+  if (!rtl)
+    buffer->reverse ();
 }
 
 

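Two things change for the new left-to-right path: the buffer is reversed on entry and again on exit so the existing right-to-left stretching loop runs unchanged, and x_offset then advances forward by one glyph width per copy instead of backward. For the overlap arithmetic (illustrative numbers): if three repeating pieces of 25 units must cover a 70-unit gap, excess = 3 * 25 - 70 = 5, each copy after the first is pulled back by excess / (n_copies * n_repeating) units, and w_remaining is zeroed so the initial x_offset of w_remaining / 2 stays 0; when the copies underfill instead, that halved remainder centers the stretched run.
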
+ 7 - 7
thirdparty/harfbuzz/src/hb-ot-shaper-indic-machine.hh

@@ -53,7 +53,7 @@ enum indic_syllable_type_t {
 };
 
 
-#line 54 "hb-ot-shaper-indic-machine.hh"
+#line 57 "hb-ot-shaper-indic-machine.hh"
 #define indic_syllable_machine_ex_A 9u
 #define indic_syllable_machine_ex_C 1u
 #define indic_syllable_machine_ex_CM 16u
@@ -76,7 +76,7 @@ enum indic_syllable_type_t {
 #define indic_syllable_machine_ex_ZWNJ 5u
 
 
-#line 75 "hb-ot-shaper-indic-machine.hh"
+#line 80 "hb-ot-shaper-indic-machine.hh"
 static const unsigned char _indic_syllable_machine_trans_keys[] = {
 	8u, 8u, 4u, 13u, 5u, 13u, 5u, 13u, 13u, 13u, 4u, 13u, 4u, 13u, 4u, 13u, 
 	8u, 8u, 5u, 13u, 5u, 13u, 13u, 13u, 4u, 13u, 4u, 13u, 4u, 13u, 4u, 13u, 
@@ -460,7 +460,7 @@ find_syllables_indic (hb_buffer_t *buffer)
   int cs;
   hb_glyph_info_t *info = buffer->info;
   
-#line 453 "hb-ot-shaper-indic-machine.hh"
+#line 464 "hb-ot-shaper-indic-machine.hh"
 	{
 	cs = indic_syllable_machine_start;
 	ts = 0;
@@ -476,7 +476,7 @@ find_syllables_indic (hb_buffer_t *buffer)
 
   unsigned int syllable_serial = 1;
   
-#line 465 "hb-ot-shaper-indic-machine.hh"
+#line 480 "hb-ot-shaper-indic-machine.hh"
 	{
 	int _slen;
 	int _trans;
@@ -490,7 +490,7 @@ _resume:
 #line 1 "NONE"
 	{ts = p;}
 	break;
-#line 477 "hb-ot-shaper-indic-machine.hh"
+#line 494 "hb-ot-shaper-indic-machine.hh"
 	}
 
 	_keys = _indic_syllable_machine_trans_keys + (cs<<1);
@@ -593,7 +593,7 @@ _eof_trans:
 #line 114 "hb-ot-shaper-indic-machine.rl"
 	{act = 6;}
 	break;
-#line 559 "hb-ot-shaper-indic-machine.hh"
+#line 597 "hb-ot-shaper-indic-machine.hh"
 	}
 
 _again:
@@ -602,7 +602,7 @@ _again:
 #line 1 "NONE"
 	{ts = 0;}
 	break;
-#line 566 "hb-ot-shaper-indic-machine.hh"
+#line 606 "hb-ot-shaper-indic-machine.hh"
 	}
 
 	if ( ++p != pe )

+ 6 - 6
thirdparty/harfbuzz/src/hb-ot-shaper-indic-table.cc

@@ -6,12 +6,12 @@
  *
  * on files with these headers:
  *
- * # IndicSyllabicCategory-15.0.0.txt
- * # Date: 2022-05-26, 02:18:00 GMT [KW, RP]
- * # IndicPositionalCategory-15.0.0.txt
- * # Date: 2022-05-26, 02:18:00 GMT [KW, RP]
- * # Blocks-15.0.0.txt
- * # Date: 2022-01-28, 20:58:00 GMT [KW]
+ * # IndicSyllabicCategory-15.1.0.txt
+ * # Date: 2023-01-05
+ * # IndicPositionalCategory-15.1.0.txt
+ * # Date: 2023-01-05
+ * # Blocks-15.1.0.txt
+ * # Date: 2023-07-28, 15:47:20 GMT
  */
 
 #include "hb.hh"

+ 7 - 7
thirdparty/harfbuzz/src/hb-ot-shaper-khmer-machine.hh

@@ -48,7 +48,7 @@ enum khmer_syllable_type_t {
 };
 
 
-#line 49 "hb-ot-shaper-khmer-machine.hh"
+#line 52 "hb-ot-shaper-khmer-machine.hh"
 #define khmer_syllable_machine_ex_C 1u
 #define khmer_syllable_machine_ex_DOTTEDCIRCLE 11u
 #define khmer_syllable_machine_ex_H 4u
@@ -66,7 +66,7 @@ enum khmer_syllable_type_t {
 #define khmer_syllable_machine_ex_ZWNJ 5u
 
 
-#line 65 "hb-ot-shaper-khmer-machine.hh"
+#line 70 "hb-ot-shaper-khmer-machine.hh"
 static const unsigned char _khmer_syllable_machine_trans_keys[] = {
 	5u, 26u, 5u, 26u, 1u, 15u, 5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 
 	5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 1u, 15u, 5u, 26u, 5u, 26u, 
@@ -294,7 +294,7 @@ find_syllables_khmer (hb_buffer_t *buffer)
   int cs;
   hb_glyph_info_t *info = buffer->info;
   
-#line 287 "hb-ot-shaper-khmer-machine.hh"
+#line 298 "hb-ot-shaper-khmer-machine.hh"
 	{
 	cs = khmer_syllable_machine_start;
 	ts = 0;
@@ -310,7 +310,7 @@ find_syllables_khmer (hb_buffer_t *buffer)
 
   unsigned int syllable_serial = 1;
   
-#line 299 "hb-ot-shaper-khmer-machine.hh"
+#line 314 "hb-ot-shaper-khmer-machine.hh"
 	{
 	int _slen;
 	int _trans;
@@ -324,7 +324,7 @@ _resume:
 #line 1 "NONE"
 	{ts = p;}
 	break;
-#line 311 "hb-ot-shaper-khmer-machine.hh"
+#line 328 "hb-ot-shaper-khmer-machine.hh"
 	}
 
 	_keys = _khmer_syllable_machine_trans_keys + (cs<<1);
@@ -394,7 +394,7 @@ _eof_trans:
 #line 98 "hb-ot-shaper-khmer-machine.rl"
 	{act = 3;}
 	break;
-#line 368 "hb-ot-shaper-khmer-machine.hh"
+#line 398 "hb-ot-shaper-khmer-machine.hh"
 	}
 
 _again:
@@ -403,7 +403,7 @@ _again:
 #line 1 "NONE"
 	{ts = 0;}
 	break;
-#line 375 "hb-ot-shaper-khmer-machine.hh"
+#line 407 "hb-ot-shaper-khmer-machine.hh"
 	}
 
 	if ( ++p != pe )

+ 7 - 7
thirdparty/harfbuzz/src/hb-ot-shaper-myanmar-machine.hh

@@ -50,7 +50,7 @@ enum myanmar_syllable_type_t {
 };
 
 
-#line 51 "hb-ot-shaper-myanmar-machine.hh"
+#line 54 "hb-ot-shaper-myanmar-machine.hh"
 #define myanmar_syllable_machine_ex_A 9u
 #define myanmar_syllable_machine_ex_As 32u
 #define myanmar_syllable_machine_ex_C 1u
@@ -77,7 +77,7 @@ enum myanmar_syllable_type_t {
 #define myanmar_syllable_machine_ex_ZWNJ 5u
 
 
-#line 76 "hb-ot-shaper-myanmar-machine.hh"
+#line 81 "hb-ot-shaper-myanmar-machine.hh"
 static const unsigned char _myanmar_syllable_machine_trans_keys[] = {
 	1u, 41u, 3u, 41u, 5u, 39u, 5u, 8u, 3u, 41u, 3u, 39u, 3u, 39u, 5u, 39u, 
 	5u, 39u, 3u, 39u, 3u, 39u, 3u, 41u, 5u, 39u, 1u, 15u, 3u, 39u, 3u, 39u, 
@@ -443,7 +443,7 @@ find_syllables_myanmar (hb_buffer_t *buffer)
   int cs;
   hb_glyph_info_t *info = buffer->info;
   
-#line 436 "hb-ot-shaper-myanmar-machine.hh"
+#line 447 "hb-ot-shaper-myanmar-machine.hh"
 	{
 	cs = myanmar_syllable_machine_start;
 	ts = 0;
@@ -459,7 +459,7 @@ find_syllables_myanmar (hb_buffer_t *buffer)
 
   unsigned int syllable_serial = 1;
   
-#line 448 "hb-ot-shaper-myanmar-machine.hh"
+#line 463 "hb-ot-shaper-myanmar-machine.hh"
 	{
 	int _slen;
 	int _trans;
@@ -473,7 +473,7 @@ _resume:
 #line 1 "NONE"
 	{ts = p;}
 	break;
-#line 460 "hb-ot-shaper-myanmar-machine.hh"
+#line 477 "hb-ot-shaper-myanmar-machine.hh"
 	}
 
 	_keys = _myanmar_syllable_machine_trans_keys + (cs<<1);
@@ -519,7 +519,7 @@ _eof_trans:
 #line 113 "hb-ot-shaper-myanmar-machine.rl"
 	{te = p;p--;{ found_syllable (myanmar_non_myanmar_cluster); }}
 	break;
-#line 498 "hb-ot-shaper-myanmar-machine.hh"
+#line 523 "hb-ot-shaper-myanmar-machine.hh"
 	}
 
 _again:
@@ -528,7 +528,7 @@ _again:
 #line 1 "NONE"
 	{ts = 0;}
 	break;
-#line 505 "hb-ot-shaper-myanmar-machine.hh"
+#line 532 "hb-ot-shaper-myanmar-machine.hh"
 	}
 
 	if ( ++p != pe )

File diff suppressed because it is too large
+ 784 - 660
thirdparty/harfbuzz/src/hb-ot-shaper-use-machine.hh


File diff suppressed because it is too large
+ 487 - 473
thirdparty/harfbuzz/src/hb-ot-shaper-use-table.hh


+ 2 - 2
thirdparty/harfbuzz/src/hb-ot-shaper-vowel-constraints.cc

@@ -10,8 +10,8 @@
  * # Date: 2015-03-12, 21:17:00 GMT [AG]
  * # Date: 2019-11-08, 23:22:00 GMT [AG]
  *
- * # Scripts-15.0.0.txt
- * # Date: 2022-04-26, 23:15:02 GMT
+ * # Scripts-15.1.0.txt
+ * # Date: 2023-07-28, 16:01:07 GMT
  */
 
 #include "hb.hh"

+ 10 - 7
thirdparty/harfbuzz/src/hb-ot-tag-table.hh

@@ -6,8 +6,8 @@
  *
  * on files with these headers:
  *
- * <meta name="updated_at" content="2022-01-28 10:00 PM" />
- * File-Date: 2022-03-02
+ * <meta name="updated_at" content="2022-09-30 11:47 PM" />
+ * File-Date: 2023-08-02
  */
 
 #ifndef HB_OT_TAG_TABLE_HH
@@ -257,7 +257,7 @@ static const LangTag ot_languages3[] = {
   {HB_TAG('a','i','i',' '),	HB_TAG('S','Y','R',' ')},	/* Assyrian Neo-Aramaic -> Syriac */
 /*{HB_TAG('a','i','o',' '),	HB_TAG('A','I','O',' ')},*/	/* Aiton */
   {HB_TAG('a','i','w',' '),	HB_TAG('A','R','I',' ')},	/* Aari */
-  {HB_TAG('a','j','p',' '),	HB_TAG('A','R','A',' ')},	/* South Levantine Arabic -> Arabic */
+  {HB_TAG('a','j','p',' '),	HB_TAG('A','R','A',' ')},	/* South Levantine Arabic (retired code) -> Arabic */
   {HB_TAG('a','j','t',' '),	HB_TAG('A','R','A',' ')},	/* Judeo-Tunisian Arabic (retired code) -> Arabic */
   {HB_TAG('a','k','b',' '),	HB_TAG('A','K','B',' ')},	/* Batak Angkola */
   {HB_TAG('a','k','b',' '),	HB_TAG('B','T','K',' ')},	/* Batak Angkola -> Batak */
@@ -269,7 +269,7 @@ static const LangTag ot_languages3[] = {
 /*{HB_TAG('a','n','g',' '),	HB_TAG('A','N','G',' ')},*/	/* Old English (ca. 450-1100) -> Anglo-Saxon */
   {HB_TAG('a','o','a',' '),	HB_TAG('C','P','P',' ')},	/* Angolar -> Creoles */
   {HB_TAG('a','p','a',' '),	HB_TAG('A','T','H',' ')},	/* Apache [collection] -> Athapaskan */
-  {HB_TAG('a','p','c',' '),	HB_TAG('A','R','A',' ')},	/* North Levantine Arabic -> Arabic */
+  {HB_TAG('a','p','c',' '),	HB_TAG('A','R','A',' ')},	/* Levantine Arabic -> Arabic */
   {HB_TAG('a','p','d',' '),	HB_TAG('A','R','A',' ')},	/* Sudanese Arabic -> Arabic */
   {HB_TAG('a','p','j',' '),	HB_TAG('A','T','H',' ')},	/* Jicarilla Apache -> Athapaskan */
   {HB_TAG('a','p','k',' '),	HB_TAG('A','T','H',' ')},	/* Kiowa Apache -> Athapaskan */
@@ -1211,6 +1211,7 @@ static const LangTag ot_languages3[] = {
   {HB_TAG('p','p','a',' '),	HB_TAG('B','A','G',' ')},	/* Pao (retired code) -> Baghelkhandi */
   {HB_TAG('p','r','e',' '),	HB_TAG('C','P','P',' ')},	/* Principense -> Creoles */
 /*{HB_TAG('p','r','o',' '),	HB_TAG('P','R','O',' ')},*/	/* Old Provençal (to 1500) -> Provençal / Old Provençal */
+  {HB_TAG('p','r','p',' '),	HB_TAG('G','U','J',' ')},	/* Parsi (retired code) -> Gujarati */
   {HB_TAG('p','r','s',' '),	HB_TAG('D','R','I',' ')},	/* Dari */
   {HB_TAG('p','r','s',' '),	HB_TAG('F','A','R',' ')},	/* Dari -> Persian */
   {HB_TAG('p','s','e',' '),	HB_TAG('M','L','Y',' ')},	/* Central Malay -> Malay */
@@ -1439,7 +1440,7 @@ static const LangTag ot_languages3[] = {
   {HB_TAG('t','c','h',' '),	HB_TAG('C','P','P',' ')},	/* Turks And Caicos Creole English -> Creoles */
   {HB_TAG('t','c','p',' '),	HB_TAG('Q','I','N',' ')},	/* Tawr Chin -> Chin */
   {HB_TAG('t','c','s',' '),	HB_TAG('C','P','P',' ')},	/* Torres Strait Creole -> Creoles */
-  {HB_TAG('t','c','y',' '),	HB_TAG('T','U','L',' ')},	/* Tulu -> Tumbuka */
+  {HB_TAG('t','c','y',' '),	HB_TAG('T','U','L',' ')},	/* Tulu */
   {HB_TAG('t','c','z',' '),	HB_TAG('Q','I','N',' ')},	/* Thado Chin -> Chin */
 /*{HB_TAG('t','d','d',' '),	HB_TAG('T','D','D',' ')},*/	/* Tai Nüa -> Dehong Dai */
   {HB_TAG('t','d','x',' '),	HB_TAG('M','L','G',' ')},	/* Tandroy-Mahafaly Malagasy -> Malagasy */
@@ -1495,8 +1496,8 @@ static const LangTag ot_languages3[] = {
   {HB_TAG('t','t','q',' '),	HB_TAG('T','M','H',' ')},	/* Tawallammat Tamajaq -> Tamashek */
   {HB_TAG('t','t','q',' '),	HB_TAG('B','B','R',' ')},	/* Tawallammat Tamajaq -> Berber */
   {HB_TAG('t','u','a',' '),	HB_TAG_NONE	       },	/* Wiarumus != Turoyo Aramaic */
-  {HB_TAG('t','u','l',' '),	HB_TAG_NONE	       },	/* Tula != Tumbuka */
-/*{HB_TAG('t','u','m',' '),	HB_TAG('T','U','M',' ')},*/	/* Tumbuka -> Tulu */
+  {HB_TAG('t','u','l',' '),	HB_TAG_NONE	       },	/* Tula != Tulu */
+/*{HB_TAG('t','u','m',' '),	HB_TAG('T','U','M',' ')},*/	/* Tumbuka */
   {HB_TAG('t','u','u',' '),	HB_TAG('A','T','H',' ')},	/* Tututni -> Athapaskan */
   {HB_TAG('t','u','v',' '),	HB_TAG_NONE	       },	/* Turkana != Tuvin */
   {HB_TAG('t','u','y',' '),	HB_TAG('K','A','L',' ')},	/* Tugen -> Kalenjin */
@@ -1581,6 +1582,7 @@ static const LangTag ot_languages3[] = {
   {HB_TAG('y','b','a',' '),	HB_TAG_NONE	       },	/* Yala != Yoruba */
   {HB_TAG('y','b','b',' '),	HB_TAG('B','M','L',' ')},	/* Yemba -> Bamileke */
   {HB_TAG('y','b','d',' '),	HB_TAG('A','R','K',' ')},	/* Yangbye (retired code) -> Rakhine */
+  {HB_TAG('y','c','r',' '),	HB_TAG_NONE	       },	/* Yilan Creole != Y-Cree */
   {HB_TAG('y','d','d',' '),	HB_TAG('J','I','I',' ')},	/* Eastern Yiddish -> Yiddish */
 /*{HB_TAG('y','g','p',' '),	HB_TAG('Y','G','P',' ')},*/	/* Gepo */
   {HB_TAG('y','i','h',' '),	HB_TAG('J','I','I',' ')},	/* Western Yiddish -> Yiddish */
@@ -1602,6 +1604,7 @@ static const LangTag ot_languages3[] = {
   {HB_TAG('z','g','n',' '),	HB_TAG('Z','H','A',' ')},	/* Guibian Zhuang -> Zhuang */
   {HB_TAG('z','h','d',' '),	HB_TAG('Z','H','A',' ')},	/* Dai Zhuang -> Zhuang */
   {HB_TAG('z','h','n',' '),	HB_TAG('Z','H','A',' ')},	/* Nong Zhuang -> Zhuang */
+  {HB_TAG('z','k','b',' '),	HB_TAG('K','H','A',' ')},	/* Koibal (retired code) -> Khakass */
   {HB_TAG('z','l','j',' '),	HB_TAG('Z','H','A',' ')},	/* Liujiang Zhuang -> Zhuang */
   {HB_TAG('z','l','m',' '),	HB_TAG('M','L','Y',' ')},	/* Malay */
   {HB_TAG('z','l','n',' '),	HB_TAG('Z','H','A',' ')},	/* Lianshan Zhuang -> Zhuang */

+ 776 - 31
thirdparty/harfbuzz/src/hb-ot-var-common.hh

@@ -27,6 +27,7 @@
 #define HB_OT_VAR_COMMON_HH
 
 #include "hb-ot-layout-common.hh"
+#include "hb-priority-queue.hh"
 
 
 namespace OT {
@@ -230,7 +231,7 @@ struct VarStoreInstancer
   /* according to the spec, if colr table has varStore but does not have
    * varIdxMap, then an implicit identity mapping is used */
   float operator() (uint32_t varIdx, unsigned short offset = 0) const
-  { return varStore->get_delta (varIdxMap ? varIdxMap->map (VarIdx::add (varIdx, offset)) : varIdx + offset, coords); }
+  { return coords ? varStore->get_delta (varIdxMap ? varIdxMap->map (VarIdx::add (varIdx, offset)) : varIdx + offset, coords) : 0; }
 
   const VariationStore *varStore;
   const DeltaSetIndexMap *varIdxMap;
@@ -449,17 +450,26 @@ struct tuple_delta_t
   hb_vector_t<char> compiled_tuple_header;
   hb_vector_t<char> compiled_deltas;
 
+  /* compiled peak coords, empty for non-gvar tuples */
+  hb_vector_t<char> compiled_peak_coords;
+
   tuple_delta_t () = default;
   tuple_delta_t (const tuple_delta_t& o) = default;
 
-  tuple_delta_t (tuple_delta_t&& o) : tuple_delta_t ()
+  friend void swap (tuple_delta_t& a, tuple_delta_t& b)
   {
-    axis_tuples = std::move (o.axis_tuples);
-    indices = std::move (o.indices);
-    deltas_x = std::move (o.deltas_x);
-    deltas_y = std::move (o.deltas_y);
+    hb_swap (a.axis_tuples, b.axis_tuples);
+    hb_swap (a.indices, b.indices);
+    hb_swap (a.deltas_x, b.deltas_x);
+    hb_swap (a.deltas_y, b.deltas_y);
+    hb_swap (a.compiled_tuple_header, b.compiled_tuple_header);
+    hb_swap (a.compiled_deltas, b.compiled_deltas);
+    hb_swap (a.compiled_peak_coords, b.compiled_peak_coords);
   }
 
+  tuple_delta_t (tuple_delta_t&& o) : tuple_delta_t ()
+  { hb_swap (*this, o); }
+
   tuple_delta_t& operator = (tuple_delta_t&& o)
   {
     hb_swap (*this, o);
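The added friend swap lets both the move constructor and the existing move assignment route through a single member list. The same idiom in self-contained standard C++ (generic type, not HarfBuzz code):

    #include <utility>
    #include <vector>

    struct blob_t
    {
      std::vector<int> a, b;

      blob_t () = default;
      blob_t (const blob_t &) = default;

      friend void swap (blob_t &x, blob_t &y) noexcept
      {
        using std::swap;
        swap (x.a, y.a);   /* one place to keep the member list in sync */
        swap (x.b, y.b);
      }

      blob_t (blob_t &&o) noexcept : blob_t () { swap (*this, o); }
      blob_t &operator= (blob_t &&o) noexcept { swap (*this, o); return *this; }
    };
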
@@ -552,13 +562,43 @@ struct tuple_delta_t
     return out;
   }
 
+  bool compile_peak_coords (const hb_map_t& axes_index_map,
+                            const hb_map_t& axes_old_index_tag_map)
+  {
+    unsigned axis_count = axes_index_map.get_population ();
+    if (unlikely (!compiled_peak_coords.alloc (axis_count * F2DOT14::static_size)))
+      return false;
+
+    unsigned orig_axis_count = axes_old_index_tag_map.get_population ();
+    for (unsigned i = 0; i < orig_axis_count; i++)
+    {
+      if (!axes_index_map.has (i))
+        continue;
+
+      hb_tag_t axis_tag = axes_old_index_tag_map.get (i);
+      Triple *coords;
+      F2DOT14 peak_coord;
+      if (axis_tuples.has (axis_tag, &coords))
+        peak_coord.set_float (coords->middle);
+      else
+        peak_coord.set_int (0);
+
+      /* push F2DOT14 value into char vector */
+      int16_t val = peak_coord.to_int ();
+      compiled_peak_coords.push (static_cast<char> (val >> 8));
+      compiled_peak_coords.push (static_cast<char> (val & 0xFF));
+    }
+
+    return !compiled_peak_coords.in_error ();
+  }
+
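For instance (illustrative value): a peak coordinate of 0.5 is the F2DOT14 word 0x2000 (0.5 * 16384), which the loop above appends big-endian as the bytes 0x20, 0x00; axes that axes_index_map has dropped contribute no bytes at all.
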
   /* deltas should be compiled already before we compile tuple
    * variation header cause we need to fill in the size of the
    * serialized data for this tuple variation */
-  //TODO(qxliu):add option to use sharedTuples in gvar
   bool compile_tuple_var_header (const hb_map_t& axes_index_map,
                                  unsigned points_data_length,
-                                 const hb_map_t& axes_old_index_tag_map)
+                                 const hb_map_t& axes_old_index_tag_map,
+                                 const hb_hashmap_t<const hb_vector_t<char>*, unsigned>* shared_tuples_idx_map)
   {
     if (!compiled_deltas) return false;
 
@@ -574,14 +614,25 @@ struct tuple_delta_t
     hb_array_t<F2DOT14> coords (p, end - p);
 
     /* encode peak coords */
-    unsigned peak_count = encode_peak_coords(coords, flag, axes_index_map, axes_old_index_tag_map);
-    if (!peak_count) return false;
+    unsigned peak_count = 0;
+    unsigned *shared_tuple_idx;
+    if (shared_tuples_idx_map &&
+        shared_tuples_idx_map->has (&compiled_peak_coords, &shared_tuple_idx))
+    {
+      flag = *shared_tuple_idx;
+    }
+    else
+    {
+      peak_count = encode_peak_coords(coords, flag, axes_index_map, axes_old_index_tag_map);
+      if (!peak_count) return false;
+    }
 
     /* encode interim coords, it's optional so returned num could be 0 */
     unsigned interim_count = encode_interm_coords (coords.sub_array (peak_count), flag, axes_index_map, axes_old_index_tag_map);
 
-    //TODO(qxliu): add option to use shared_points in gvar
-    flag |= TupleVariationHeader::TuppleIndex::PrivatePointNumbers;
+    /* pointdata length = 0 implies "use shared points" */
+    if (points_data_length)
+      flag |= TupleVariationHeader::TuppleIndex::PrivatePointNumbers;
 
     unsigned serialized_data_size = points_data_length + compiled_deltas.length;
     TupleVariationHeader *o = reinterpret_cast<TupleVariationHeader *> (compiled_tuple_header.begin ());
@@ -870,6 +921,111 @@ struct tuple_delta_t
     }
     return encoded_len;
   }
+
+  bool calc_inferred_deltas (const contour_point_vector_t& orig_points)
+  {
+    unsigned point_count = orig_points.length;
+    if (point_count != indices.length)
+      return false;
+
+    unsigned ref_count = 0;
+    hb_vector_t<unsigned> end_points;
+
+    for (unsigned i = 0; i < point_count; i++)
+    {
+      if (indices.arrayZ[i])
+        ref_count++;
+      if (orig_points.arrayZ[i].is_end_point)
+        end_points.push (i);
+    }
+    /* all points are referenced, nothing to do */
+    if (ref_count == point_count)
+      return true;
+    if (unlikely (end_points.in_error ())) return false;
+
+    hb_set_t inferred_idxes;
+    unsigned start_point = 0;
+    for (unsigned end_point : end_points)
+    {
+      /* Check the number of unreferenced points in a contour. If no unref points or no ref points, nothing to do. */
+      unsigned unref_count = 0;
+      for (unsigned i = start_point; i < end_point + 1; i++)
+        unref_count += indices.arrayZ[i];
+      unref_count = (end_point - start_point + 1) - unref_count;
+
+      unsigned j = start_point;
+      if (unref_count == 0 || unref_count > end_point - start_point)
+        goto no_more_gaps;
+      for (;;)
+      {
+        /* Locate the next gap of unreferenced points between two referenced points prev and next.
+         * Note that a gap may wrap around at left (start_point) and/or at right (end_point).
+         */
+        unsigned int prev, next, i;
+        for (;;)
+        {
+          i = j;
+          j = next_index (i, start_point, end_point);
+          if (indices.arrayZ[i] && !indices.arrayZ[j]) break;
+        }
+        prev = j = i;
+        for (;;)
+        {
+          i = j;
+          j = next_index (i, start_point, end_point);
+          if (!indices.arrayZ[i] && indices.arrayZ[j]) break;
+        }
+        next = j;
+       /* Infer deltas for all unref points in the gap between prev and next */
+        i = prev;
+        for (;;)
+        {
+          i = next_index (i, start_point, end_point);
+          if (i == next) break;
+          deltas_x.arrayZ[i] = infer_delta (orig_points.arrayZ[i].x, orig_points.arrayZ[prev].x, orig_points.arrayZ[next].x,
+                                            deltas_x.arrayZ[prev], deltas_x.arrayZ[next]);
+          deltas_y.arrayZ[i] = infer_delta (orig_points.arrayZ[i].y, orig_points.arrayZ[prev].y, orig_points.arrayZ[next].y,
+                                            deltas_y.arrayZ[prev], deltas_y.arrayZ[next]);
+          inferred_idxes.add (i);
+          if (--unref_count == 0) goto no_more_gaps;
+        }
+      }
+    no_more_gaps:
+      start_point = end_point + 1;
+    }
+
+    for (unsigned i = 0; i < point_count; i++)
+    {
+      /* if points are not referenced and deltas are not inferred, set to 0.
+       * reference all points for gvar */
+      if ( !indices[i])
+      {
+        if (!inferred_idxes.has (i))
+        {
+          deltas_x.arrayZ[i] = 0.f;
+          deltas_y.arrayZ[i] = 0.f;
+        }
+        indices[i] = true;
+      }
+    }
+    return true;
+  }
+
+  static float infer_delta (float target_val, float prev_val, float next_val, float prev_delta, float next_delta)
+  {
+    if (prev_val == next_val)
+      return (prev_delta == next_delta) ? prev_delta : 0.f;
+    else if (target_val <= hb_min (prev_val, next_val))
+      return (prev_val < next_val) ? prev_delta : next_delta;
+    else if (target_val >= hb_max (prev_val, next_val))
+      return (prev_val > next_val) ? prev_delta : next_delta;
+
+    float r = (target_val - prev_val) / (next_val - prev_val);
+    return prev_delta + r * (next_delta - prev_delta);
+  }
+
+  static unsigned int next_index (unsigned int i, unsigned int start, unsigned int end)
+  { return (i >= end) ? start : (i + 1); }
 };
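A quick sanity check of infer_delta (illustrative coordinates): with prev_val = 100, next_val = 200, target_val = 150, prev_delta = 10 and next_delta = 20, the ratio is r = (150 - 100) / (200 - 100) = 0.5 and the inferred delta is 10 + 0.5 * (20 - 10) = 15. A target outside the [prev_val, next_val] range instead takes the delta of the endpoint on that side, and coincident reference values yield their common delta, or 0 when the two deltas disagree.
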
 
 struct TupleVariationData
@@ -910,7 +1066,22 @@ struct TupleVariationData
     /* referenced point set-> count map, used in finding shared points */
     hb_hashmap_t<const hb_vector_t<bool>*, unsigned> point_set_count_map;
 
+    /* empty for non-gvar tuples.
+     * shared_points_bytes is just a copy of some value in the point_data_map,
+     * which will be freed during map destruction. Save it for serialization, so
+     * no need to do find_shared_points () again */
+    hb_bytes_t shared_points_bytes;
+
+    /* total compiled byte size as TupleVariationData format, initialized to its
+     * min_size: 4 */
+    unsigned compiled_byte_size = 4;
+
     public:
+    tuple_variations_t () = default;
+    tuple_variations_t (const tuple_variations_t&) = delete;
+    tuple_variations_t& operator=(const tuple_variations_t&) = delete;
+    tuple_variations_t (tuple_variations_t&&) = default;
+    tuple_variations_t& operator=(tuple_variations_t&&) = default;
     ~tuple_variations_t () { fini (); }
     void fini ()
     {
@@ -921,8 +1092,17 @@ struct TupleVariationData
       tuple_vars.fini ();
     }
 
+    explicit operator bool () const { return bool (tuple_vars); }
     unsigned get_var_count () const
-    { return tuple_vars.length; }
+    {
+      unsigned count = tuple_vars.length;
+      if (shared_points_bytes.length)
+        count |= TupleVarCount::SharedPointNumbers;
+      return count;
+    }
+
+    unsigned get_compiled_byte_size () const
+    { return compiled_byte_size; }
 
     bool create_from_tuple_var_data (tuple_iterator_t iterator,
                                      unsigned tuple_var_count,
@@ -992,13 +1172,71 @@ struct TupleVariationData
       return true;
     }
 
-    void change_tuple_variations_axis_limits (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
+    bool create_from_item_var_data (const VarData &var_data,
+                                    const hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions,
+                                    const hb_map_t& axes_old_index_tag_map,
+                                    const hb_inc_bimap_t* inner_map = nullptr)
+    {
+      /* NULL offset, to keep original varidx valid, just return */
+      if (&var_data == &Null (VarData))
+        return true;
+  
+      unsigned num_regions = var_data.get_region_index_count ();
+      if (!tuple_vars.alloc (num_regions)) return false;
+  
+      unsigned item_count = inner_map ? inner_map->get_population () : var_data.get_item_count ();
+      unsigned row_size = var_data.get_row_size ();
+      const HBUINT8 *delta_bytes = var_data.get_delta_bytes ();
+  
+      for (unsigned r = 0; r < num_regions; r++)
+      {
+        /* In VarData, deltas are organized in rows, convert them into
+         * column(region) based tuples, resize deltas_x first */
+        tuple_delta_t tuple;
+        if (!tuple.deltas_x.resize (item_count, false) ||
+            !tuple.indices.resize (item_count, false))
+          return false;
+  
+        for (unsigned i = 0; i < item_count; i++)
+        {
+          tuple.indices.arrayZ[i] = true;
+          tuple.deltas_x.arrayZ[i] = var_data.get_item_delta_fast (inner_map ? inner_map->backward (i) : i,
+                                                                   r, delta_bytes, row_size);
+        }
+  
+        unsigned region_index = var_data.get_region_index (r);
+        if (region_index >= regions.length) return false;
+        tuple.axis_tuples = regions.arrayZ[region_index];
+
+        tuple_vars.push (std::move (tuple));
+      }
+      return !tuple_vars.in_error ();
+    }
+
+    private:
+    static int _cmp_axis_tag (const void *pa, const void *pb)
+    {
+      const hb_tag_t *a = (const hb_tag_t*) pa;
+      const hb_tag_t *b = (const hb_tag_t*) pb;
+      return (int)(*a) - (int)(*b);
+    }
+
+    bool change_tuple_variations_axis_limits (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
                                               const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances)
     {
-      for (auto _ : normalized_axes_location)
+      /* sort axis_tag/axis_limits, make result deterministic */
+      hb_vector_t<hb_tag_t> axis_tags;
+      if (!axis_tags.alloc (normalized_axes_location.get_population ()))
+        return false;
+      for (auto t : normalized_axes_location.keys ())
+        axis_tags.push (t);
+
+      axis_tags.qsort (_cmp_axis_tag);
+      for (auto axis_tag : axis_tags)
       {
-        hb_tag_t axis_tag = _.first;
-        Triple axis_limit = _.second;
+        Triple *axis_limit;
+        if (!normalized_axes_location.has (axis_tag, &axis_limit))
+          return false;
         TripleDistances axis_triple_distances{1.f, 1.f};
         if (axes_triple_distances.has (axis_tag))
           axis_triple_distances = axes_triple_distances.get (axis_tag);
@@ -1006,12 +1244,13 @@ struct TupleVariationData
         hb_vector_t<tuple_delta_t> new_vars;
         for (const tuple_delta_t& var : tuple_vars)
         {
-          hb_vector_t<tuple_delta_t> out = var.change_tuple_var_axis_limit (axis_tag, axis_limit, axis_triple_distances);
+          hb_vector_t<tuple_delta_t> out = var.change_tuple_var_axis_limit (axis_tag, *axis_limit, axis_triple_distances);
           if (!out) continue;
+
           unsigned new_len = new_vars.length + out.length;
 
           if (unlikely (!new_vars.alloc (new_len, false)))
-          { fini (); return;}
+          { fini (); return false;}
 
           for (unsigned i = 0; i < out.length; i++)
             new_vars.push (std::move (out[i]));
@@ -1019,13 +1258,14 @@ struct TupleVariationData
         tuple_vars.fini ();
         tuple_vars = std::move (new_vars);
       }
+      return true;
     }
 
     /* merge tuple variations with overlapping tents */
     void merge_tuple_variations ()
     {
       hb_vector_t<tuple_delta_t> new_vars;
-      hb_hashmap_t<hb_hashmap_t<hb_tag_t, Triple>, unsigned> m;
+      hb_hashmap_t<const hb_hashmap_t<hb_tag_t, Triple>*, unsigned> m;
       unsigned i = 0;
       for (const tuple_delta_t& var : tuple_vars)
       {
@@ -1033,14 +1273,14 @@ struct TupleVariationData
         if (var.axis_tuples.is_empty ()) continue;
 
         unsigned *idx;
-        if (m.has (var.axis_tuples, &idx))
+        if (m.has (&(var.axis_tuples), &idx))
         {
           new_vars[*idx] += var;
         }
         else
         {
           new_vars.push (var);
-          m.set (var.axis_tuples, i);
+          m.set (&(var.axis_tuples), i);
           i++;
         }
       }
@@ -1187,19 +1427,46 @@ struct TupleVariationData
       return res;
     }
 
-    void instantiate (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
-                      const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances)
+    bool calc_inferred_deltas (contour_point_vector_t& contour_points)
+    {
+      for (tuple_delta_t& var : tuple_vars)
+        if (!var.calc_inferred_deltas (contour_points))
+          return false;
+      
+      return true;
+    }
+
+    public:
+    bool instantiate (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
+                      const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances,
+                      contour_point_vector_t* contour_points = nullptr)
     {
-      change_tuple_variations_axis_limits (normalized_axes_location, axes_triple_distances);
+      if (!tuple_vars) return true;
+      if (!change_tuple_variations_axis_limits (normalized_axes_location, axes_triple_distances))
+        return false;
+      /* compute inferred deltas only for gvar */
+      if (contour_points)
+        if (!calc_inferred_deltas (*contour_points))
+          return false;
+
       merge_tuple_variations ();
+      return !tuple_vars.in_error ();
     }
 
     bool compile_bytes (const hb_map_t& axes_index_map,
-                        const hb_map_t& axes_old_index_tag_map)
+                        const hb_map_t& axes_old_index_tag_map,
+                        bool use_shared_points,
+                        const hb_hashmap_t<const hb_vector_t<char>*, unsigned>* shared_tuples_idx_map = nullptr)
     {
       // compile points set and store data in hashmap
       if (!compile_all_point_sets ())
         return false;
+
+      if (use_shared_points)
+      {
+        shared_points_bytes = find_shared_points ();
+        compiled_byte_size += shared_points_bytes.length;
+      }
       // compile delta and tuple var header for each tuple variation
       for (auto& tuple: tuple_vars)
       {
@@ -1211,8 +1478,11 @@ struct TupleVariationData
         if (!tuple.compile_deltas ())
           return false;
 
-        if (!tuple.compile_tuple_var_header (axes_index_map, points_data->length, axes_old_index_tag_map))
+        unsigned points_data_length = (*points_data != shared_points_bytes) ? points_data->length : 0;
+        if (!tuple.compile_tuple_var_header (axes_index_map, points_data_length, axes_old_index_tag_map,
+                                             shared_tuples_idx_map))
           return false;
+        compiled_byte_size += tuple.compiled_tuple_header.length + points_data_length + tuple.compiled_deltas.length;
       }
       return true;
     }
@@ -1229,9 +1499,12 @@ struct TupleVariationData
       return_trace (true);
     }
 
-    bool serialize_var_data (hb_serialize_context_t *c) const
+    bool serialize_var_data (hb_serialize_context_t *c, bool is_gvar) const
     {
       TRACE_SERIALIZE (this);
+      if (is_gvar)
+        shared_points_bytes.copy (c);
+
       for (const auto& tuple: tuple_vars)
       {
         const hb_vector_t<bool>* points_set = &(tuple.indices);
@@ -1239,10 +1512,20 @@ struct TupleVariationData
         if (!point_data_map.has (points_set, &point_data))
           return_trace (false);
 
-        point_data->copy (c);
+        if (!is_gvar || *point_data != shared_points_bytes)
+          point_data->copy (c);
+
         tuple.compiled_deltas.as_array ().copy (c);
         if (c->in_error ()) return_trace (false);
       }
+
+      /* padding for gvar */
+      if (is_gvar && (compiled_byte_size % 2))
+      {
+        HBUINT8 pad;
+        pad = 0;
+        if (!c->embed (pad)) return_trace (false);
+      }
       return_trace (true);
     }
   };
@@ -1428,9 +1711,12 @@ struct TupleVariationData
 
   bool serialize (hb_serialize_context_t *c,
                   bool is_gvar,
-                  tuple_variations_t& tuple_variations) const
+                  const tuple_variations_t& tuple_variations) const
   {
     TRACE_SERIALIZE (this);
+    /* empty tuple variations, just return and skip serialization. */
+    if (!tuple_variations) return_trace (true);
+
     auto *out = c->start_embed (this);
     if (unlikely (!c->extend_min (out))) return_trace (false);
 
@@ -1446,15 +1732,17 @@ struct TupleVariationData
     if (!is_gvar) data_offset += 4;
     if (!c->check_assign (out->data, data_offset, HB_SERIALIZE_ERROR_INT_OVERFLOW)) return_trace (false);
 
-    return tuple_variations.serialize_var_data (c);
+    return tuple_variations.serialize_var_data (c, is_gvar);
   }
 
   protected:
   struct TupleVarCount : HBUINT16
   {
+    friend struct tuple_variations_t;
     bool has_shared_point_numbers () const { return ((*this) & SharedPointNumbers); }
     unsigned int get_count () const { return (*this) & CountMask; }
     TupleVarCount& operator = (uint16_t i) { HBUINT16::operator= (i); return *this; }
+    explicit operator bool () const { return get_count (); }
 
     protected:
     enum Flags
@@ -1478,6 +1766,463 @@ struct TupleVariationData
   DEFINE_SIZE_MIN (4);
 };
 
+using tuple_variations_t = TupleVariationData::tuple_variations_t;
+struct item_variations_t
+{
+  using region_t = const hb_hashmap_t<hb_tag_t, Triple>*;
+  private:
+  /* each subtable is decompiled into a tuple_variations_t, in which all tuples
+   * have the same num of deltas (rows) */
+  hb_vector_t<tuple_variations_t> vars;
+
+  /* original region list, decompiled from item varstore, used when rebuilding
+   * region list after instantiation */
+  hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>> orig_region_list;
+
+  /* region list: vector of Regions, maintain the original order for the regions
+   * that existed before instantiate (), append the new regions at the end.
+   * Regions are stored in each tuple already, save pointers only.
+   * When converting back to item varstore, unused regions will be pruned */
+  hb_vector_t<region_t> region_list;
+
+  /* region -> idx map after instantiation and pruning unused regions */
+  hb_hashmap_t<region_t, unsigned> region_map;
+
+  /* all delta rows after instantiation */
+  hb_vector_t<hb_vector_t<int>> delta_rows;
+  /* final optimized vector of encoding objects used to assemble the varstore */
+  hb_vector_t<delta_row_encoding_t> encodings;
+
+  /* old varidxes -> new var_idxes map */
+  hb_map_t varidx_map;
+
+  /* has long words */
+  bool has_long = false;
+
+  public:
+  bool has_long_word () const
+  { return has_long; }
+
+  const hb_vector_t<region_t>& get_region_list () const
+  { return region_list; }
+
+  const hb_vector_t<delta_row_encoding_t>& get_vardata_encodings () const
+  { return encodings; }
+
+  const hb_map_t& get_varidx_map () const
+  { return varidx_map; }
+
+  bool instantiate (const VariationStore& varStore,
+                    const hb_subset_plan_t *plan,
+                    bool optimize=true,
+                    bool use_no_variation_idx=true,
+                    const hb_array_t <const hb_inc_bimap_t> inner_maps = hb_array_t<const hb_inc_bimap_t> ())
+  {
+    if (!create_from_item_varstore (varStore, plan->axes_old_index_tag_map, inner_maps))
+      return false;
+    if (!instantiate_tuple_vars (plan->axes_location, plan->axes_triple_distances))
+      return false;
+    return as_item_varstore (optimize, use_no_variation_idx);
+  }
+
+  /* keep below APIs public only for unit test: test-item-varstore */
+  bool create_from_item_varstore (const VariationStore& varStore,
+                                  const hb_map_t& axes_old_index_tag_map,
+                                  const hb_array_t <const hb_inc_bimap_t> inner_maps = hb_array_t<const hb_inc_bimap_t> ())
+  {
+    const VarRegionList& regionList = varStore.get_region_list ();
+    if (!regionList.get_var_regions (axes_old_index_tag_map, orig_region_list))
+      return false;
+
+    unsigned num_var_data = varStore.get_sub_table_count ();
+    if (inner_maps && inner_maps.length != num_var_data) return false;
+    if (!vars.alloc (num_var_data)) return false;
+
+    for (unsigned i = 0; i < num_var_data; i++)
+    {
+      if (inner_maps && !inner_maps.arrayZ[i].get_population ())
+          continue;
+      tuple_variations_t var_data_tuples;
+      if (!var_data_tuples.create_from_item_var_data (varStore.get_sub_table (i),
+                                                      orig_region_list,
+                                                      axes_old_index_tag_map,
+                                                      inner_maps ? &(inner_maps.arrayZ[i]) : nullptr))
+        return false;
+
+      vars.push (std::move (var_data_tuples));
+    }
+    return !vars.in_error ();
+  }
+
+  bool instantiate_tuple_vars (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
+                               const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances)
+  {
+    for (tuple_variations_t& tuple_vars : vars)
+      if (!tuple_vars.instantiate (normalized_axes_location, axes_triple_distances))
+        return false;
+
+    if (!build_region_list ()) return false;
+    return true;
+  }
+
+  bool build_region_list ()
+  {
+    /* scan all tuples and collect all unique regions, prune unused regions */
+    hb_hashmap_t<region_t, unsigned> all_regions;
+    hb_hashmap_t<region_t, unsigned> used_regions;
+
+    /* use a vector when inserting new regions, make result deterministic */
+    hb_vector_t<region_t> all_unique_regions;
+    for (const tuple_variations_t& sub_table : vars)
+    {
+      for (const tuple_delta_t& tuple : sub_table.tuple_vars)
+      {
+        region_t r = &(tuple.axis_tuples);
+        if (!used_regions.has (r))
+        {
+          bool all_zeros = true;
+          for (float d : tuple.deltas_x)
+          {
+            int delta = (int) roundf (d);
+            if (delta != 0)
+            {
+              all_zeros = false;
+              break;
+            }
+          }
+          if (!all_zeros)
+          {
+            if (!used_regions.set (r, 1))
+              return false;
+          }
+        }
+        if (all_regions.has (r))
+          continue;
+        if (!all_regions.set (r, 1))
+          return false;
+        all_unique_regions.push (r);
+      }
+    }
+
+    if (!all_regions || !all_unique_regions) return false;
+    if (!region_list.alloc (all_regions.get_population ()))
+      return false;
+
+    unsigned idx = 0;
+    /* append the original regions that pre-existed */
+    for (const auto& r : orig_region_list)
+    {
+      if (!all_regions.has (&r) || !used_regions.has (&r))
+        continue;
+
+      region_list.push (&r);
+      if (!region_map.set (&r, idx))
+        return false;
+      all_regions.del (&r);
+      idx++;
+    }
+
+    /* append the new regions at the end */
+    for (const auto& r: all_unique_regions)
+    {
+      if (!all_regions.has (r) || !used_regions.has (r))
+        continue;
+      region_list.push (r);
+      if (!region_map.set (r, idx))
+        return false;
+      all_regions.del (r);
+      idx++;
+    }
+    return (!region_list.in_error ()) && (!region_map.in_error ());
+  }
+
+  /* Main algorithm ported from the fonttools VarStore_optimize() method; the
+   * varstore is optimized by default. */
+
+  struct combined_gain_idx_tuple_t
+  {
+    int gain;
+    unsigned idx_1;
+    unsigned idx_2;
+
+    combined_gain_idx_tuple_t () = default;
+    combined_gain_idx_tuple_t (int gain_, unsigned i, unsigned j)
+        :gain (gain_), idx_1 (i), idx_2 (j) {}
+
+    bool operator < (const combined_gain_idx_tuple_t& o)
+    {
+      if (gain != o.gain)
+        return gain < o.gain;
+
+      if (idx_1 != o.idx_1)
+        return idx_1 < o.idx_1;
+
+      return idx_2 < o.idx_2;
+    }
+
+    bool operator <= (const combined_gain_idx_tuple_t& o)
+    {
+      if (*this < o) return true;
+      return gain == o.gain && idx_1 == o.idx_1 && idx_2 == o.idx_2;
+    }
+  };
+
+  bool as_item_varstore (bool optimize=true, bool use_no_variation_idx=true)
+  {
+    if (!region_list) return false;
+    unsigned num_cols = region_list.length;
+    /* pre-alloc a 2D vector for all sub_table's VarData rows */
+    unsigned total_rows = 0;
+    for (unsigned major = 0; major < vars.length; major++)
+    {
+      const tuple_variations_t& tuples = vars[major];
+      /* all tuples in each sub_table should have same num of deltas(num rows) */
+      total_rows += tuples.tuple_vars[0].deltas_x.length;
+    }
+
+    if (!delta_rows.resize (total_rows)) return false;
+    /* init all rows to [0]*num_cols */
+    for (unsigned i = 0; i < total_rows; i++)
+      if (!(delta_rows[i].resize (num_cols))) return false;
+
+    /* old VarIdxes -> full encoding_row mapping */
+    hb_hashmap_t<unsigned, const hb_vector_t<int>*> front_mapping;
+    unsigned start_row = 0;
+    hb_vector_t<delta_row_encoding_t> encoding_objs;
+    hb_hashmap_t<hb_vector_t<uint8_t>, unsigned> chars_idx_map;
+
+    /* delta_rows map, used for filtering out duplicate rows */
+    hb_hashmap_t<const hb_vector_t<int>*, unsigned> delta_rows_map;
+    for (unsigned major = 0; major < vars.length; major++)
+    {
+      /* deltas are stored in tuples(column based), convert them back into items
+       * (row based) delta */
+      const tuple_variations_t& tuples = vars[major];
+      unsigned num_rows = tuples.tuple_vars[0].deltas_x.length;
+      for (const tuple_delta_t& tuple: tuples.tuple_vars)
+      {
+        if (tuple.deltas_x.length != num_rows)
+          return false;
+
+        /* skip unused regions */
+        unsigned *col_idx;
+        if (!region_map.has (&(tuple.axis_tuples), &col_idx))
+          continue;
+
+        for (unsigned i = 0; i < num_rows; i++)
+        {
+          int rounded_delta = roundf (tuple.deltas_x[i]);
+          delta_rows[start_row + i][*col_idx] += rounded_delta;
+          if ((!has_long) && (rounded_delta < -65536 || rounded_delta > 65535))
+            has_long = true;
+        }
+      }
+
+      if (!optimize)
+      {
+        /* assemble a delta_row_encoding_t for this subtable, skip optimization so
+         * chars is not initialized, we only need delta rows for serialization */
+        delta_row_encoding_t obj;
+        for (unsigned r = start_row; r < start_row + num_rows; r++)
+          obj.add_row (&(delta_rows.arrayZ[r]));
+
+        encodings.push (std::move (obj));
+        start_row += num_rows;
+        continue;
+      }
+
+      for (unsigned minor = 0; minor < num_rows; minor++)
+      {
+        const hb_vector_t<int>& row = delta_rows[start_row + minor];
+        if (use_no_variation_idx)
+        {
+          bool all_zeros = true;
+          for (int delta : row)
+          {
+            if (delta != 0)
+            {
+              all_zeros = false;
+              break;
+            }
+          }
+          if (all_zeros)
+            continue;
+        }
+
+        if (!front_mapping.set ((major<<16) + minor, &row))
+          return false;
+
+        hb_vector_t<uint8_t> chars = delta_row_encoding_t::get_row_chars (row);
+        if (!chars) return false;
+
+        if (delta_rows_map.has (&row))
+          continue;
+
+        delta_rows_map.set (&row, 1);
+        unsigned *obj_idx;
+        if (chars_idx_map.has (chars, &obj_idx))
+        {
+          delta_row_encoding_t& obj = encoding_objs[*obj_idx];
+          if (!obj.add_row (&row))
+            return false;
+        }
+        else
+        {
+          if (!chars_idx_map.set (chars, encoding_objs.length))
+            return false;
+          delta_row_encoding_t obj (std::move (chars), &row);
+          encoding_objs.push (std::move (obj));
+        }
+      }
+
+      start_row += num_rows;
+    }
+
+    /* return directly if no optimization, maintain original VariationIndex so
+     * varidx_map would be empty */
+    if (!optimize) return !encodings.in_error ();
+
+    /* sort encoding_objs */
+    encoding_objs.qsort ();
+
+    /* main algorithm: repeatedly pick 2 best encodings to combine, and combine
+     * them */
+    hb_priority_queue_t<combined_gain_idx_tuple_t> queue;
+    unsigned num_todos = encoding_objs.length;
+    for (unsigned i = 0; i < num_todos; i++)
+    {
+      for (unsigned j = i + 1; j < num_todos; j++)
+      {
+        int combining_gain = encoding_objs.arrayZ[i].gain_from_merging (encoding_objs.arrayZ[j]);
+        if (combining_gain > 0)
+          queue.insert (combined_gain_idx_tuple_t (-combining_gain, i, j), 0);
+      }
+    }
+
+    hb_set_t removed_todo_idxes;
+    while (queue)
+    {
+      auto t = queue.pop_minimum ().first;
+      unsigned i = t.idx_1;
+      unsigned j = t.idx_2;
+
+      if (removed_todo_idxes.has (i) || removed_todo_idxes.has (j))
+        continue;
+
+      delta_row_encoding_t& encoding = encoding_objs.arrayZ[i];
+      delta_row_encoding_t& other_encoding = encoding_objs.arrayZ[j];
+
+      removed_todo_idxes.add (i);
+      removed_todo_idxes.add (j);
+
+      hb_vector_t<uint8_t> combined_chars;
+      if (!combined_chars.alloc (encoding.chars.length))
+        return false;
+
+      for (unsigned idx = 0; idx < encoding.chars.length; idx++)
+      {
+        uint8_t v = hb_max (encoding.chars.arrayZ[idx], other_encoding.chars.arrayZ[idx]);
+        combined_chars.push (v);
+      }
+
+      delta_row_encoding_t combined_encoding_obj (std::move (combined_chars));
+      for (const auto& row : hb_concat (encoding.items, other_encoding.items))
+        combined_encoding_obj.add_row (row);
+
+      for (unsigned idx = 0; idx < encoding_objs.length; idx++)
+      {
+        if (removed_todo_idxes.has (idx)) continue;
+
+        const delta_row_encoding_t& obj = encoding_objs.arrayZ[idx];
+        if (obj.chars == combined_chars)
+        {
+          for (const auto& row : obj.items)
+            combined_encoding_obj.add_row (row);
+
+          removed_todo_idxes.add (idx);
+          continue;
+        }
+
+        int combined_gain = combined_encoding_obj.gain_from_merging (obj);
+        if (combined_gain > 0)
+          queue.insert (combined_gain_idx_tuple_t (-combined_gain, idx, encoding_objs.length), 0);
+      }
+
+      encoding_objs.push (std::move (combined_encoding_obj));
+    }
+
+    int num_final_encodings = (int) encoding_objs.length - (int) removed_todo_idxes.get_population ();
+    if (num_final_encodings <= 0) return false;
+
+    if (!encodings.alloc (num_final_encodings)) return false;
+    for (unsigned i = 0; i < encoding_objs.length; i++)
+    {
+      if (removed_todo_idxes.has (i)) continue;
+      encodings.push (std::move (encoding_objs.arrayZ[i]));
+    }
+
+    /* sort again based on width, make result deterministic */
+    encodings.qsort (delta_row_encoding_t::cmp_width);
+
+    return compile_varidx_map (front_mapping);
+  }
+
+  private:
+  /* compile varidx_map for one VarData subtable (index specified by major) */
+  bool compile_varidx_map (const hb_hashmap_t<unsigned, const hb_vector_t<int>*>& front_mapping)
+  {
+    /* full encoding_row -> new VarIdxes mapping */
+    hb_hashmap_t<const hb_vector_t<int>*, unsigned> back_mapping;
+
+    for (unsigned major = 0; major < encodings.length; major++)
+    {
+      delta_row_encoding_t& encoding = encodings[major];
+      /* just sanity check, this shouldn't happen */
+      if (encoding.is_empty ())
+        return false;
+
+      unsigned num_rows = encoding.items.length;
+
+      /* sort rows, make result deterministic */
+      encoding.items.qsort (_cmp_row);
+
+      /* compile old to new var_idxes mapping */
+      for (unsigned minor = 0; minor < num_rows; minor++)
+      {
+        unsigned new_varidx = (major << 16) + minor;
+        back_mapping.set (encoding.items.arrayZ[minor], new_varidx);
+      }
+    }
+
+    for (auto _ : front_mapping.iter ())
+    {
+      unsigned old_varidx = _.first;
+      unsigned *new_varidx;
+      if (back_mapping.has (_.second, &new_varidx))
+        varidx_map.set (old_varidx, *new_varidx);
+      else
+        varidx_map.set (old_varidx, HB_OT_LAYOUT_NO_VARIATIONS_INDEX);
+    }
+    return !varidx_map.in_error ();
+  }
+
+  static int _cmp_row (const void *pa, const void *pb)
+  {
+    /* compare pointers of vectors(const hb_vector_t<int>*) that represent a row */
+    const hb_vector_t<int>** a = (const hb_vector_t<int>**) pa;
+    const hb_vector_t<int>** b = (const hb_vector_t<int>**) pb;
+
+    for (unsigned i = 0; i < (*b)->length; i++)
+    {
+      int va = (*a)->arrayZ[i];
+      int vb = (*b)->arrayZ[i];
+      if (va != vb)
+        return va < vb ? -1 : 1;
+    }
+    return 0;
+  }
+};
+
 } /* namespace OT */
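
The greedy loop in as_item_varstore () above follows the fonttools VarStore_optimize() approach: candidate pairs are pushed into a min-heap keyed by negated gain, the best pair is popped and combined, stale entries that point at already-merged encodings are skipped, and the combined encoding is re-scored against the remaining ones. Below is a self-contained sketch of that strategy only; encoding_t, greedy_merge and the fixed subtable_overhead cost model are simplified assumptions standing in for delta_row_encoding_t and its gain_from_merging ().

#include <algorithm>
#include <functional>
#include <queue>
#include <set>
#include <tuple>
#include <vector>

// Illustrative sketch; encoding_t and the gain model are simplified stand-ins.
struct encoding_t
{
  std::vector<unsigned> rows;   // delta rows covered by this encoding
  int width = 0;                // bytes needed per row under this encoding
};

static int
gain_from_merging (const encoding_t &a, const encoding_t &b)
{
  // Assumed cost model: merging removes one VarData subtable's fixed overhead,
  // but may widen the rows of the narrower encoding.
  const int subtable_overhead = 10;
  int combined_width = std::max (a.width, b.width);
  int extra = combined_width * (int) (a.rows.size () + b.rows.size ())
            - a.width * (int) a.rows.size () - b.width * (int) b.rows.size ();
  return subtable_overhead - extra;
}

static std::vector<encoding_t>
greedy_merge (std::vector<encoding_t> objs)
{
  using entry_t = std::tuple<int, unsigned, unsigned>;          // (-gain, i, j)
  std::priority_queue<entry_t, std::vector<entry_t>, std::greater<entry_t>> queue;

  for (unsigned i = 0; i < (unsigned) objs.size (); i++)
    for (unsigned j = i + 1; j < (unsigned) objs.size (); j++)
    {
      int gain = gain_from_merging (objs[i], objs[j]);
      if (gain > 0) queue.push ({-gain, i, j});
    }

  std::set<unsigned> removed;
  while (!queue.empty ())
  {
    auto [neg_gain, i, j] = queue.top ();
    queue.pop ();
    (void) neg_gain;
    if (removed.count (i) || removed.count (j)) continue;       // stale entry

    encoding_t combined;
    combined.width = std::max (objs[i].width, objs[j].width);
    combined.rows = objs[i].rows;
    combined.rows.insert (combined.rows.end (), objs[j].rows.begin (), objs[j].rows.end ());
    removed.insert (i);
    removed.insert (j);

    // Re-score the combined encoding against everything still alive.
    unsigned new_idx = (unsigned) objs.size ();
    for (unsigned k = 0; k < new_idx; k++)
    {
      if (removed.count (k)) continue;
      int gain = gain_from_merging (combined, objs[k]);
      if (gain > 0) queue.push ({-gain, k, new_idx});
    }
    objs.push_back (std::move (combined));
  }

  std::vector<encoding_t> result;
  for (unsigned i = 0; i < (unsigned) objs.size (); i++)
    if (!removed.count (i)) result.push_back (objs[i]);
  return result;
}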
 
 

+ 10 - 18
thirdparty/harfbuzz/src/hb-ot-var-cvar-table.hh

@@ -54,14 +54,14 @@ struct cvar
 
   bool decompile_tuple_variations (unsigned axis_count,
                                    unsigned point_count,
+                                   hb_blob_t *blob,
                                    bool is_gvar,
                                    const hb_map_t *axes_old_index_tag_map,
                                    TupleVariationData::tuple_variations_t& tuple_variations /* OUT */) const
   {
     hb_vector_t<unsigned> shared_indices;
     TupleVariationData::tuple_iterator_t iterator;
-    unsigned var_data_length = tupleVariationData.get_size (axis_count);
-    hb_bytes_t var_data_bytes = hb_bytes_t (reinterpret_cast<const char*> (get_tuple_var_data ()), var_data_length);
+    hb_bytes_t var_data_bytes = blob->as_bytes ().sub_array (4);
     if (!TupleVariationData::get_tuple_iterator (var_data_bytes, axis_count, this,
                                                  shared_indices, &iterator))
       return false;
@@ -131,6 +131,7 @@ struct cvar
                   TupleVariationData::tuple_variations_t& tuple_variations) const
   {
     TRACE_SERIALIZE (this);
+    if (!tuple_variations) return_trace (false);
     if (unlikely (!c->embed (version))) return_trace (false);
 
     return_trace (tupleVariationData.serialize (c, false, tuple_variations));
@@ -142,19 +143,6 @@ struct cvar
     if (c->plan->all_axes_pinned)
       return_trace (false);
 
-    /* subset() for cvar is called by partial instancing only, we always pass
-     * through cvar table in other cases */
-    if (!c->plan->normalized_coords)
-    {
-      unsigned axis_count = c->plan->source->table.fvar->get_axis_count ();
-      unsigned total_size = min_size + tupleVariationData.get_size (axis_count);
-      char *out = c->serializer->allocate_size<char> (total_size);
-      if (unlikely (!out)) return_trace (false);
-
-      hb_memcpy (out, this, total_size);
-      return_trace (true);
-    }
-
     OT::TupleVariationData::tuple_variations_t tuple_variations;
     unsigned axis_count = c->plan->axes_old_index_tag_map.get_population ();
 
@@ -163,13 +151,17 @@ struct cvar
     unsigned point_count = hb_blob_get_length (cvt_blob) / FWORD::static_size;
     hb_blob_destroy (cvt_blob);
 
-    if (!decompile_tuple_variations (axis_count, point_count, false,
+    if (!decompile_tuple_variations (axis_count, point_count,
+                                     c->source_blob, false,
                                      &(c->plan->axes_old_index_tag_map),
                                      tuple_variations))
       return_trace (false);
 
-    tuple_variations.instantiate (c->plan->axes_location, c->plan->axes_triple_distances);
-    if (!tuple_variations.compile_bytes (c->plan->axes_index_map, c->plan->axes_old_index_tag_map))
+    if (!tuple_variations.instantiate (c->plan->axes_location, c->plan->axes_triple_distances))
+      return_trace (false);
+
+    if (!tuple_variations.compile_bytes (c->plan->axes_index_map, c->plan->axes_old_index_tag_map,
+                                         false /* do not use shared points */))
       return_trace (false);
 
     return_trace (serialize (c->serializer, tuple_variations));

+ 331 - 26
thirdparty/harfbuzz/src/hb-ot-var-gvar-table.hh

@@ -39,42 +39,255 @@
 
 namespace OT {
 
-struct contour_point_t
+struct GlyphVariationData : TupleVariationData
+{};
+
+struct glyph_variations_t
 {
-  void init (float x_ = 0.f, float y_ = 0.f, bool is_end_point_ = false)
-  { flag = 0; x = x_; y = y_; is_end_point = is_end_point_; }
+  using tuple_variations_t = TupleVariationData::tuple_variations_t;
+  hb_vector_t<tuple_variations_t> glyph_variations;
+
+  hb_vector_t<char> compiled_shared_tuples;
+  private:
+  unsigned shared_tuples_count = 0;
 
-  void transform (const float (&matrix)[4])
+  /* shared coords-> index map after instantiation */
+  hb_hashmap_t<const hb_vector_t<char>*, unsigned> shared_tuples_idx_map;
+
+  public:
+  unsigned compiled_shared_tuples_count () const
+  { return shared_tuples_count; }
+
+  unsigned compiled_byte_size () const
   {
-    float x_ = x * matrix[0] + y * matrix[2];
-	  y  = x * matrix[1] + y * matrix[3];
-    x  = x_;
+    unsigned byte_size = 0;
+    for (const auto& _ : glyph_variations)
+      byte_size += _.get_compiled_byte_size ();
+
+    return byte_size;
   }
-  HB_ALWAYS_INLINE
-  void translate (const contour_point_t &p) { x += p.x; y += p.y; }
 
+  bool create_from_glyphs_var_data (unsigned axis_count,
+                                    const hb_array_t<const F2DOT14> shared_tuples,
+                                    const hb_subset_plan_t *plan,
+                                    const hb_hashmap_t<hb_codepoint_t, hb_bytes_t>& new_gid_var_data_map)
+  {
+    if (unlikely (!glyph_variations.alloc (plan->new_to_old_gid_list.length, true)))
+      return false;
 
-  float x;
-  float y;
-  uint8_t flag;
-  bool is_end_point;
-};
+    auto it = hb_iter (plan->new_to_old_gid_list);
+    for (auto &_ : it)
+    {
+      hb_codepoint_t new_gid = _.first;
+      contour_point_vector_t *all_contour_points;
+      if (!new_gid_var_data_map.has (new_gid) ||
+          !plan->new_gid_contour_points_map.has (new_gid, &all_contour_points))
+        return false;
+      hb_bytes_t var_data = new_gid_var_data_map.get (new_gid);
+
+      const GlyphVariationData* p = reinterpret_cast<const GlyphVariationData*> (var_data.arrayZ);
+      hb_vector_t<unsigned> shared_indices;
+      GlyphVariationData::tuple_iterator_t iterator;
+      tuple_variations_t tuple_vars;
+
+      /* in case variation data is empty, push an empty struct into the vector,
+       * keep the vector in sync with the new_to_old_gid_list */
+      if (!var_data || ! p->has_data () || !all_contour_points->length ||
+          !GlyphVariationData::get_tuple_iterator (var_data, axis_count,
+                                                   var_data.arrayZ,
+                                                   shared_indices, &iterator))
+      {
+        glyph_variations.push (std::move (tuple_vars));
+        continue;
+      }
 
-struct contour_point_vector_t : hb_vector_t<contour_point_t>
-{
-  void extend (const hb_array_t<contour_point_t> &a)
+      if (!p->decompile_tuple_variations (all_contour_points->length, true /* is_gvar */,
+                                          iterator, &(plan->axes_old_index_tag_map),
+                                          shared_indices, shared_tuples,
+                                          tuple_vars /* OUT */))
+        return false;
+      glyph_variations.push (std::move (tuple_vars));
+    }
+    return !glyph_variations.in_error () && glyph_variations.length == plan->new_to_old_gid_list.length;
+  }
+
+  bool instantiate (const hb_subset_plan_t *plan)
   {
-    unsigned int old_len = length;
-    if (unlikely (!resize (old_len + a.length, false)))
-      return;
-    auto arrayZ = this->arrayZ + old_len;
-    unsigned count = a.length;
-    hb_memcpy (arrayZ, a.arrayZ, count * sizeof (arrayZ[0]));
+    unsigned count = plan->new_to_old_gid_list.length;
+    for (unsigned i = 0; i < count; i++)
+    {
+      hb_codepoint_t new_gid = plan->new_to_old_gid_list[i].first;
+      contour_point_vector_t *all_points;
+      if (!plan->new_gid_contour_points_map.has (new_gid, &all_points))
+        return false;
+      if (!glyph_variations[i].instantiate (plan->axes_location, plan->axes_triple_distances, all_points))
+        return false;
+    }
+    return true;
   }
-};
 
-struct GlyphVariationData : TupleVariationData
-{};
+  bool compile_bytes (const hb_map_t& axes_index_map,
+                      const hb_map_t& axes_old_index_tag_map)
+  {
+    if (!compile_shared_tuples (axes_index_map, axes_old_index_tag_map))
+      return false;
+    for (tuple_variations_t& vars: glyph_variations)
+      if (!vars.compile_bytes (axes_index_map, axes_old_index_tag_map,
+                               true, /* use shared points*/
+                               &shared_tuples_idx_map))
+        return false;
+
+    return true;
+  }
+
+  bool compile_shared_tuples (const hb_map_t& axes_index_map,
+                              const hb_map_t& axes_old_index_tag_map)
+  {
+    /* key is pointer to compiled_peak_coords inside each tuple, hashing
+     * function will always deref pointers first */
+    hb_hashmap_t<const hb_vector_t<char>*, unsigned> coords_count_map;
+
+    /* count the num of shared coords */
+    for (tuple_variations_t& vars: glyph_variations)
+    {
+      for (tuple_delta_t& var : vars.tuple_vars)
+      {
+        if (!var.compile_peak_coords (axes_index_map, axes_old_index_tag_map))
+          return false;
+        unsigned* count;
+        if (coords_count_map.has (&(var.compiled_peak_coords), &count))
+          coords_count_map.set (&(var.compiled_peak_coords), *count + 1);
+        else
+          coords_count_map.set (&(var.compiled_peak_coords), 1);
+      }
+    }
+
+    if (!coords_count_map || coords_count_map.in_error ())
+      return false;
+
+    /* add only those coords that are used more than once into the vector and sort */
+    hb_vector_t<const hb_vector_t<char>*> shared_coords;
+    if (unlikely (!shared_coords.alloc (coords_count_map.get_population ())))
+      return false;
+
+    for (const auto _ : coords_count_map.iter ())
+    {
+      if (_.second == 1) continue;
+      shared_coords.push (_.first);
+    }
+
+    /* no shared tuples: no coords are used more than once */
+    if (!shared_coords) return true;
+    /* sorting based on the coords frequency first (high to low), then compare
+     * the coords bytes */
+    hb_qsort (shared_coords.arrayZ, shared_coords.length, sizeof (hb_vector_t<char>*), _cmp_coords, (void *) (&coords_count_map));
+
+    /* build shared_coords->idx map and shared tuples byte array */
+
+    shared_tuples_count = hb_min (0xFFFu + 1, shared_coords.length);
+    unsigned len = shared_tuples_count * (shared_coords[0]->length);
+    if (unlikely (!compiled_shared_tuples.alloc (len)))
+      return false;
+
+    for (unsigned i = 0; i < shared_tuples_count; i++)
+    {
+      shared_tuples_idx_map.set (shared_coords[i], i);
+      /* add a concat() in hb_vector_t? */
+      for (char c : shared_coords[i]->iter ())
+        compiled_shared_tuples.push (c);
+    }
+
+    return true;
+  }
+
+  static int _cmp_coords (const void *pa, const void *pb, void *arg)
+  {
+    const hb_hashmap_t<const hb_vector_t<char>*, unsigned>* coords_count_map =
+        reinterpret_cast<const hb_hashmap_t<const hb_vector_t<char>*, unsigned>*> (arg);
+
+    /* shared_coords is hb_vector_t<const hb_vector_t<char>*> so casting pa/pb
+     * to be a pointer to a pointer */
+    const hb_vector_t<char>** a = reinterpret_cast<const hb_vector_t<char>**> (const_cast<void*>(pa));
+    const hb_vector_t<char>** b = reinterpret_cast<const hb_vector_t<char>**> (const_cast<void*>(pb));
+
+    bool has_a = coords_count_map->has (*a);
+    bool has_b = coords_count_map->has (*b);
+
+    if (has_a && has_b)
+    {
+      unsigned a_num = coords_count_map->get (*a);
+      unsigned b_num = coords_count_map->get (*b);
+
+      if (a_num != b_num)
+        return b_num - a_num;
+
+      return (*b)->as_array().cmp ((*a)->as_array ());
+    }
+    else if (has_a) return -1;
+    else if (has_b) return 1;
+    else return 0;
+  }
+
+  template<typename Iterator,
+           hb_requires (hb_is_iterator (Iterator))>
+  bool serialize_glyph_var_data (hb_serialize_context_t *c,
+                                 Iterator it,
+                                 bool long_offset,
+                                 unsigned num_glyphs,
+                                 char* glyph_var_data_offsets /* OUT: glyph var data offsets array */) const
+  {
+    TRACE_SERIALIZE (this);
+
+    if (long_offset)
+    {
+      ((HBUINT32 *) glyph_var_data_offsets)[0] = 0;
+      glyph_var_data_offsets += 4;
+    }
+    else
+    {
+      ((HBUINT16 *) glyph_var_data_offsets)[0] = 0;
+      glyph_var_data_offsets += 2;
+    }
+    unsigned glyph_offset = 0;
+    hb_codepoint_t last_gid = 0;
+    unsigned idx = 0;
+
+    TupleVariationData* cur_glyph = c->start_embed<TupleVariationData> ();
+    if (!cur_glyph) return_trace (false);
+    for (auto &_ : it)
+    {
+      hb_codepoint_t gid = _.first;
+      if (long_offset)
+        for (; last_gid < gid; last_gid++)
+          ((HBUINT32 *) glyph_var_data_offsets)[last_gid] = glyph_offset;
+      else
+        for (; last_gid < gid; last_gid++)
+          ((HBUINT16 *) glyph_var_data_offsets)[last_gid] = glyph_offset / 2;
+
+      if (idx >= glyph_variations.length) return_trace (false);
+      if (!cur_glyph->serialize (c, true, glyph_variations[idx])) return_trace (false);
+      TupleVariationData* next_glyph = c->start_embed<TupleVariationData> ();
+      glyph_offset += (char *) next_glyph - (char *) cur_glyph;
+
+      if (long_offset)
+        ((HBUINT32 *) glyph_var_data_offsets)[gid] = glyph_offset;
+      else
+        ((HBUINT16 *) glyph_var_data_offsets)[gid] = glyph_offset / 2;
+
+      last_gid++;
+      idx++;
+      cur_glyph = next_glyph;
+    }
+
+    if (long_offset)
+      for (; last_gid < num_glyphs; last_gid++)
+        ((HBUINT32 *) glyph_var_data_offsets)[last_gid] = glyph_offset;
+    else
+      for (; last_gid < num_glyphs; last_gid++)
+        ((HBUINT16 *) glyph_var_data_offsets)[last_gid] = glyph_offset / 2;
+    return_trace (true);
+  }
+};
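
compile_shared_tuples () above keeps only peak-coordinate tuples that occur more than once, sorts them by frequency (ties broken on the raw bytes) and caps the result at 4096 entries, since the tuple index in a TupleVariationHeader is a 12-bit field. A rough standalone sketch of that selection follows; pick_shared_tuples and the use of std::string as a stand-in for the compiled coordinate bytes are assumptions for illustration.

#include <algorithm>
#include <map>
#include <string>
#include <vector>

static std::vector<std::string>
pick_shared_tuples (const std::vector<std::string> &all_peak_coords)
{
  std::map<std::string, unsigned> count;
  for (const auto &c : all_peak_coords)
    count[c]++;

  // Only coordinates used more than once are worth sharing.
  std::vector<std::string> shared;
  for (const auto &p : count)
    if (p.second > 1) shared.push_back (p.first);

  // Most frequent first; tie-break on the bytes to keep the order deterministic.
  std::sort (shared.begin (), shared.end (),
             [&] (const std::string &a, const std::string &b)
             {
               if (count[a] != count[b]) return count[a] > count[b];
               return a > b;   // arbitrary but stable tie-break
             });

  // TupleVariationHeader tuple indices are 12 bits, so cap at 4096 entries.
  if (shared.size () > 4096) shared.resize (4096);
  return shared;
}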
 
 struct gvar
 {
@@ -94,9 +307,101 @@ struct gvar
   bool sanitize (hb_sanitize_context_t *c) const
   { return sanitize_shallow (c); }
 
+  bool decompile_glyph_variations (hb_subset_context_t *c,
+                                   glyph_variations_t& glyph_vars /* OUT */) const
+  {
+    hb_hashmap_t<hb_codepoint_t, hb_bytes_t> new_gid_var_data_map;
+    auto it = hb_iter (c->plan->new_to_old_gid_list);
+    if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
+    {
+      new_gid_var_data_map.set (0, hb_bytes_t ());
+      it++;
+    }
+
+    for (auto &_ : it)
+    {
+      hb_codepoint_t new_gid = _.first;
+      hb_codepoint_t old_gid = _.second;
+      hb_bytes_t var_data_bytes = get_glyph_var_data_bytes (c->source_blob, glyphCountX, old_gid);
+      new_gid_var_data_map.set (new_gid, var_data_bytes);
+    }
+
+    if (new_gid_var_data_map.in_error ()) return false;
+
+    hb_array_t<const F2DOT14> shared_tuples = (this+sharedTuples).as_array ((unsigned) sharedTupleCount * (unsigned) axisCount);
+    return glyph_vars.create_from_glyphs_var_data (axisCount, shared_tuples, c->plan, new_gid_var_data_map);
+  }
+
+  template<typename Iterator,
+           hb_requires (hb_is_iterator (Iterator))>
+  bool serialize (hb_serialize_context_t *c,
+                  const glyph_variations_t& glyph_vars,
+                  Iterator it,
+                  unsigned axis_count,
+                  unsigned num_glyphs) const
+  {
+    TRACE_SERIALIZE (this);
+    gvar *out = c->allocate_min<gvar> ();
+    if (unlikely (!out)) return_trace (false);
+
+    out->version.major = 1;
+    out->version.minor = 0;
+    out->axisCount = axis_count;
+    out->glyphCountX = hb_min (0xFFFFu, num_glyphs);
+
+    unsigned glyph_var_data_size = glyph_vars.compiled_byte_size ();
+    bool long_offset = glyph_var_data_size & ~0xFFFFu;
+    out->flags = long_offset ? 1 : 0;
+
+    HBUINT8 *glyph_var_data_offsets = c->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false);
+    if (!glyph_var_data_offsets) return_trace (false);
+
+    /* shared tuples */
+    unsigned shared_tuple_count = glyph_vars.compiled_shared_tuples_count ();
+    out->sharedTupleCount = shared_tuple_count;
+
+    if (!shared_tuple_count)
+      out->sharedTuples = 0;
+    else
+    {
+      hb_array_t<const char> shared_tuples = glyph_vars.compiled_shared_tuples.as_array ().copy (c);
+      if (!shared_tuples.arrayZ) return_trace (false);
+      out->sharedTuples = shared_tuples.arrayZ - (char *) out;
+    }
+
+    char *glyph_var_data = c->start_embed<char> ();
+    if (!glyph_var_data) return_trace (false);
+    out->dataZ = glyph_var_data - (char *) out;
+
+    return_trace (glyph_vars.serialize_glyph_var_data (c, it, long_offset, num_glyphs,
+                                                       (char *) glyph_var_data_offsets));
+  }
+
+  bool instantiate (hb_subset_context_t *c) const
+  {
+    TRACE_SUBSET (this);
+    glyph_variations_t glyph_vars;
+    if (!decompile_glyph_variations (c, glyph_vars))
+      return_trace (false);
+
+    if (!glyph_vars.instantiate (c->plan)) return_trace (false);
+    if (!glyph_vars.compile_bytes (c->plan->axes_index_map, c->plan->axes_old_index_tag_map))
+      return_trace (false);
+
+    unsigned axis_count = c->plan->axes_index_map.get_population ();
+    unsigned num_glyphs = c->plan->num_output_glyphs ();
+    auto it = hb_iter (c->plan->new_to_old_gid_list);
+    return_trace (serialize (c->serializer, glyph_vars, it, axis_count, num_glyphs));
+  }
+
   bool subset (hb_subset_context_t *c) const
   {
     TRACE_SUBSET (this);
+    if (c->plan->all_axes_pinned)
+      return_trace (false);
+
+    if (c->plan->normalized_coords)
+      return_trace (instantiate (c));
 
     unsigned glyph_count = version.to_int () ? c->plan->source->get_num_glyphs () : 0;
 

+ 73 - 4
thirdparty/harfbuzz/src/hb-ot-var-hvar-table.hh

@@ -134,6 +134,36 @@ struct index_map_subset_plan_t
     }
   }
 
+  bool remap_after_instantiation (const hb_subset_plan_t *plan,
+                                  const hb_map_t& varidx_map)
+  {
+    /* recalculate bit_count after remapping */
+    outer_bit_count = 1;
+    inner_bit_count = 1;
+
+    for (const auto &_ : plan->new_to_old_gid_list)
+    {
+      hb_codepoint_t new_gid = _.first;
+      if (unlikely (new_gid >= map_count)) break;
+
+      uint32_t v = output_map.arrayZ[new_gid];
+      uint32_t *new_varidx;
+      if (!varidx_map.has (v, &new_varidx))
+        return false;
+
+      output_map.arrayZ[new_gid] = *new_varidx;
+
+      unsigned outer = (*new_varidx) >> 16;
+      unsigned bit_count = (outer == 0) ? 1 : hb_bit_storage (outer);
+      outer_bit_count = hb_max (bit_count, outer_bit_count);
+
+      unsigned inner = (*new_varidx) & 0xFFFF;
+      bit_count = (inner == 0) ? 1 : hb_bit_storage (inner);
+      inner_bit_count = hb_max (bit_count, inner_bit_count);
+    }
+    return true;
+  }
+
   unsigned int get_inner_bit_count () const { return inner_bit_count; }
   unsigned int get_width ()           const { return ((outer_bit_count + inner_bit_count + 7) / 8); }
   unsigned int get_map_count ()       const { return map_count; }
@@ -211,6 +241,16 @@ struct hvarvvar_subset_plan_t
       index_map_plans[i].remap (index_maps[i], outer_map, inner_maps, plan);
   }
 
+  /* remap */
+  bool remap_index_map_plans (const hb_subset_plan_t *plan,
+                              const hb_map_t& varidx_map)
+  {
+    for (unsigned i = 0; i < index_map_plans.length; i++)
+      if (!index_map_plans[i].remap_after_instantiation (plan, varidx_map))
+        return false;
+    return true;
+  }
+
   void fini ()
   {
     for (unsigned int i = 0; i < inner_sets.length; i++)
@@ -289,6 +329,9 @@ struct HVARVVAR
   bool _subset (hb_subset_context_t *c) const
   {
     TRACE_SUBSET (this);
+    if (c->plan->all_axes_pinned)
+      return_trace (false);
+
     hvarvvar_subset_plan_t	hvar_plan;
     hb_vector_t<const DeltaSetIndexMap *>
 				index_maps;
@@ -302,11 +345,37 @@ struct HVARVVAR
     out->version.major = 1;
     out->version.minor = 0;
 
-    if (unlikely (!out->varStore
-		      .serialize_serialize (c->serializer,
-					    hvar_plan.var_store,
-					    hvar_plan.inner_maps.as_array ())))
+    if (c->plan->normalized_coords)
+    {
+      item_variations_t item_vars;
+      if (!item_vars.instantiate (this+varStore, c->plan,
+                                  advMap == 0 ? false : true,
+                                  false, /* use_no_variation_idx = false */
+                                  hvar_plan.inner_maps.as_array ()))
+        return_trace (false);
+
+      if (!out->varStore.serialize_serialize (c->serializer,
+                                              item_vars.has_long_word (),
+                                              c->plan->axis_tags,
+                                              item_vars.get_region_list (),
+                                              item_vars.get_vardata_encodings ()))
+        return_trace (false);
+
+      /* if varstore is optimized, remap output_map */
+      if (advMap)
+      {
+        if (!hvar_plan.remap_index_map_plans (c->plan, item_vars.get_varidx_map ()))
+          return_trace (false);
+      }
+    }
+    else
+    {
+      if (unlikely (!out->varStore
+		    .serialize_serialize (c->serializer,
+					  hvar_plan.var_store,
+					  hvar_plan.inner_maps.as_array ())))
       return_trace (false);
+    }
 
     return_trace (out->T::serialize_index_maps (c->serializer,
 						hvar_plan.index_map_plans.as_array ()));
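
remap_after_instantiation () above only recomputes the outer/inner bit counts; the index map width then comes from get_width (), i.e. ceil((outer_bits + inner_bits) / 8). A small illustrative computation of the resulting DeltaSetIndexMap entry width and entryFormat byte, assuming the OpenType packing (low nibble holds innerIndexBitCount - 1, bits 4-5 hold entrySize - 1); delta_set_index_map_format is not a HarfBuzz helper.

// Entry width in bytes and the DeltaSetIndexMap entryFormat byte.
static void
delta_set_index_map_format (unsigned outer_bit_count, unsigned inner_bit_count,
                            unsigned *width, unsigned *entry_format)
{
  *width = (outer_bit_count + inner_bit_count + 7) / 8;
  *entry_format = ((*width - 1) << 4) | (inner_bit_count - 1);
}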

+ 55 - 1
thirdparty/harfbuzz/src/hb-ot-var-mvar-table.hh

@@ -27,7 +27,7 @@
 #ifndef HB_OT_VAR_MVAR_TABLE_HH
 #define HB_OT_VAR_MVAR_TABLE_HH
 
-#include "hb-ot-layout-common.hh"
+#include "hb-ot-var-common.hh"
 
 
 namespace OT {
@@ -41,6 +41,19 @@ struct VariationValueRecord
     return_trace (c->check_struct (this));
   }
 
+  bool subset (hb_subset_context_t *c,
+               const hb_map_t& varidx_map) const
+  {
+    TRACE_SUBSET (this);
+    auto *out = c->serializer->embed (*this);
+    if (unlikely (!out)) return_trace (false);
+
+    hb_codepoint_t *new_idx;
+    return_trace (c->serializer->check_assign (out->varIdx,
+                                               (varidx_map.has (varIdx, &new_idx)) ? *new_idx : HB_OT_LAYOUT_NO_VARIATIONS_INDEX,
+                                               HB_SERIALIZE_ERROR_INT_OVERFLOW));
+  }
+
   public:
   Tag		valueTag;	/* Four-byte tag identifying a font-wide measure. */
   VarIdx	varIdx;		/* Outer/inner index into VariationStore item. */
@@ -73,6 +86,47 @@ struct MVAR
 				  valueRecordSize));
   }
 
+  bool subset (hb_subset_context_t *c) const
+  {
+    TRACE_SUBSET (this);
+#ifdef HB_NO_VAR
+    return_trace (false);
+#endif
+
+    if (c->plan->all_axes_pinned)
+      return_trace (false);
+
+    MVAR *out = c->serializer->start_embed (*this);
+    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+    out->version = version;
+    out->reserved = reserved;
+    out->valueRecordSize = valueRecordSize;
+    out->valueRecordCount = valueRecordCount;
+
+    item_variations_t item_vars;
+    const VariationStore& src_var_store = this+varStore;
+
+    if (!item_vars.instantiate (src_var_store, c->plan))
+      return_trace (false);
+
+    /* serialize varstore */
+    if (!out->varStore.serialize_serialize (c->serializer, item_vars.has_long_word (),
+                                            c->plan->axis_tags,
+                                            item_vars.get_region_list (),
+                                            item_vars.get_vardata_encodings ()))
+      return_trace (false);
+
+    /* serialize value records array */
+    unsigned value_rec_count = valueRecordCount;
+    const VariationValueRecord *record = reinterpret_cast<const VariationValueRecord*> (valuesZ.arrayZ);
+    for (unsigned i = 0; i < value_rec_count; i++)
+    {
+      if (!record->subset (c, item_vars.get_varidx_map ())) return_trace (false);
+      record++;
+    }
+    return_trace (true);
+  }
+
   float get_var (hb_tag_t tag,
 		 const int *coords, unsigned int coord_count) const
   {

+ 25 - 0
thirdparty/harfbuzz/src/hb-paint.cc

@@ -54,6 +54,12 @@ static void
 hb_paint_pop_transform_nil (hb_paint_funcs_t *funcs, void *paint_data,
                             void *user_data) {}
 
+static hb_bool_t
+hb_paint_color_glyph_nil (hb_paint_funcs_t *funcs, void *paint_data,
+                          hb_codepoint_t glyph,
+                          hb_font_t *font,
+                          void *user_data) { return false; }
+
 static void
 hb_paint_push_clip_glyph_nil (hb_paint_funcs_t *funcs, void *paint_data,
                               hb_codepoint_t glyph,
@@ -473,6 +479,25 @@ hb_paint_pop_transform (hb_paint_funcs_t *funcs, void *paint_data)
   funcs->pop_transform (paint_data);
 }
 
+/**
+ * hb_paint_color_glyph:
+ * @funcs: paint functions
+ * @paint_data: associated data passed by the caller
+ * @glyph: the glyph ID
+ * @font: the font
+ *
+ * Perform a "color-glyph" paint operation.
+ *
+ * Since: 8.2.0
+ */
+hb_bool_t
+hb_paint_color_glyph (hb_paint_funcs_t *funcs, void *paint_data,
+                      hb_codepoint_t glyph,
+                      hb_font_t *font)
+{
+  return funcs->color_glyph (paint_data, glyph, font);
+}
+
 /**
  * hb_paint_push_clip_glyph:
  * @funcs: paint functions

+ 42 - 0
thirdparty/harfbuzz/src/hb-paint.h

@@ -136,6 +136,26 @@ typedef void (*hb_paint_pop_transform_func_t) (hb_paint_funcs_t *funcs,
                                                void *paint_data,
                                                void *user_data);
 
+/**
+ * hb_paint_color_glyph_func_t:
+ * @funcs: paint functions object
+ * @paint_data: The data accompanying the paint functions in hb_font_paint_glyph()
+ * @glyph: the glyph ID
+ * @font: the font
+ * @user_data: User data pointer passed to hb_paint_funcs_set_color_glyph_func()
+ *
+ * A virtual method for the #hb_paint_funcs_t to render a color glyph by glyph index.
+ *
+ * Return value: %true if the glyph was painted, %false otherwise.
+ *
+ * Since: 8.2.0
+ */
+typedef hb_bool_t (*hb_paint_color_glyph_func_t) (hb_paint_funcs_t *funcs,
+                                                  void *paint_data,
+                                                  hb_codepoint_t glyph,
+                                                  hb_font_t *font,
+                                                  void *user_data);
+
 /**
  * hb_paint_push_clip_glyph_func_t:
  * @funcs: paint functions object
@@ -723,6 +743,23 @@ hb_paint_funcs_set_pop_transform_func (hb_paint_funcs_t              *funcs,
                                        void                          *user_data,
                                        hb_destroy_func_t              destroy);
 
+/**
+ * hb_paint_funcs_set_color_glyph_func:
+ * @funcs: A paint functions struct
+ * @func: (closure user_data) (destroy destroy) (scope notified): The color-glyph callback
+ * @user_data: Data to pass to @func
+ * @destroy: (nullable): Function to call when @user_data is no longer needed
+ *
+ * Sets the color-glyph callback on the paint functions struct.
+ *
+ * Since: 8.2.0
+ */
+HB_EXTERN void
+hb_paint_funcs_set_color_glyph_func (hb_paint_funcs_t                *funcs,
+				     hb_paint_color_glyph_func_t     func,
+				     void                            *user_data,
+				     hb_destroy_func_t                destroy);
+
 /**
  * hb_paint_funcs_set_push_clip_glyph_func:
  * @funcs: A paint functions struct
@@ -922,6 +959,11 @@ hb_paint_push_transform (hb_paint_funcs_t *funcs, void *paint_data,
 HB_EXTERN void
 hb_paint_pop_transform (hb_paint_funcs_t *funcs, void *paint_data);
 
+HB_EXTERN hb_bool_t
+hb_paint_color_glyph (hb_paint_funcs_t *funcs, void *paint_data,
+                      hb_codepoint_t glyph,
+                      hb_font_t *font);
+
 HB_EXTERN void
 hb_paint_push_clip_glyph (hb_paint_funcs_t *funcs, void *paint_data,
                           hb_codepoint_t glyph,

+ 8 - 0
thirdparty/harfbuzz/src/hb-paint.hh

@@ -32,6 +32,7 @@
 #define HB_PAINT_FUNCS_IMPLEMENT_CALLBACKS \
   HB_PAINT_FUNC_IMPLEMENT (push_transform) \
   HB_PAINT_FUNC_IMPLEMENT (pop_transform) \
+  HB_PAINT_FUNC_IMPLEMENT (color_glyph) \
   HB_PAINT_FUNC_IMPLEMENT (push_clip_glyph) \
   HB_PAINT_FUNC_IMPLEMENT (push_clip_rectangle) \
   HB_PAINT_FUNC_IMPLEMENT (pop_clip) \
@@ -77,6 +78,13 @@ struct hb_paint_funcs_t
   void pop_transform (void *paint_data)
   { func.pop_transform (this, paint_data,
                         !user_data ? nullptr : user_data->pop_transform); }
+  bool color_glyph (void *paint_data,
+                    hb_codepoint_t glyph,
+                    hb_font_t *font)
+  { return func.color_glyph (this, paint_data,
+                             glyph,
+                             font,
+                             !user_data ? nullptr : user_data->color_glyph); }
   void push_clip_glyph (void *paint_data,
                         hb_codepoint_t glyph,
                         hb_font_t *font)
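
A brief usage sketch for the new paint callback: my_color_glyph and make_paint_funcs are hypothetical client-side names, while the hb_paint_* entry points are the ones declared above.

#include <hb.h>

// Hypothetical callback: tell HarfBuzz the embedder painted the glyph itself.
static hb_bool_t
my_color_glyph (hb_paint_funcs_t *funcs, void *paint_data,
                hb_codepoint_t glyph, hb_font_t *font,
                void *user_data)
{
  // ... render `glyph` from `font` into whatever backend `paint_data` wraps ...
  return true;
}

static hb_paint_funcs_t *
make_paint_funcs ()
{
  hb_paint_funcs_t *funcs = hb_paint_funcs_create ();
  hb_paint_funcs_set_color_glyph_func (funcs, my_color_glyph, nullptr, nullptr);
  return funcs;
}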

+ 3 - 2
thirdparty/harfbuzz/src/hb-priority-queue.hh

@@ -42,10 +42,11 @@
  * priority of its children. The heap is stored in an array, with the
  * children of node i stored at indices 2i + 1 and 2i + 2.
  */
+template <typename K>
 struct hb_priority_queue_t
 {
  private:
-  typedef hb_pair_t<int64_t, unsigned> item_t;
+  typedef hb_pair_t<K, unsigned> item_t;
   hb_vector_t<item_t> heap;
 
  public:
@@ -57,7 +58,7 @@ struct hb_priority_queue_t
 #ifndef HB_OPTIMIZE_SIZE
   HB_ALWAYS_INLINE
 #endif
-  void insert (int64_t priority, unsigned value)
+  void insert (K priority, unsigned value)
   {
     heap.push (item_t (priority, value));
     if (unlikely (heap.in_error ())) return;

+ 15 - 5
thirdparty/harfbuzz/src/hb-repacker.hh

@@ -79,7 +79,12 @@ bool _presplit_subtables_if_needed (graph::gsubgpos_graph_context_t& ext_context
   //                pass after this processing is done. Not super necessary as splits are
   //                only done where overflow is likely, so de-dup probably will get undone
   //                later anyways.
-  for (unsigned lookup_index : ext_context.lookups.keys ())
+
+  // The loop below can modify the contents of ext_context.lookups if new subtables are added
+  // to a lookup during a split. So save the initial set of lookup indices so the iteration doesn't
+  // risk access free'd memory if ext_context.lookups gets resized.
+  hb_set_t lookup_indices(ext_context.lookups.keys ());
+  for (unsigned lookup_index : lookup_indices)
   {
     graph::Lookup* lookup = ext_context.lookups.get(lookup_index);
     if (!lookup->split_subtables_if_needed (ext_context, lookup_index))
@@ -114,11 +119,15 @@ bool _promote_extensions_if_needed (graph::gsubgpos_graph_context_t& ext_context
   // TODO(grieger): skip this for the 24 bit case.
   if (!ext_context.lookups) return true;
 
+  unsigned total_lookup_table_sizes = 0;
   hb_vector_t<lookup_size_t> lookup_sizes;
   lookup_sizes.alloc (ext_context.lookups.get_population (), true);
 
   for (unsigned lookup_index : ext_context.lookups.keys ())
   {
+    const auto& lookup_v = ext_context.graph.vertices_[lookup_index];
+    total_lookup_table_sizes += lookup_v.table_size ();
+
     const graph::Lookup* lookup = ext_context.lookups.get(lookup_index);
     hb_set_t visited;
     lookup_sizes.push (lookup_size_t {
@@ -131,14 +140,16 @@ bool _promote_extensions_if_needed (graph::gsubgpos_graph_context_t& ext_context
   lookup_sizes.qsort ();
 
   size_t lookup_list_size = ext_context.graph.vertices_[ext_context.lookup_list_index].table_size ();
-  size_t l2_l3_size = lookup_list_size; // Lookup List + Lookups
-  size_t l3_l4_size = 0; // Lookups + SubTables
+  size_t l2_l3_size = lookup_list_size + total_lookup_table_sizes; // Lookup List + Lookups
+  size_t l3_l4_size = total_lookup_table_sizes; // Lookups + SubTables
   size_t l4_plus_size = 0; // SubTables + their descendants
 
   // Start by assuming all lookups are using extension subtables, this size will be removed later
   // if it's decided to not make a lookup extension.
   for (auto p : lookup_sizes)
   {
+    // TODO(garretrieger): this overestimates the extension subtables size because some extension subtables may be
+    //                     reused. However, we can't correct this until we have connected component analysis in place.
     unsigned subtables_size = p.num_subtables * 8;
     l3_l4_size += subtables_size;
     l4_plus_size += subtables_size;
@@ -159,8 +170,7 @@ bool _promote_extensions_if_needed (graph::gsubgpos_graph_context_t& ext_context
       size_t subtables_size = ext_context.graph.find_subgraph_size (p.lookup_index, visited, 1) - lookup_size;
       size_t remaining_size = p.size - subtables_size - lookup_size;
 
-      l2_l3_size   += lookup_size;
-      l3_l4_size   += lookup_size + subtables_size;
+      l3_l4_size   += subtables_size;
       l3_l4_size   -= p.num_subtables * 8;
       l4_plus_size += subtables_size + remaining_size;
 

+ 1 - 1
thirdparty/harfbuzz/src/hb-sanitize.hh

@@ -453,7 +453,7 @@ struct hb_sanitize_context_t :
 	edit_count = 0;
 	sane = t->sanitize (this);
 	if (edit_count) {
-	  DEBUG_MSG_FUNC (SANITIZE, start, "requested %u edits in second round; FAILLING", edit_count);
+	  DEBUG_MSG_FUNC (SANITIZE, start, "requested %u edits in second round; FAILING", edit_count);
 	  sane = false;
 	}
       }

+ 12 - 12
thirdparty/harfbuzz/src/hb-set.cc

@@ -200,7 +200,7 @@ hb_set_copy (const hb_set_t *set)
 void
 hb_set_clear (hb_set_t *set)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->clear ();
 }
 
@@ -251,7 +251,7 @@ void
 hb_set_add (hb_set_t       *set,
 	    hb_codepoint_t  codepoint)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->add (codepoint);
 }
 
@@ -272,7 +272,7 @@ hb_set_add_sorted_array (hb_set_t             *set,
 		         const hb_codepoint_t *sorted_codepoints,
 		         unsigned int          num_codepoints)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->add_sorted_array (sorted_codepoints,
 		         num_codepoints,
 		         sizeof(hb_codepoint_t));
@@ -294,7 +294,7 @@ hb_set_add_range (hb_set_t       *set,
 		  hb_codepoint_t  first,
 		  hb_codepoint_t  last)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->add_range (first, last);
 }
 
@@ -311,7 +311,7 @@ void
 hb_set_del (hb_set_t       *set,
 	    hb_codepoint_t  codepoint)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->del (codepoint);
 }
 
@@ -334,7 +334,7 @@ hb_set_del_range (hb_set_t       *set,
 		  hb_codepoint_t  first,
 		  hb_codepoint_t  last)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->del_range (first, last);
 }
 
@@ -405,7 +405,7 @@ void
 hb_set_set (hb_set_t       *set,
 	    const hb_set_t *other)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->set (*other);
 }
 
@@ -422,7 +422,7 @@ void
 hb_set_union (hb_set_t       *set,
 	      const hb_set_t *other)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->union_ (*other);
 }
 
@@ -439,7 +439,7 @@ void
 hb_set_intersect (hb_set_t       *set,
 		  const hb_set_t *other)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->intersect (*other);
 }
 
@@ -456,7 +456,7 @@ void
 hb_set_subtract (hb_set_t       *set,
 		 const hb_set_t *other)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->subtract (*other);
 }
 
@@ -474,7 +474,7 @@ void
 hb_set_symmetric_difference (hb_set_t       *set,
 			     const hb_set_t *other)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->symmetric_difference (*other);
 }
 
@@ -489,7 +489,7 @@ hb_set_symmetric_difference (hb_set_t       *set,
 void
 hb_set_invert (hb_set_t *set)
 {
-  /* Immutible-safe. */
+  /* Immutable-safe. */
   set->invert ();
 }
 

+ 13 - 8
thirdparty/harfbuzz/src/hb-subset-input.cc

@@ -74,8 +74,6 @@ hb_subset_input_t::hb_subset_input_t ()
     HB_TAG ('p', 'r', 'e', 'p'),
     HB_TAG ('V', 'D', 'M', 'X'),
     HB_TAG ('D', 'S', 'I', 'G'),
-    HB_TAG ('M', 'V', 'A', 'R'),
-    HB_TAG ('c', 'v', 'a', 'r'),
   };
   sets.no_subset_tables->add_array (default_no_subset_tables,
 					 ARRAY_LENGTH (default_no_subset_tables));
@@ -479,16 +477,21 @@ hb_subset_input_pin_axis_location (hb_subset_input_t  *input,
  * @axis_tag: Tag of the axis
  * @axis_min_value: Minimum value of the axis variation range to set
  * @axis_max_value: Maximum value of the axis variation range to set
+ * @axis_def_value: Default value of the axis variation range to set; if NULL,
+ * it will be determined automatically
  *
  * Restricting the range of variation on an axis in the given subset input object.
- * New min/max values will be clamped if they're not within the fvar axis range.
+ * New min/default/max values will be clamped if they're not within the fvar axis range.
+ * If the new default value is NULL:
+ * If the fvar axis default value is within the new range, the new default
+ * value stays the same as the original default value.
  * If the fvar axis default value is not within the new range, the new default
  * value will be changed to the new min or max value, whichever is closer to the fvar
  * axis default.
  *
- * Note: input min value can not be bigger than input max value
- * Note: currently this API does not support changing axis limits yet.It'd be only
- * used internally for setting axis limits in the internal data structures
+ * Note: the input min value cannot be bigger than the input max value. If the
+ * input default value is not within the new min/max range, it will be clamped.
+ * Note: currently it supports gvar and cvar tables only.
  *
  * Return value: `true` if success, `false` otherwise
  *
@@ -499,7 +502,8 @@ hb_subset_input_set_axis_range (hb_subset_input_t  *input,
                                 hb_face_t          *face,
                                 hb_tag_t            axis_tag,
                                 float               axis_min_value,
-                                float               axis_max_value)
+                                float               axis_max_value,
+                                float              *axis_def_value /* IN, maybe NULL */)
 {
   if (axis_min_value > axis_max_value)
     return false;
@@ -510,7 +514,8 @@ hb_subset_input_set_axis_range (hb_subset_input_t  *input,
 
   float new_min_val = hb_clamp(axis_min_value, axis_info.min_value, axis_info.max_value);
   float new_max_val = hb_clamp(axis_max_value, axis_info.min_value, axis_info.max_value);
-  float new_default_val = hb_clamp(axis_info.default_value, new_min_val, new_max_val);
+  float new_default_val = axis_def_value ? *axis_def_value : axis_info.default_value;
+  new_default_val = hb_clamp(new_default_val, new_min_val, new_max_val);
   return input->axes_location.set (axis_tag, Triple (new_min_val, new_default_val, new_max_val));
 }
 #endif
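
The new trailing parameter changes how callers pick the default value of the restricted range. A usage sketch against the updated signature (face and values are illustrative, and the function is only present in builds that expose the subset axis-range API, e.g. with HB_EXPERIMENTAL_API):

    #include <hb-subset.h>

    /* Restrict `wght' to 300..700 with an explicit default of 500.  Passing
     * NULL as the last argument instead derives the default from fvar,
     * clamped into the new range, as documented above. */
    static hb_face_t *
    restrict_weight (hb_face_t *face)
    {
      hb_subset_input_t *input = hb_subset_input_create_or_fail ();
      if (!input) return nullptr;

      float def = 500.0f;
      hb_subset_input_set_axis_range (input, face, HB_TAG ('w','g','h','t'),
                                      300.0f, 700.0f, &def);

      hb_face_t *out = hb_subset_or_fail (face, input);
      hb_subset_input_destroy (input);
      return out;
    }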

+ 6 - 1
thirdparty/harfbuzz/src/hb-subset-plan-member-list.hh

@@ -92,7 +92,7 @@ HB_SUBSET_PLAN_MEMBER (hb_map_t, colrv1_layers)
 HB_SUBSET_PLAN_MEMBER (hb_map_t, colr_palettes)
 
 //Old layout item variation index -> (New varidx, delta) mapping
-HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb_pair_t E(<unsigned, int>)>), layout_variation_idx_delta_map)
+HB_SUBSET_PLAN_MEMBER (mutable hb_hashmap_t E(<unsigned, hb_pair_t E(<unsigned, int>)>), layout_variation_idx_delta_map)
 
 //gdef varstore retained varidx mapping
 HB_SUBSET_PLAN_MEMBER (hb_vector_t<hb_inc_bimap_t>, gdef_varstore_inner_maps)
@@ -113,6 +113,8 @@ HB_SUBSET_PLAN_MEMBER (hb_map_t, axes_index_map)
 
 //axis_index->axis_tag mapping in fvar axis array
 HB_SUBSET_PLAN_MEMBER (hb_map_t, axes_old_index_tag_map)
+//vector of retained axis tags in the order of axes given in the 'fvar' table
+HB_SUBSET_PLAN_MEMBER (hb_vector_t<hb_tag_t>, axis_tags)
 
 //hmtx metrics map: new gid->(advance, lsb)
 HB_SUBSET_PLAN_MEMBER (mutable hb_hashmap_t E(<hb_codepoint_t, hb_pair_t E(<unsigned, int>)>), hmtx_map)
@@ -123,6 +125,9 @@ HB_SUBSET_PLAN_MEMBER (mutable hb_vector_t<unsigned>, bounds_width_vec)
 //boundsHeight map: new gid->boundsHeight, boundsHeight=yMax - yMin
 HB_SUBSET_PLAN_MEMBER (mutable hb_vector_t<unsigned>, bounds_height_vec)
 
+//map: new_gid -> contour points vector
+HB_SUBSET_PLAN_MEMBER (mutable hb_hashmap_t E(<hb_codepoint_t, contour_point_vector_t>), new_gid_contour_points_map)
+
 #ifdef HB_EXPERIMENTAL_API
 // name table overrides map: hb_ot_name_record_ids_t-> name string new value or
 // None to indicate should remove
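
The lines added above are new members of the subset plan, declared through this header's X-macro list. A simplified, self-contained illustration of how such a member list is typically consumed (macro and type names here are stand-ins, not the actual HarfBuzz definitions):

    #include <cstdint>
    #include <vector>

    #define PLAN_MEMBER(Type, Name) Type Name;

    struct demo_plan_t
    {
      /* Each entry expands to a plain member declaration. */
      PLAN_MEMBER (std::vector<uint32_t>, axis_tags)
      PLAN_MEMBER (std::vector<uint32_t>, axes_index_map)
    };

    #undef PLAN_MEMBER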

+ 39 - 20
thirdparty/harfbuzz/src/hb-subset-plan.cc

@@ -399,34 +399,20 @@ _collect_layout_variation_indices (hb_subset_plan_t* plan)
     return;
   }
 
-  const OT::VariationStore *var_store = nullptr;
   hb_set_t varidx_set;
-  float *store_cache = nullptr;
-  bool collect_delta = plan->pinned_at_default ? false : true;
-  if (collect_delta)
-  {
-    if (gdef->has_var_store ())
-    {
-      var_store = &(gdef->get_var_store ());
-      store_cache = var_store->create_cache ();
-    }
-  }
-
   OT::hb_collect_variation_indices_context_t c (&varidx_set,
-                                                &plan->layout_variation_idx_delta_map,
-                                                plan->normalized_coords ? &(plan->normalized_coords) : nullptr,
-                                                var_store,
                                                 &plan->_glyphset_gsub,
-                                                &plan->gpos_lookups,
-                                                store_cache);
+                                                &plan->gpos_lookups);
   gdef->collect_variation_indices (&c);
 
   if (hb_ot_layout_has_positioning (plan->source))
     gpos->collect_variation_indices (&c);
 
-  var_store->destroy_cache (store_cache);
-
-  gdef->remap_layout_variation_indices (&varidx_set, &plan->layout_variation_idx_delta_map);
+  gdef->remap_layout_variation_indices (&varidx_set,
+                                        plan->normalized_coords,
+                                        !plan->pinned_at_default,
+                                        plan->all_axes_pinned,
+                                        &plan->layout_variation_idx_delta_map);
 
   unsigned subtable_count = gdef->has_var_store () ? gdef->get_var_store ().get_sub_table_count () : 0;
   _generate_varstore_inner_maps (varidx_set, subtable_count, plan->gdef_varstore_inner_maps);
@@ -927,6 +913,7 @@ _normalize_axes_location (hb_face_t *face, hb_subset_plan_t *plan)
     {
       axis_not_pinned = true;
       plan->axes_index_map.set (old_axis_idx, new_axis_idx);
+      plan->axis_tags.push (axis_tag);
       new_axis_idx++;
     }
 
@@ -1045,6 +1032,36 @@ _update_instance_metrics_map_from_cff2 (hb_subset_plan_t *plan)
   if (vvar_store_cache)
     _vmtx.var_table->get_var_store ().destroy_cache (vvar_store_cache);
 }
+
+static bool
+_get_instance_glyphs_contour_points (hb_subset_plan_t *plan)
+{
+  /* contour_points vector only needed for updating gvar table (infer delta)
+   * during partial instancing */
+  if (plan->user_axes_location.is_empty () || plan->all_axes_pinned)
+    return true;
+
+  OT::glyf_accelerator_t glyf (plan->source);
+
+  for (auto &_ : plan->new_to_old_gid_list)
+  {
+    hb_codepoint_t new_gid = _.first;
+    contour_point_vector_t all_points;
+    if (new_gid == 0 && !(plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
+    {
+      if (unlikely (!plan->new_gid_contour_points_map.set (new_gid, all_points)))
+        return false;
+      continue;
+    }
+
+    hb_codepoint_t old_gid = _.second;
+    if (unlikely (!glyf.glyph_for_gid (old_gid).get_all_points_without_var (plan->source, all_points)))
+      return false;
+    if (unlikely (!plan->new_gid_contour_points_map.set (new_gid, all_points)))
+      return false;
+  }
+  return true;
+}
 #endif
 
 hb_subset_plan_t::hb_subset_plan_t (hb_face_t *face,
@@ -1148,6 +1165,8 @@ hb_subset_plan_t::hb_subset_plan_t (hb_face_t *face,
 
 #ifndef HB_NO_VAR
   _update_instance_metrics_map_from_cff2 (this);
+  if (!check_success (_get_instance_glyphs_contour_points (this)))
+      return;
 #endif
 
   if (attach_accelerator_data)
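
One detail worth noting in _get_instance_glyphs_contour_points above: glyph 0 gets an empty point list unless HB_SUBSET_FLAGS_NOTDEF_OUTLINE is set. Callers opt in to keeping the .notdef outline through the standard flags API, for example (a sketch; the helper name is illustrative):

    #include <hb-subset.h>

    static void
    keep_notdef_outline (hb_subset_input_t *input)
    {
      unsigned flags = hb_subset_input_get_flags (input);
      hb_subset_input_set_flags (input, flags | HB_SUBSET_FLAGS_NOTDEF_OUTLINE);
    }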

+ 34 - 0
thirdparty/harfbuzz/src/hb-subset-plan.hh

@@ -67,6 +67,40 @@ struct head_maxp_info_t
 
 typedef struct head_maxp_info_t head_maxp_info_t;
 
+struct contour_point_t
+{
+  void init (float x_ = 0.f, float y_ = 0.f, bool is_end_point_ = false)
+  { flag = 0; x = x_; y = y_; is_end_point = is_end_point_; }
+
+  void transform (const float (&matrix)[4])
+  {
+    float x_ = x * matrix[0] + y * matrix[2];
+    y  = x * matrix[1] + y * matrix[3];
+    x  = x_;
+  }
+  HB_ALWAYS_INLINE
+  void translate (const contour_point_t &p) { x += p.x; y += p.y; }
+
+
+  float x;
+  float y;
+  uint8_t flag;
+  bool is_end_point;
+};
+
+struct contour_point_vector_t : hb_vector_t<contour_point_t>
+{
+  void extend (const hb_array_t<contour_point_t> &a)
+  {
+    unsigned int old_len = length;
+    if (unlikely (!resize (old_len + a.length, false)))
+      return;
+    auto arrayZ = this->arrayZ + old_len;
+    unsigned count = a.length;
+    hb_memcpy (arrayZ, a.arrayZ, count * sizeof (arrayZ[0]));
+  }
+};
+
 namespace OT {
   struct cff1_subset_accelerator_t;
   struct cff2_subset_accelerator_t;
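
The transform() added above applies a row-vector-times-2x2-matrix product, (x, y) -> (x*m0 + y*m2, x*m1 + y*m3), and translate() adds another point's coordinates, as used when composite glyph components are flattened. A self-contained check of that arithmetic with illustrative values:

    #include <cassert>

    int main ()
    {
      struct pt { float x, y; } p { 2.f, 3.f };

      const float m[4] = { 0.f, 1.f, -1.f, 0.f };   /* 90-degree rotation */
      float x_ = p.x * m[0] + p.y * m[2];           /* same math as transform() */
      p.y      = p.x * m[1] + p.y * m[3];
      p.x      = x_;
      assert (p.x == -3.f && p.y == 2.f);

      p.x += 10.f; p.y += 20.f;                     /* translate() by (10, 20) */
      assert (p.x == 7.f && p.y == 22.f);
      return 0;
    }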

+ 13 - 0
thirdparty/harfbuzz/src/hb-subset.cc

@@ -55,6 +55,7 @@
 #include "hb-ot-var-fvar-table.hh"
 #include "hb-ot-var-gvar-table.hh"
 #include "hb-ot-var-hvar-table.hh"
+#include "hb-ot-var-mvar-table.hh"
 #include "hb-ot-math-table.hh"
 #include "hb-ot-stat-table.hh"
 #include "hb-repacker.hh"
@@ -460,6 +461,8 @@ _dependencies_satisfied (hb_subset_plan_t *plan, hb_tag_t tag,
   case HB_OT_TAG_vmtx:
   case HB_OT_TAG_maxp:
     return !plan->normalized_coords || !pending_subset_tags.has (HB_OT_TAG_glyf);
+  case HB_OT_TAG_GPOS:
+    return !plan->normalized_coords || plan->all_axes_pinned || !pending_subset_tags.has (HB_OT_TAG_GDEF);
   default:
     return true;
   }
@@ -514,12 +517,22 @@ _subset_table (hb_subset_plan_t *plan,
   case HB_OT_TAG_HVAR: return _subset<const OT::HVAR> (plan, buf);
   case HB_OT_TAG_VVAR: return _subset<const OT::VVAR> (plan, buf);
 #endif
+
+#ifndef HB_NO_VAR
   case HB_OT_TAG_fvar:
     if (plan->user_axes_location.is_empty ()) return _passthrough (plan, tag);
     return _subset<const OT::fvar> (plan, buf);
   case HB_OT_TAG_avar:
     if (plan->user_axes_location.is_empty ()) return _passthrough (plan, tag);
     return _subset<const OT::avar> (plan, buf);
+  case HB_OT_TAG_cvar:
+    if (plan->user_axes_location.is_empty ()) return _passthrough (plan, tag);
+    return _subset<const OT::cvar> (plan, buf);
+  case HB_OT_TAG_MVAR:
+    if (plan->user_axes_location.is_empty ()) return _passthrough (plan, tag);
+    return _subset<const OT::MVAR> (plan, buf);
+#endif
+
   case HB_OT_TAG_STAT:
     if (!plan->user_axes_location.is_empty ()) return _subset<const OT::STAT> (plan, buf);
     else return _passthrough (plan, tag);
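
With this change cvar and MVAR are rewritten (instanced) whenever the caller specifies axis positions and passed through untouched otherwise, and GPOS additionally waits for GDEF when coordinates are normalized without all axes being pinned. From the public API side, the instancing branch is reached by calls such as the following sketch (axis tag and value are illustrative):

    #include <hb-subset.h>

    /* Pin `wght' to 400.  The user axes location is then non-empty, so the
     * cvar/MVAR cases above take the _subset<> branch instead of
     * _passthrough. */
    static hb_face_t *
    instance_regular (hb_face_t *face)
    {
      hb_subset_input_t *input = hb_subset_input_create_or_fail ();
      if (!input) return nullptr;

      hb_subset_input_pin_axis_location (input, face,
                                         HB_TAG ('w','g','h','t'), 400.0f);

      hb_face_t *out = hb_subset_or_fail (face, input);
      hb_subset_input_destroy (input);
      return out;
    }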

+ 2 - 1
thirdparty/harfbuzz/src/hb-subset.h

@@ -181,7 +181,8 @@ hb_subset_input_set_axis_range (hb_subset_input_t  *input,
 				hb_face_t          *face,
 				hb_tag_t            axis_tag,
 				float               axis_min_value,
-				float               axis_max_value);
+				float               axis_max_value,
+				float              *axis_def_value);
 
 HB_EXTERN hb_bool_t
 hb_subset_input_override_name_table (hb_subset_input_t  *input,

File diff suppressed because it is too large
+ 863 - 861
thirdparty/harfbuzz/src/hb-ucd-table.hh


+ 3 - 3
thirdparty/harfbuzz/src/hb-unicode-emoji-table.hh

@@ -7,13 +7,13 @@
  * on file with this header:
  *
  * # emoji-data.txt
- * # Date: 2022-08-02, 00:26:10 GMT
- * # © 2022 Unicode®, Inc.
+ * # Date: 2023-02-01, 02:22:54 GMT
+ * # © 2023 Unicode®, Inc.
  * # Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries.
  * # For terms of use, see https://www.unicode.org/terms_of_use.html
  * #
  * # Emoji Data for UTS #51
- * # Used with Emoji Version 15.0 and subsequent minor revisions (if any)
+ * # Used with Emoji Version 15.1 and subsequent minor revisions (if any)
  * #
  * # For documentation and usage, see https://www.unicode.org/reports/tr51
  */

+ 4 - 22
thirdparty/harfbuzz/src/hb-vector.hh

@@ -102,7 +102,7 @@ struct hb_vector_t
 
   void fini ()
   {
-    /* We allow a hack to make the vector point to a foriegn array
+    /* We allow a hack to make the vector point to a foreign array
      * by the user. In that case length/arrayZ are non-zero but
      * allocated is zero. Don't free anything. */
     if (allocated)
@@ -208,25 +208,7 @@ struct hb_vector_t
       return std::addressof (Crap (Type));
     return std::addressof (arrayZ[length - 1]);
   }
-  template <typename T,
-	    typename T2 = Type,
-	    hb_enable_if (!std::is_copy_constructible<T2>::value &&
-			  std::is_copy_assignable<T>::value)>
-  Type *push (T&& v)
-  {
-    Type *p = push ();
-    if (p == std::addressof (Crap (Type)))
-      // If push failed to allocate then don't copy v, since this may cause
-      // the created copy to leak memory since we won't have stored a
-      // reference to it.
-      return p;
-    *p = std::forward<T> (v);
-    return p;
-  }
-  template <typename T,
-	    typename T2 = Type,
-	    hb_enable_if (std::is_copy_constructible<T2>::value)>
-  Type *push (T&& v)
+  template <typename... Args> Type *push (Args&&... args)
   {
     if (unlikely ((int) length >= allocated && !alloc (length + 1)))
       // If push failed to allocate then don't copy v, since this may cause
@@ -236,7 +218,7 @@ struct hb_vector_t
 
     /* Emplace. */
     Type *p = std::addressof (arrayZ[length++]);
-    return new (p) Type (std::forward<T> (v));
+    return new (p) Type (std::forward<Args> (args)...);
   }
 
   bool in_error () const { return allocated < 0; }
@@ -478,7 +460,7 @@ struct hb_vector_t
   Type pop ()
   {
     if (!length) return Null (Type);
-    Type v {std::move (arrayZ[length - 1])};
+    Type v (std::move (arrayZ[length - 1]));
     arrayZ[length - 1].~Type ();
     length--;
     return v;
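
The push() rewrite above collapses the two copy/move overloads into a single perfect-forwarding overload, so elements can be constructed in place from arbitrary constructor arguments (emplace semantics). A self-contained sketch of the same technique, independent of hb_vector_t:

    #include <cassert>
    #include <new>
    #include <utility>

    struct pair_t
    {
      unsigned first; int second;
      pair_t (unsigned f, int s) : first (f), second (s) {}
    };

    /* Minimal emplace-style push over a fixed-capacity buffer. */
    template <typename Type, unsigned N>
    struct tiny_vec_t
    {
      alignas (Type) unsigned char storage[N * sizeof (Type)];
      unsigned length = 0;

      template <typename... Args>
      Type *push (Args&&... args)
      {
        if (length >= N) return nullptr;   /* allocation-failure stand-in */
        Type *p = reinterpret_cast<Type *> (storage) + length++;
        return new (p) Type (std::forward<Args> (args)...);  /* construct in place */
      }
    };

    int main ()
    {
      tiny_vec_t<pair_t, 4> v;
      pair_t *p = v.push (7u, -1);         /* no temporary pair_t needed */
      assert (p && p->first == 7 && p->second == -1);
      return 0;
    }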

+ 3 - 3
thirdparty/harfbuzz/src/hb-version.h

@@ -47,20 +47,20 @@ HB_BEGIN_DECLS
  *
  * The minor component of the library version available at compile-time.
  */
-#define HB_VERSION_MINOR 1
+#define HB_VERSION_MINOR 2
 /**
  * HB_VERSION_MICRO:
  *
  * The micro component of the library version available at compile-time.
  */
-#define HB_VERSION_MICRO 1
+#define HB_VERSION_MICRO 2
 
 /**
  * HB_VERSION_STRING:
  *
  * A string literal containing the library version available at compile-time.
  */
-#define HB_VERSION_STRING "8.1.1"
+#define HB_VERSION_STRING "8.2.2"
 
 /**
  * HB_VERSION_ATLEAST:
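
The version bump is observable both at compile time (via the macros above) and at run time. A small sketch using the standard version checks:

    #include <hb.h>
    #include <cstdio>

    int main ()
    {
    #if HB_VERSION_ATLEAST (8, 2, 2)
      /* Compile-time: headers are at least 8.2.2. */
      std::printf ("compiled against %s\n", HB_VERSION_STRING);
    #endif

      /* Run-time: the linked library may differ from the headers. */
      unsigned major, minor, micro;
      hb_version (&major, &minor, &micro);
      std::printf ("running against %u.%u.%u (%s)\n",
                   major, minor, micro, hb_version_string ());
      return 0;
    }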

Some files were not shown because too many files changed in this diff