@@ -402,16 +402,6 @@ struct hb_ot_apply_context_t :
 {
   struct matcher_t
   {
-    matcher_t () :
-             lookup_props (0),
-             mask (-1),
-             ignore_zwnj (false),
-             ignore_zwj (false),
-             per_syllable (false),
-             syllable {0},
-             match_func (nullptr),
-             match_data (nullptr) {}
-
     typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);

     void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
@@ -470,14 +460,14 @@ struct hb_ot_apply_context_t :
     }

     protected:
-    unsigned int lookup_props;
-    hb_mask_t mask;
-    bool ignore_zwnj;
-    bool ignore_zwj;
-    bool per_syllable;
-    uint8_t syllable;
-    match_func_t match_func;
-    const void *match_data;
+    unsigned int lookup_props = 0;
+    hb_mask_t mask = -1;
+    bool ignore_zwnj = false;
+    bool ignore_zwj = false;
+    bool per_syllable = false;
+    uint8_t syllable = 0;
+    match_func_t match_func = nullptr;
+    const void *match_data = nullptr;
   };

   struct skipping_iterator_t
@@ -528,11 +518,9 @@ struct hb_ot_apply_context_t :
 #ifndef HB_OPTIMIZE_SIZE
     HB_ALWAYS_INLINE
 #endif
-    void reset (unsigned int start_index_,
-                unsigned int num_items_)
+    void reset (unsigned int start_index_)
     {
       idx = start_index_;
-      num_items = num_items_;
       end = c->buffer->len;
       matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
     }
@@ -540,17 +528,14 @@ struct hb_ot_apply_context_t :
 #ifndef HB_OPTIMIZE_SIZE
     HB_ALWAYS_INLINE
 #endif
-    void reset_fast (unsigned int start_index_,
-                     unsigned int num_items_)
+    void reset_fast (unsigned int start_index_)
     {
       // Doesn't set end or syllable. Used by GPOS which doesn't care / change.
       idx = start_index_;
-      num_items = num_items_;
     }

     void reject ()
     {
-      num_items++;
       backup_glyph_data ();
     }

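A minimal call-site sketch of the narrowed reset API (identifiers as in the hunks above; the surrounding lookup code is elided, so this is illustrative rather than compilable on its own):

    // Illustrative only: reset() now takes just the start index.
    // reset() refreshes `end` and the syllable filter; reset_fast() only
    // repositions `idx`, which is enough for GPOS per the comment above.
    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (c->buffer->idx);         // general path
    // skippy_iter.reset_fast (c->buffer->idx); // GPOS fast path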
@@ -593,12 +578,7 @@ struct hb_ot_apply_context_t :
 #endif
     bool next (unsigned *unsafe_to = nullptr)
     {
-      assert (num_items > 0);
-      /* The alternate condition below is faster at string boundaries,
-       * but produces subpar "unsafe-to-concat" values. */
-      signed stop = (signed) end - (signed) num_items;
-      if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
-        stop = (signed) end - 1;
+      const signed stop = (signed) end - 1;
       while ((signed) idx < stop)
       {
        idx++;
@@ -606,7 +586,6 @@ struct hb_ot_apply_context_t :
        {
          case MATCH:
          {
-           num_items--;
            advance_glyph_data ();
            return true;
          }
@@ -629,12 +608,7 @@ struct hb_ot_apply_context_t :
 #endif
     bool prev (unsigned *unsafe_from = nullptr)
     {
-      assert (num_items > 0);
-      /* The alternate condition below is faster at string boundaries,
-       * but produces subpar "unsafe-to-concat" values. */
-      unsigned stop = num_items - 1;
-      if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
-        stop = 1 - 1;
+      const unsigned stop = 0;
       while (idx > stop)
       {
        idx--;
@@ -642,7 +616,6 @@ struct hb_ot_apply_context_t :
        {
          case MATCH:
          {
-           num_items--;
            advance_glyph_data ();
            return true;
          }
@@ -661,6 +634,7 @@ struct hb_ot_apply_context_t :
       return false;
     }

+    HB_ALWAYS_INLINE
     hb_codepoint_t
     get_glyph_data ()
     {
@@ -671,6 +645,7 @@ struct hb_ot_apply_context_t :
 #endif
       return 0;
     }
+    HB_ALWAYS_INLINE
     void
     advance_glyph_data ()
     {
@@ -699,7 +674,6 @@ struct hb_ot_apply_context_t :
     const HBUINT24 *match_glyph_data24;
 #endif

-    unsigned int num_items;
     unsigned int end;
   };

@@ -826,7 +800,7 @@ struct hb_ot_apply_context_t :
      * match_props has the set index.
      */
     if (match_props & LookupFlag::UseMarkFilteringSet)
-      return gdef.mark_set_covers (match_props >> 16, glyph);
+      return gdef_accel.mark_set_covers (match_props >> 16, glyph);

     /* The second byte of match_props has the meaning
      * "ignore marks of attachment type different than
@@ -1198,6 +1172,10 @@ static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
 }


+static inline bool match_always (hb_glyph_info_t &info HB_UNUSED, unsigned value HB_UNUSED, const void *data HB_UNUSED)
+{
+  return true;
+}
 static inline bool match_glyph (hb_glyph_info_t &info, unsigned value, const void *data HB_UNUSED)
 {
   return info.codepoint == value;
@@ -1218,6 +1196,28 @@ static inline bool match_class_cached (hb_glyph_info_t &info, unsigned value, co
     info.syllable() = klass;
   return klass == value;
 }
+static inline bool match_class_cached1 (hb_glyph_info_t &info, unsigned value, const void *data)
+{
+  unsigned klass = info.syllable() & 0x0F;
+  if (klass < 15)
+    return klass == value;
+  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
+  klass = class_def.get_class (info.codepoint);
+  if (likely (klass < 15))
+    info.syllable() = (info.syllable() & 0xF0) | klass;
+  return klass == value;
+}
+static inline bool match_class_cached2 (hb_glyph_info_t &info, unsigned value, const void *data)
+{
+  unsigned klass = (info.syllable() & 0xF0) >> 4;
+  if (klass < 15)
+    return klass == value;
+  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
+  klass = class_def.get_class (info.codepoint);
+  if (likely (klass < 15))
+    info.syllable() = (info.syllable() & 0x0F) | (klass << 4);
+  return klass == value;
+}
 static inline bool match_coverage (hb_glyph_info_t &info, unsigned value, const void *data)
 {
   Offset16To<Coverage> coverage;
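A standalone illustration of the nibble layout shared by the two new functions: info.syllable() is repurposed as a one-byte, two-slot class cache, where 15 in a nibble means "not cached" and classes of 15 or more are simply never cached (the values below are hypothetical, not from the patch):

    #include <cassert>
    #include <cstdint>

    int main ()
    {
      uint8_t syl = 0xFF;                 // both nibbles 15: nothing cached yet

      // match_class_cached1 uses the low nibble: a miss resolves the class
      // (say 7) and stores it, leaving the high nibble untouched.
      assert ((syl & 0x0F) == 15);
      syl = (syl & 0xF0) | 7;

      // match_class_cached2 uses the high nibble, independently of the low one.
      assert (((syl & 0xF0) >> 4) == 15);
      syl = (syl & 0x0F) | (3 << 4);

      assert (syl == 0x37);               // one byte now caches two classes: 3 and 7
      return 0;
    }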
@@ -1265,7 +1265,7 @@ static bool match_input (hb_ot_apply_context_t *c,
   hb_buffer_t *buffer = c->buffer;

   hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
-  skippy_iter.reset (buffer->idx, count - 1);
+  skippy_iter.reset (buffer->idx);
   skippy_iter.set_match_func (match_func, match_data);
   skippy_iter.set_glyph_data (input);

@@ -1505,7 +1505,7 @@ static bool match_backtrack (hb_ot_apply_context_t *c,
   TRACE_APPLY (nullptr);

   hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
-  skippy_iter.reset (c->buffer->backtrack_len (), count);
+  skippy_iter.reset (c->buffer->backtrack_len ());
   skippy_iter.set_match_func (match_func, match_data);
   skippy_iter.set_glyph_data (backtrack);

@@ -1538,7 +1538,7 @@ static bool match_lookahead (hb_ot_apply_context_t *c,
   TRACE_APPLY (nullptr);

   hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
-  skippy_iter.reset (start_index - 1, count);
+  skippy_iter.reset (start_index - 1);
   skippy_iter.set_match_func (match_func, match_data);
   skippy_iter.set_glyph_data (lookahead);

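With the item count gone from the iterator, the number of glyphs to match is bounded by the caller's own loop rather than by reset(); roughly, the pattern inside match_input looks like this (a simplified sketch, not the verbatim function body):

    // Sketch: the caller bounds the walk; the iterator itself only stops at
    // the buffer edge (end - 1 going forward, 0 going backward).
    skippy_iter.reset (buffer->idx);
    skippy_iter.set_match_func (match_func, match_data);
    skippy_iter.set_glyph_data (input);
    for (unsigned int i = 1; i < count; i++)
    {
      unsigned unsafe_to;
      if (!skippy_iter.next (&unsafe_to))
      {
        c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
        return_trace (false);   // ran out of glyphs before matching `count` items
      }
      match_positions[i] = skippy_iter.idx;
    }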
@@ -1913,12 +1913,13 @@ static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
 }

 template <typename HBUINT>
-static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
-                                         unsigned int inputCount, /* Including the first glyph (not matched) */
-                                         const HBUINT input[], /* Array of input values--start with second glyph */
-                                         unsigned int lookupCount,
-                                         const LookupRecord lookupRecord[],
-                                         const ContextApplyLookupContext &lookup_context)
+HB_ALWAYS_INLINE
+static bool context_apply_lookup (hb_ot_apply_context_t *c,
+                                  unsigned int inputCount, /* Including the first glyph (not matched) */
+                                  const HBUINT input[], /* Array of input values--start with second glyph */
+                                  unsigned int lookupCount,
+                                  const LookupRecord lookupRecord[],
+                                  const ContextApplyLookupContext &lookup_context)
 {
   unsigned match_end = 0;
   unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
@@ -1944,6 +1945,9 @@ static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
 template <typename Types>
 struct Rule
 {
+  template <typename T>
+  friend struct RuleSet;
+
   bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
   {
     return context_intersects (glyphs,
@@ -2065,7 +2069,6 @@ struct Rule
                                  * design order */
   public:
   DEFINE_SIZE_ARRAY (4, inputZ);
-  DEFINE_SIZE_MAX (65536 * (Types::HBUINT::static_size + LookupRecord::static_size));
 };

 template <typename Types>
@@ -2131,13 +2134,105 @@ struct RuleSet
               const ContextApplyLookupContext &lookup_context) const
   {
     TRACE_APPLY (this);
-    return_trace (
-    + hb_iter (rule)
-    | hb_map (hb_add (this))
-    | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
-    | hb_any
-    )
-    ;
+
+    unsigned num_rules = rule.len;
+
+#ifndef HB_NO_OT_RULESETS_FAST_PATH
+    if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
+#endif
+    {
+    slow:
+      return_trace (
+      + hb_iter (rule)
+      | hb_map (hb_add (this))
+      | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
+      | hb_any
+      )
+      ;
+    }
+
+    /* This version is optimized for speed by matching the first & second
+     * components of the rule here, instead of calling into the matching code.
+     *
+     * Replicated from LigatureSet::apply(). */
+
+    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
+    skippy_iter.reset (c->buffer->idx);
+    skippy_iter.set_match_func (match_always, nullptr);
+    skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
+    unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
+    hb_glyph_info_t *first = nullptr, *second = nullptr;
+    bool matched = skippy_iter.next ();
+    if (likely (matched))
+    {
+      first = &c->buffer->info[skippy_iter.idx];
+      unsafe_to1 = skippy_iter.idx + 1;
+
+      if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
+      {
+        /* Can't use the fast path if eg. the next char is a default-ignorable
+         * or other skippable. */
+        goto slow;
+      }
+    }
+    else
+    {
+      /* Failed to match a next glyph. Only try applying rules that have
+       * no further input. */
+      return_trace (
+      + hb_iter (rule)
+      | hb_map (hb_add (this))
+      | hb_filter ([&] (const Rule &_) { return _.inputCount <= 1; })
+      | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
+      | hb_any
+      )
+      ;
+    }
+    matched = skippy_iter.next ();
+    if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])))
+    {
+      second = &c->buffer->info[skippy_iter.idx];
+      unsafe_to2 = skippy_iter.idx + 1;
+    }
+
+    auto match_input = lookup_context.funcs.match;
+    auto *input_data = lookup_context.match_data;
+    for (unsigned int i = 0; i < num_rules; i++)
+    {
+      const auto &r = this+rule.arrayZ[i];
+
+      const auto &input = r.inputZ;
+
+      if (r.inputCount <= 1 ||
+          (!match_input ||
+           match_input (*first, input.arrayZ[0], input_data)))
+      {
+        if (!second ||
+            (r.inputCount <= 2 ||
+             (!match_input ||
+              match_input (*second, input.arrayZ[1], input_data)))
+           )
+        {
+          if (r.apply (c, lookup_context))
+          {
+            if (unsafe_to != (unsigned) -1)
+              c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
+            return_trace (true);
+          }
+        }
+        else
+          unsafe_to = unsafe_to2;
+      }
+      else
+      {
+        if (unsafe_to == (unsigned) -1)
+          unsafe_to = unsafe_to1;
+      }
+    }
+    if (likely (unsafe_to != (unsigned) -1))
+      c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
+
+    return_trace (false);
   }

   bool subset (hb_subset_context_t *c,
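Restated compactly, the fast path above boils down to a pre-filter over the first one or two input components before paying for a full match (a non-compilable summary; `rules`, `matches`, and `apply_rule` are stand-ins, and the unsafe-to-concat bookkeeping from the hunk is omitted):

    // Summary only; see the hunk above for the real logic.
    for (const auto &r : rules)
    {
      if (r.inputCount > 1 && !matches (*first, r.input[0]))            continue; // cheap reject on 1st glyph
      if (second && r.inputCount > 2 && !matches (*second, r.input[1])) continue; // cheap reject on 2nd glyph
      if (apply_rule (r)) return true;   // only now run the full match-and-apply
    }
    return false;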
@@ -2516,11 +2611,7 @@ struct ContextFormat2_5
     if (cached && c->buffer->cur().syllable() < 255)
       index = c->buffer->cur().syllable ();
     else
-    {
       index = class_def.get_class (c->buffer->cur().codepoint);
-      if (cached && index < 255)
-        c->buffer->cur().syllable() = index;
-    }
     const RuleSet &rule_set = this+ruleSet[index];
     return_trace (rule_set.apply (c, lookup_context));
   }
@@ -2914,16 +3005,17 @@ static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c
 }

 template <typename HBUINT>
-static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
-                                               unsigned int backtrackCount,
-                                               const HBUINT backtrack[],
-                                               unsigned int inputCount, /* Including the first glyph (not matched) */
-                                               const HBUINT input[], /* Array of input values--start with second glyph */
-                                               unsigned int lookaheadCount,
-                                               const HBUINT lookahead[],
-                                               unsigned int lookupCount,
-                                               const LookupRecord lookupRecord[],
-                                               const ChainContextApplyLookupContext &lookup_context)
+HB_ALWAYS_INLINE
+static bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
+                                        unsigned int backtrackCount,
+                                        const HBUINT backtrack[],
+                                        unsigned int inputCount, /* Including the first glyph (not matched) */
+                                        const HBUINT input[], /* Array of input values--start with second glyph */
+                                        unsigned int lookaheadCount,
+                                        const HBUINT lookahead[],
+                                        unsigned int lookupCount,
+                                        const LookupRecord lookupRecord[],
+                                        const ChainContextApplyLookupContext &lookup_context)
 {
   unsigned end_index = c->buffer->idx;
   unsigned match_end = 0;
@@ -2962,6 +3054,9 @@ static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
 template <typename Types>
 struct ChainRule
 {
+  template <typename T>
+  friend struct ChainRuleSet;
+
   bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
   {
     const auto &input = StructAfter<decltype (inputX)> (backtrack);
@@ -3148,7 +3243,6 @@ struct ChainRule
                                  * design order) */
   public:
   DEFINE_SIZE_MIN (8);
-  DEFINE_SIZE_MAX (65536 * (3 * Types::HBUINT::static_size + LookupRecord::static_size));
 };

 template <typename Types>
@@ -3211,13 +3305,119 @@ struct ChainRuleSet
               const ChainContextApplyLookupContext &lookup_context) const
   {
     TRACE_APPLY (this);
-    return_trace (
-    + hb_iter (rule)
-    | hb_map (hb_add (this))
-    | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
-    | hb_any
-    )
-    ;
+
+    unsigned num_rules = rule.len;
+
+#ifndef HB_NO_OT_RULESETS_FAST_PATH
+    if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
+#endif
+    {
+    slow:
+      return_trace (
+      + hb_iter (rule)
+      | hb_map (hb_add (this))
+      | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
+      | hb_any
+      )
+      ;
+    }
+
+    /* This version is optimized for speed by matching the first & second
+     * components of the rule here, instead of calling into the matching code.
+     *
+     * Replicated from LigatureSet::apply(). */
+
+    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
+    skippy_iter.reset (c->buffer->idx);
+    skippy_iter.set_match_func (match_always, nullptr);
+    skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
+    unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
+    hb_glyph_info_t *first = nullptr, *second = nullptr;
+    bool matched = skippy_iter.next ();
+    if (likely (matched))
+    {
+      first = &c->buffer->info[skippy_iter.idx];
+      unsafe_to1 = skippy_iter.idx + 1;
+
+      if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
+      {
+        /* Can't use the fast path if eg. the next char is a default-ignorable
+         * or other skippable. */
+        goto slow;
+      }
+    }
+    else
+    {
+      /* Failed to match a next glyph. Only try applying rules that have
+       * no further input and lookahead. */
+      return_trace (
+      + hb_iter (rule)
+      | hb_map (hb_add (this))
+      | hb_filter ([&] (const ChainRule &_)
+                   {
+                     const auto &input = StructAfter<decltype (_.inputX)> (_.backtrack);
+                     const auto &lookahead = StructAfter<decltype (_.lookaheadX)> (input);
+                     return input.lenP1 <= 1 && lookahead.len == 0;
+                   })
+      | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
+      | hb_any
+      )
+      ;
+    }
+    matched = skippy_iter.next ();
+    if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])))
+    {
+      second = &c->buffer->info[skippy_iter.idx];
+      unsafe_to2 = skippy_iter.idx + 1;
+    }
+
+    auto match_input = lookup_context.funcs.match[1];
+    auto match_lookahead = lookup_context.funcs.match[2];
+    auto *input_data = lookup_context.match_data[1];
+    auto *lookahead_data = lookup_context.match_data[2];
+    for (unsigned int i = 0; i < num_rules; i++)
+    {
+      const auto &r = this+rule.arrayZ[i];
+
+      const auto &input = StructAfter<decltype (r.inputX)> (r.backtrack);
+      const auto &lookahead = StructAfter<decltype (r.lookaheadX)> (input);
+
+      unsigned lenP1 = hb_max ((unsigned) input.lenP1, 1u);
+      if (lenP1 > 1 ?
+          (!match_input ||
+           match_input (*first, input.arrayZ[0], input_data))
+          :
+          (!lookahead.len || !match_lookahead ||
+           match_lookahead (*first, lookahead.arrayZ[0], lookahead_data)))
+      {
+        if (!second ||
+            (lenP1 > 2 ?
+             (!match_input ||
+              match_input (*second, input.arrayZ[1], input_data))
+             :
+             (lookahead.len <= 2 - lenP1 || !match_lookahead ||
+              match_lookahead (*second, lookahead.arrayZ[2 - lenP1], lookahead_data))))
+        {
+          if (r.apply (c, lookup_context))
+          {
+            if (unsafe_to != (unsigned) -1)
+              c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
+            return_trace (true);
+          }
+        }
+        else
+          unsafe_to = unsafe_to2;
+      }
+      else
+      {
+        if (unsafe_to == (unsigned) -1)
+          unsafe_to = unsafe_to1;
+      }
+    }
+    if (likely (unsafe_to != (unsigned) -1))
+      c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
+
+    return_trace (false);
   }

   bool subset (hb_subset_context_t *c,
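The `2 - lenP1` index above comes from the relative glyph positions: position 0 is the rule set's own glyph, positions 1..lenP1-1 are covered by input.arrayZ, and positions at or past lenP1 fall into lookahead.arrayZ. A standalone check of that arithmetic (hypothetical rule shapes, not data from the patch):

    #include <cassert>

    int main ()
    {
      // For a fetched glyph at relative position `pos` with pos >= lenP1,
      // the class to compare against is lookahead.arrayZ[pos - lenP1].
      auto lookahead_index = [] (unsigned pos, unsigned lenP1) { return pos - lenP1; };

      assert (lookahead_index (2, 1) == 1);  // lenP1 == 1: `second` vs lookahead[1]
      assert (lookahead_index (2, 2) == 0);  // lenP1 == 2: `second` vs lookahead[0]
      assert (lookahead_index (1, 1) == 0);  // lenP1 == 1: `first`  vs lookahead[0]
      return 0;
    }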
@@ -3616,26 +3816,22 @@ struct ChainContextFormat2_5
     const ClassDef &input_class_def = this+inputClassDef;
     const ClassDef &lookahead_class_def = this+lookaheadClassDef;

-    /* For ChainContextFormat2_5 we cache the LookaheadClassDef instead of InputClassDef.
-     * The reason is that most heavy fonts want to identify a glyph in context and apply
-     * a lookup to it. In this scenario, the length of the input sequence is one, whereas
-     * the lookahead / backtrack are typically longer. The one glyph in input sequence is
-     * looked-up below and no input glyph is looked up in individual rules, whereas the
-     * lookahead and backtrack glyphs are tried. Since we match lookahead before backtrack,
-     * we should cache lookahead. This decisions showed a 20% improvement in shaping of
-     * the Gulzar font.
-     */
-
+    /* match_class_cached1 is slightly faster. Use it for lookahead,
+     * which is typically longer. */
     struct ChainContextApplyLookupContext lookup_context = {
-      {{cached && &backtrack_class_def == &lookahead_class_def ? match_class_cached : match_class,
-        cached && &input_class_def == &lookahead_class_def ? match_class_cached : match_class,
-        cached ? match_class_cached : match_class}},
+      {{cached && &backtrack_class_def == &lookahead_class_def ? match_class_cached1 : match_class,
+        cached ? match_class_cached2 : match_class,
+        cached ? match_class_cached1 : match_class}},
       {&backtrack_class_def,
        &input_class_def,
        &lookahead_class_def}
     };

-    index = input_class_def.get_class (c->buffer->cur().codepoint);
+    // Note: Corresponds to match_class_cached2
+    if (cached && ((c->buffer->cur().syllable() & 0xF0) >> 4) < 15)
+      index = (c->buffer->cur().syllable () & 0xF0) >> 4;
+    else
+      index = input_class_def.get_class (c->buffer->cur().codepoint);
     const ChainRuleSet &rule_set = this+ruleSet[index];
     return_trace (rule_set.apply (c, lookup_context));
   }
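To spell out the cache assignment in this hunk: the input class goes through match_class_cached2 (high nibble of syllable()), lookahead through match_class_cached1 (low nibble), and backtrack reuses match_class_cached1 only when backtrackClassDef and lookaheadClassDef resolve to the same table; the if/else that computes `index` mirrors match_class_cached2's layout. A standalone restatement of that dispatch condition (values hypothetical):

    #include <cassert>
    #include <cstdint>

    int main ()
    {
      uint8_t syllable = 0x3F;                  // high nibble 3: input class already cached
      unsigned cached = (syllable & 0xF0) >> 4;
      assert (cached < 15 && cached == 3);      // dispatch can use the cached class directly

      syllable = 0xF0 | (syllable & 0x0F);      // high nibble 15: nothing cached
      assert (((syllable & 0xF0) >> 4) == 15);  // dispatch falls back to get_class()
      return 0;
    }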
@@ -4139,6 +4335,9 @@ struct hb_ot_layout_lookup_accelerator_t
   bool may_have (hb_codepoint_t g) const
   { return digest.may_have (g); }

+#ifndef HB_OPTIMIZE_SIZE
+  HB_ALWAYS_INLINE
+#endif
   bool apply (hb_ot_apply_context_t *c, unsigned subtables_count, bool use_cache) const
   {
 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE