Mirror of https://github.com/juce-framework/JUCE.git (synced 2026-01-10 23:44:24 +00:00)

Upgrade to harfbuzz 9.0.0
commit 18fa0bfa15, parent 5b30f2571e
117 changed files with 8186 additions and 2535 deletions
@@ -68,11 +68,11 @@ public:
hb_font_t *font;
unsigned int palette_index;
hb_color_t foreground;
VarStoreInstancer &instancer;
ItemVarStoreInstancer &instancer;
hb_map_t current_glyphs;
hb_map_t current_layers;
int depth_left = HB_MAX_NESTING_LEVEL;
int edge_count = HB_COLRV1_MAX_EDGE_COUNT;
int edge_count = HB_MAX_GRAPH_EDGE_COUNT;

hb_paint_context_t (const void *base_,
hb_paint_funcs_t *funcs_,

@@ -80,7 +80,7 @@ public:
hb_font_t *font_,
unsigned int palette_,
hb_color_t foreground_,
VarStoreInstancer &instancer_) :
ItemVarStoreInstancer &instancer_) :
base (base_),
funcs (funcs_),
data (data_),

@@ -159,23 +159,35 @@ struct hb_colrv1_closure_context_t :
void add_palette_index (unsigned palette_index)
{ palette_indices->add (palette_index); }

void add_var_idxes (unsigned first_var_idx, unsigned num_idxes)
{
if (!num_idxes || first_var_idx == VarIdx::NO_VARIATION) return;
variation_indices->add_range (first_var_idx, first_var_idx + num_idxes - 1);
}

public:
const void *base;
hb_set_t visited_paint;
hb_set_t *glyphs;
hb_set_t *layer_indices;
hb_set_t *palette_indices;
hb_set_t *variation_indices;
unsigned num_var_idxes;
unsigned nesting_level_left;

hb_colrv1_closure_context_t (const void *base_,
hb_set_t *glyphs_,
hb_set_t *layer_indices_,
hb_set_t *palette_indices_,
hb_set_t *variation_indices_,
unsigned num_var_idxes_ = 1,
unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
base (base_),
glyphs (glyphs_),
layer_indices (layer_indices_),
palette_indices (palette_indices_),
variation_indices (variation_indices_),
num_var_idxes (num_var_idxes_),
nesting_level_left (nesting_level_left_)
{}
};

@@ -242,18 +254,33 @@ struct Variable
}

void closurev1 (hb_colrv1_closure_context_t* c) const
{ value.closurev1 (c); }
{
c->num_var_idxes = 0;
// update c->num_var_idxes during value closure
value.closurev1 (c);
c->add_var_idxes (varIdxBase, c->num_var_idxes);
}

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
if (!value.subset (c, instancer, varIdxBase)) return_trace (false);
if (c->plan->all_axes_pinned)
return_trace (true);

//TODO: update varIdxBase for partial-instancing
return_trace (c->serializer->embed (varIdxBase));
VarIdx new_varidx;
new_varidx = varIdxBase;
if (varIdxBase != VarIdx::NO_VARIATION)
{
hb_pair_t<unsigned, int> *new_varidx_delta;
if (!c->plan->colrv1_variation_idx_delta_map.has (varIdxBase, &new_varidx_delta))
return_trace (false);

new_varidx = hb_first (*new_varidx_delta);
}

return_trace (c->serializer->embed (new_varidx));
}

bool sanitize (hb_sanitize_context_t *c) const
@@ -270,7 +297,7 @@ struct Variable

void get_color_stop (hb_paint_context_t *c,
hb_color_stop_t *stop,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
value.get_color_stop (c, stop, varIdxBase, instancer);
}

@@ -305,7 +332,7 @@ struct NoVariable
{ value.closurev1 (c); }

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
return_trace (value.subset (c, instancer, varIdxBase));

@@ -325,7 +352,7 @@ struct NoVariable

void get_color_stop (hb_paint_context_t *c,
hb_color_stop_t *stop,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
value.get_color_stop (c, stop, VarIdx::NO_VARIATION, instancer);
}

@@ -345,10 +372,13 @@ struct NoVariable
struct ColorStop
{
void closurev1 (hb_colrv1_closure_context_t* c) const
{ c->add_palette_index (paletteIndex); }
{
c->add_palette_index (paletteIndex);
c->num_var_idxes = 2;
}

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -374,7 +404,7 @@ struct ColorStop
void get_color_stop (hb_paint_context_t *c,
hb_color_stop_t *out,
uint32_t varIdx,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
out->offset = stopOffset.to_float(instancer (varIdx, 0));
out->color = c->get_color (paletteIndex,

@@ -410,7 +440,7 @@ struct ColorLine
}

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);

@@ -439,7 +469,7 @@ struct ColorLine
unsigned int start,
unsigned int *count,
hb_color_stop_t *color_stops,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
unsigned int len = stops.len;

@@ -542,8 +572,11 @@ struct Affine2x3
return_trace (c->check_struct (this));
}

void closurev1 (hb_colrv1_closure_context_t* c) const
{ c->num_var_idxes = 6; }

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);
@@ -588,7 +621,7 @@ struct PaintColrLayers
void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer HB_UNUSED) const
const ItemVarStoreInstancer &instancer HB_UNUSED) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);

@@ -617,10 +650,13 @@ struct PaintColrLayers
struct PaintSolid
{
void closurev1 (hb_colrv1_closure_context_t* c) const
{ c->add_palette_index (paletteIndex); }
{
c->add_palette_index (paletteIndex);
c->num_var_idxes = 1;
}

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -666,10 +702,13 @@ template <template<typename> class Var>
struct PaintLinearGradient
{
void closurev1 (hb_colrv1_closure_context_t* c) const
{ (this+colorLine).closurev1 (c); }
{
(this+colorLine).closurev1 (c);
c->num_var_idxes = 6;
}

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -733,10 +772,13 @@ template <template<typename> class Var>
struct PaintRadialGradient
{
void closurev1 (hb_colrv1_closure_context_t* c) const
{ (this+colorLine).closurev1 (c); }
{
(this+colorLine).closurev1 (c);
c->num_var_idxes = 6;
}

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -800,10 +842,13 @@ template <template<typename> class Var>
struct PaintSweepGradient
{
void closurev1 (hb_colrv1_closure_context_t* c) const
{ (this+colorLine).closurev1 (c); }
{
(this+colorLine).closurev1 (c);
c->num_var_idxes = 4;
}

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -863,7 +908,7 @@ struct PaintGlyph
void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);

@@ -906,7 +951,7 @@ struct PaintColrGlyph
void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer HB_UNUSED) const
const ItemVarStoreInstancer &instancer HB_UNUSED) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);
@@ -936,7 +981,7 @@ struct PaintTransform
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);

@@ -975,7 +1020,7 @@ struct PaintTranslate
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1024,7 +1069,7 @@ struct PaintScale
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1073,7 +1118,7 @@ struct PaintScaleAroundCenter
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1132,7 +1177,7 @@ struct PaintScaleUniform
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1176,7 +1221,7 @@ struct PaintScaleUniformAroundCenter
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1232,7 +1277,7 @@ struct PaintRotate
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1276,7 +1321,7 @@ struct PaintRotateAroundCenter
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1332,7 +1377,7 @@ struct PaintSkew
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1381,7 +1426,7 @@ struct PaintSkewAroundCenter
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer,
const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1440,7 +1485,7 @@ struct PaintComposite
void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
const VarStoreInstancer &instancer) const
const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);
|
|
@ -1491,7 +1536,7 @@ struct ClipBoxFormat1
|
|||
return_trace (c->check_struct (this));
|
||||
}
|
||||
|
||||
void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer HB_UNUSED) const
|
||||
void get_clip_box (ClipBoxData &clip_box, const ItemVarStoreInstancer &instancer HB_UNUSED) const
|
||||
{
|
||||
clip_box.xMin = xMin;
|
||||
clip_box.yMin = yMin;
|
||||
|
|
@ -1500,7 +1545,7 @@ struct ClipBoxFormat1
|
|||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const VarStoreInstancer &instancer,
|
||||
const ItemVarStoreInstancer &instancer,
|
||||
uint32_t varIdxBase) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
|
|
@ -1533,7 +1578,7 @@ struct ClipBoxFormat1
|
|||
|
||||
struct ClipBoxFormat2 : Variable<ClipBoxFormat1>
|
||||
{
|
||||
void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer) const
|
||||
void get_clip_box (ClipBoxData &clip_box, const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
value.get_clip_box(clip_box, instancer);
|
||||
if (instancer)
|
||||
|
|
@ -1544,12 +1589,15 @@ struct ClipBoxFormat2 : Variable<ClipBoxFormat1>
|
|||
clip_box.yMax += roundf (instancer (varIdxBase, 3));
|
||||
}
|
||||
}
|
||||
|
||||
void closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ c->variation_indices->add_range (varIdxBase, varIdxBase + 3); }
|
||||
};
|
||||
|
||||
struct ClipBox
|
||||
{
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const VarStoreInstancer &instancer) const
|
||||
const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
switch (u.format) {
|
||||
|
|
@ -1559,6 +1607,14 @@ struct ClipBox
|
|||
}
|
||||
}
|
||||
|
||||
void closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{
|
||||
switch (u.format) {
|
||||
case 2: u.format2.closurev1 (c);
|
||||
default:return;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
|
|
@ -1572,7 +1628,7 @@ struct ClipBox
|
|||
}
|
||||
|
||||
bool get_extents (hb_glyph_extents_t *extents,
|
||||
const VarStoreInstancer &instancer) const
|
||||
const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
ClipBoxData clip_box;
|
||||
switch (u.format) {
|
||||
|
|
@ -1606,9 +1662,15 @@ struct ClipRecord
|
|||
int cmp (hb_codepoint_t g) const
|
||||
{ return g < startGlyphID ? -1 : g <= endGlyphID ? 0 : +1; }
|
||||
|
||||
void closurev1 (hb_colrv1_closure_context_t* c, const void *base) const
|
||||
{
|
||||
if (!c->glyphs->intersects (startGlyphID, endGlyphID)) return;
|
||||
(base+clipBox).closurev1 (c);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const void *base,
|
||||
const VarStoreInstancer &instancer) const
|
||||
const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
|
|
@ -1625,7 +1687,7 @@ struct ClipRecord
|
|||
|
||||
bool get_extents (hb_glyph_extents_t *extents,
|
||||
const void *base,
|
||||
const VarStoreInstancer &instancer) const
|
||||
const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
return (base+clipBox).get_extents (extents, instancer);
|
||||
}
|
||||
|
|
@ -1642,7 +1704,7 @@ DECLARE_NULL_NAMESPACE_BYTES (OT, ClipRecord);
|
|||
struct ClipList
|
||||
{
|
||||
unsigned serialize_clip_records (hb_subset_context_t *c,
|
||||
const VarStoreInstancer &instancer,
|
||||
const ItemVarStoreInstancer &instancer,
|
||||
const hb_set_t& gids,
|
||||
const hb_map_t& gid_offset_map) const
|
||||
{
|
||||
|
|
@ -1695,7 +1757,7 @@ struct ClipList
|
|||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const VarStoreInstancer &instancer) const
|
||||
const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
|
|
@ -1735,7 +1797,7 @@ struct ClipList
|
|||
bool
|
||||
get_extents (hb_codepoint_t gid,
|
||||
hb_glyph_extents_t *extents,
|
||||
const VarStoreInstancer &instancer) const
|
||||
const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
auto *rec = clips.as_array ().bsearch (gid);
|
||||
if (rec)
|
||||
|
|
@ -1855,7 +1917,7 @@ struct BaseGlyphPaintRecord
|
|||
|
||||
bool serialize (hb_serialize_context_t *s, const hb_map_t* glyph_map,
|
||||
const void* src_base, hb_subset_context_t *c,
|
||||
const VarStoreInstancer &instancer) const
|
||||
const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
auto *out = s->embed (this);
|
||||
|
|
@ -1884,7 +1946,7 @@ struct BaseGlyphPaintRecord
|
|||
struct BaseGlyphList : SortedArray32Of<BaseGlyphPaintRecord>
|
||||
{
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const VarStoreInstancer &instancer) const
|
||||
const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (this);
|
||||
|
|
@ -1916,7 +1978,7 @@ struct LayerList : Array32OfOffset32To<Paint>
|
|||
{ return this+(*this)[i]; }
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const VarStoreInstancer &instancer) const
|
||||
const ItemVarStoreInstancer &instancer) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (this);
|
||||
|
|
@ -1941,6 +2003,76 @@ struct LayerList : Array32OfOffset32To<Paint>
|
|||
}
|
||||
};
|
||||
|
||||
struct delta_set_index_map_subset_plan_t
|
||||
{
|
||||
unsigned get_inner_bit_count () const { return inner_bit_count; }
|
||||
unsigned get_width () const { return ((outer_bit_count + inner_bit_count + 7) / 8); }
|
||||
hb_array_t<const uint32_t> get_output_map () const { return output_map.as_array (); }
|
||||
|
||||
delta_set_index_map_subset_plan_t (const hb_map_t &new_deltaset_idx_varidx_map)
|
||||
{
|
||||
map_count = 0;
|
||||
outer_bit_count = 0;
|
||||
inner_bit_count = 1;
|
||||
output_map.init ();
|
||||
|
||||
/* search backwards */
|
||||
unsigned count = new_deltaset_idx_varidx_map.get_population ();
|
||||
if (!count) return;
|
||||
|
||||
unsigned last_idx = (unsigned)-1;
|
||||
unsigned last_varidx = (unsigned)-1;
|
||||
|
||||
for (unsigned i = count; i; i--)
|
||||
{
|
||||
unsigned delta_set_idx = i - 1;
|
||||
unsigned var_idx = new_deltaset_idx_varidx_map.get (delta_set_idx);
|
||||
if (i == count)
|
||||
{
|
||||
last_idx = delta_set_idx;
|
||||
last_varidx = var_idx;
|
||||
continue;
|
||||
}
|
||||
if (var_idx != last_varidx)
|
||||
break;
|
||||
last_idx = delta_set_idx;
|
||||
}
|
||||
|
||||
map_count = last_idx + 1;
|
||||
}
|
||||
|
||||
bool remap (const hb_map_t &new_deltaset_idx_varidx_map)
|
||||
{
|
||||
/* recalculate bit_count */
|
||||
outer_bit_count = 1;
|
||||
inner_bit_count = 1;
|
||||
|
||||
if (unlikely (!output_map.resize (map_count, false))) return false;
|
||||
|
||||
for (unsigned idx = 0; idx < map_count; idx++)
|
||||
{
|
||||
uint32_t *var_idx;
|
||||
if (!new_deltaset_idx_varidx_map.has (idx, &var_idx)) return false;
|
||||
output_map.arrayZ[idx] = *var_idx;
|
||||
|
||||
unsigned outer = (*var_idx) >> 16;
|
||||
unsigned bit_count = (outer == 0) ? 1 : hb_bit_storage (outer);
|
||||
outer_bit_count = hb_max (bit_count, outer_bit_count);
|
||||
|
||||
unsigned inner = (*var_idx) & 0xFFFF;
|
||||
bit_count = (inner == 0) ? 1 : hb_bit_storage (inner);
|
||||
inner_bit_count = hb_max (bit_count, inner_bit_count);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private:
|
||||
unsigned map_count;
|
||||
unsigned outer_bit_count;
|
||||
unsigned inner_bit_count;
|
||||
hb_vector_t<uint32_t> output_map;
|
||||
};
|
||||
|
||||
struct COLR
|
||||
{
|
||||
static constexpr hb_tag_t tableTag = HB_OT_TAG_COLR;
|
||||
|
|
@ -1992,8 +2124,22 @@ struct COLR
|
|||
|
||||
void closure_forV1 (hb_set_t *glyphset,
|
||||
hb_set_t *layer_indices,
|
||||
hb_set_t *palette_indices) const
|
||||
{ colr->closure_forV1 (glyphset, layer_indices, palette_indices); }
|
||||
hb_set_t *palette_indices,
|
||||
hb_set_t *variation_indices,
|
||||
hb_set_t *delta_set_indices) const
|
||||
{ colr->closure_forV1 (glyphset, layer_indices, palette_indices, variation_indices, delta_set_indices); }
|
||||
|
||||
bool has_var_store () const
|
||||
{ return colr->has_var_store (); }
|
||||
|
||||
const ItemVariationStore &get_var_store () const
|
||||
{ return colr->get_var_store (); }
|
||||
|
||||
bool has_delta_set_index_map () const
|
||||
{ return colr->has_delta_set_index_map (); }
|
||||
|
||||
const DeltaSetIndexMap &get_delta_set_index_map () const
|
||||
{ return colr->get_delta_set_index_map (); }
|
||||
|
||||
private:
|
||||
hb_blob_ptr_t<COLR> colr;
|
||||
|
|
@ -2030,14 +2176,16 @@ struct COLR
|
|||
|
||||
void closure_forV1 (hb_set_t *glyphset,
|
||||
hb_set_t *layer_indices,
|
||||
hb_set_t *palette_indices) const
|
||||
hb_set_t *palette_indices,
|
||||
hb_set_t *variation_indices,
|
||||
hb_set_t *delta_set_indices) const
|
||||
{
|
||||
if (version != 1) return;
|
||||
hb_barrier ();
|
||||
|
||||
hb_set_t visited_glyphs;
|
||||
|
||||
hb_colrv1_closure_context_t c (this, &visited_glyphs, layer_indices, palette_indices);
|
||||
hb_colrv1_closure_context_t c (this, &visited_glyphs, layer_indices, palette_indices, variation_indices);
|
||||
const BaseGlyphList &baseglyph_paintrecords = this+baseGlyphList;
|
||||
|
||||
for (const BaseGlyphPaintRecord &baseglyph_paintrecord: baseglyph_paintrecords.iter ())
|
||||
|
|
@ -2049,6 +2197,22 @@ struct COLR
|
|||
paint.dispatch (&c);
|
||||
}
|
||||
hb_set_union (glyphset, &visited_glyphs);
|
||||
|
||||
const ClipList &cliplist = this+clipList;
|
||||
c.glyphs = glyphset;
|
||||
for (const ClipRecord &clip_record : cliplist.clips.iter())
|
||||
clip_record.closurev1 (&c, &cliplist);
|
||||
|
||||
// if a DeltaSetIndexMap is included, collected variation indices are
|
||||
// actually delta set indices, we need to map them into variation indices
|
||||
if (has_delta_set_index_map ())
|
||||
{
|
||||
const DeltaSetIndexMap &var_idx_map = this+varIdxMap;
|
||||
delta_set_indices->set (*variation_indices);
|
||||
variation_indices->clear ();
|
||||
for (unsigned delta_set_idx : *delta_set_indices)
|
||||
variation_indices->add (var_idx_map.map (delta_set_idx));
|
||||
}
|
||||
}
|
||||
|
||||
const LayerList& get_layerList () const
|
||||
|
|
@ -2057,6 +2221,18 @@ struct COLR
|
|||
const BaseGlyphList& get_baseglyphList () const
|
||||
{ return (this+baseGlyphList); }
|
||||
|
||||
bool has_var_store () const
|
||||
{ return version >= 1 && varStore != 0; }
|
||||
|
||||
bool has_delta_set_index_map () const
|
||||
{ return version >= 1 && varIdxMap != 0; }
|
||||
|
||||
const DeltaSetIndexMap &get_delta_set_index_map () const
|
||||
{ return (version == 0 || varIdxMap == 0) ? Null (DeltaSetIndexMap) : this+varIdxMap; }
|
||||
|
||||
const ItemVariationStore &get_var_store () const
|
||||
{ return (version == 0 || varStore == 0) ? Null (ItemVariationStore) : this+varStore; }
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -2132,6 +2308,88 @@ struct COLR
|
|||
return record;
|
||||
}
|
||||
|
||||
bool downgrade_to_V0 (const hb_set_t &glyphset) const
|
||||
{
|
||||
//no more COLRv1 glyphs, downgrade to version 0
|
||||
for (const BaseGlyphPaintRecord& _ : get_baseglyphList ())
|
||||
if (glyphset.has (_.glyphId))
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool subset_varstore (hb_subset_context_t *c,
|
||||
COLR* out /* OUT */) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
if (!varStore || c->plan->all_axes_pinned ||
|
||||
!c->plan->colrv1_variation_idx_delta_map)
|
||||
return_trace (true);
|
||||
|
||||
const ItemVariationStore& var_store = this+varStore;
|
||||
if (c->plan->normalized_coords)
|
||||
{
|
||||
item_variations_t item_vars;
|
||||
/* turn off varstore optimization when varIdxMap is null, so we maintain
|
||||
* original var_idx sequence */
|
||||
bool optimize = (varIdxMap != 0) ? true : false;
|
||||
if (!item_vars.instantiate (var_store, c->plan,
|
||||
optimize, /* optimization */
|
||||
optimize, /* use_no_variation_idx = false */
|
||||
c->plan->colrv1_varstore_inner_maps.as_array ()))
|
||||
return_trace (false);
|
||||
|
||||
if (!out->varStore.serialize_serialize (c->serializer,
|
||||
item_vars.has_long_word (),
|
||||
c->plan->axis_tags,
|
||||
item_vars.get_region_list (),
|
||||
item_vars.get_vardata_encodings ()))
|
||||
return_trace (false);
|
||||
|
||||
/* if varstore is optimized, update colrv1_new_deltaset_idx_varidx_map in
|
||||
* subset plan */
|
||||
if (optimize)
|
||||
{
|
||||
const hb_map_t &varidx_map = item_vars.get_varidx_map ();
|
||||
for (auto _ : c->plan->colrv1_new_deltaset_idx_varidx_map.iter_ref ())
|
||||
{
|
||||
uint32_t varidx = _.second;
|
||||
uint32_t *new_varidx;
|
||||
if (varidx_map.has (varidx, &new_varidx))
|
||||
_.second = *new_varidx;
|
||||
else
|
||||
_.second = VarIdx::NO_VARIATION;
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (unlikely (!out->varStore.serialize_serialize (c->serializer,
|
||||
&var_store,
|
||||
c->plan->colrv1_varstore_inner_maps.as_array ())))
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool subset_delta_set_index_map (hb_subset_context_t *c,
|
||||
COLR* out /* OUT */) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
if (!varIdxMap || c->plan->all_axes_pinned ||
|
||||
!c->plan->colrv1_new_deltaset_idx_varidx_map)
|
||||
return_trace (true);
|
||||
|
||||
const hb_map_t &deltaset_idx_varidx_map = c->plan->colrv1_new_deltaset_idx_varidx_map;
|
||||
delta_set_index_map_subset_plan_t index_map_plan (deltaset_idx_varidx_map);
|
||||
|
||||
if (unlikely (!index_map_plan.remap (deltaset_idx_varidx_map)))
|
||||
return_trace (false);
|
||||
|
||||
return_trace (out->varIdxMap.serialize_serialize (c->serializer, index_map_plan));
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
|
|
@ -2200,34 +2458,28 @@ struct COLR
|
|||
auto *colr_prime = c->serializer->start_embed<COLR> ();
|
||||
if (unlikely (!c->serializer->extend_min (colr_prime))) return_trace (false);
|
||||
|
||||
if (version == 0)
|
||||
return_trace (colr_prime->serialize_V0 (c->serializer, version, base_it, layer_it));
|
||||
if (version == 0 || downgrade_to_V0 (glyphset))
|
||||
return_trace (colr_prime->serialize_V0 (c->serializer, 0, base_it, layer_it));
|
||||
|
||||
auto snap = c->serializer->snapshot ();
|
||||
//start version 1
|
||||
if (!c->serializer->allocate_size<void> (5 * HBUINT32::static_size)) return_trace (false);
|
||||
if (!colr_prime->serialize_V0 (c->serializer, version, base_it, layer_it)) return_trace (false);
|
||||
|
||||
VarStoreInstancer instancer (varStore ? &(this+varStore) : nullptr,
|
||||
/* subset ItemVariationStore first, cause varidx_map needs to be updated
|
||||
* after instancing */
|
||||
if (!subset_varstore (c, colr_prime)) return_trace (false);
|
||||
|
||||
ItemVarStoreInstancer instancer (varStore ? &(this+varStore) : nullptr,
|
||||
varIdxMap ? &(this+varIdxMap) : nullptr,
|
||||
c->plan->normalized_coords.as_array ());
|
||||
|
||||
if (!colr_prime->baseGlyphList.serialize_subset (c, baseGlyphList, this, instancer))
|
||||
{
|
||||
if (c->serializer->in_error ()) return_trace (false);
|
||||
//no more COLRv1 glyphs: downgrade to version 0
|
||||
c->serializer->revert (snap);
|
||||
return_trace (colr_prime->serialize_V0 (c->serializer, 0, base_it, layer_it));
|
||||
}
|
||||
|
||||
if (!colr_prime->serialize_V0 (c->serializer, version, base_it, layer_it)) return_trace (false);
|
||||
return_trace (false);
|
||||
|
||||
colr_prime->layerList.serialize_subset (c, layerList, this, instancer);
|
||||
colr_prime->clipList.serialize_subset (c, clipList, this, instancer);
|
||||
if (!varStore || c->plan->all_axes_pinned)
|
||||
return_trace (true);
|
||||
|
||||
colr_prime->varIdxMap.serialize_copy (c->serializer, varIdxMap, this);
|
||||
colr_prime->varStore.serialize_copy (c->serializer, varStore, this);
|
||||
return_trace (true);
|
||||
return_trace (subset_delta_set_index_map (c, colr_prime));
|
||||
}
|
||||
|
||||
const Paint *get_base_glyph_paint (hb_codepoint_t glyph) const
|
||||
|
|
@ -2250,7 +2502,7 @@ struct COLR
|
|||
if (version != 1)
|
||||
return false;
|
||||
|
||||
VarStoreInstancer instancer (&(this+varStore),
|
||||
ItemVarStoreInstancer instancer (&(this+varStore),
|
||||
&(this+varIdxMap),
|
||||
hb_array (font->coords, font->num_coords));
|
||||
|
||||
|
|
@ -2301,7 +2553,7 @@ struct COLR
|
|||
|
||||
bool get_clip (hb_codepoint_t glyph,
|
||||
hb_glyph_extents_t *extents,
|
||||
const VarStoreInstancer instancer) const
|
||||
const ItemVarStoreInstancer instancer) const
|
||||
{
|
||||
return (this+clipList).get_extents (glyph,
|
||||
extents,
|
||||
|
|
@ -2312,7 +2564,7 @@ struct COLR
|
|||
bool
|
||||
paint_glyph (hb_font_t *font, hb_codepoint_t glyph, hb_paint_funcs_t *funcs, void *data, unsigned int palette_index, hb_color_t foreground, bool clip = true) const
|
||||
{
|
||||
VarStoreInstancer instancer (&(this+varStore),
|
||||
ItemVarStoreInstancer instancer (&(this+varStore),
|
||||
&(this+varIdxMap),
|
||||
hb_array (font->coords, font->num_coords));
|
||||
hb_paint_context_t c (this, funcs, data, font, palette_index, foreground, instancer);
|
||||
|
|
@ -2327,10 +2579,6 @@ struct COLR
|
|||
{
|
||||
// COLRv1 glyph
|
||||
|
||||
VarStoreInstancer instancer (&(this+varStore),
|
||||
&(this+varIdxMap),
|
||||
hb_array (font->coords, font->num_coords));
|
||||
|
||||
bool is_bounded = true;
|
||||
if (clip)
|
||||
{
|
||||
|
|
@ -2413,7 +2661,7 @@ struct COLR
|
|||
Offset32To<LayerList> layerList;
|
||||
Offset32To<ClipList> clipList; // Offset to ClipList table (may be NULL)
|
||||
Offset32To<DeltaSetIndexMap> varIdxMap; // Offset to DeltaSetIndexMap table (may be NULL)
|
||||
Offset32To<VariationStore> varStore;
|
||||
Offset32To<ItemVariationStore> varStore;
|
||||
public:
|
||||
DEFINE_SIZE_MIN (14);
|
||||
};
|
||||
|
|
|
|||
|
|
@ -66,34 +66,64 @@ HB_INTERNAL void PaintColrGlyph::closurev1 (hb_colrv1_closure_context_t* c) cons
|
|||
|
||||
template <template<typename> class Var>
|
||||
HB_INTERNAL void PaintTransform<Var>::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
(this+transform).closurev1 (c);
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintTranslate::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
c->num_var_idxes = 2;
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintScale::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
c->num_var_idxes = 2;
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintScaleAroundCenter::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
c->num_var_idxes = 4;
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintScaleUniform::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
c->num_var_idxes = 1;
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintScaleUniformAroundCenter::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
c->num_var_idxes = 3;
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintRotate::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
c->num_var_idxes = 1;
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintRotateAroundCenter::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
c->num_var_idxes = 3;
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintSkew::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
c->num_var_idxes = 2;
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintSkewAroundCenter::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{ (this+src).dispatch (c); }
|
||||
{
|
||||
(this+src).dispatch (c);
|
||||
c->num_var_idxes = 4;
|
||||
}
|
||||
|
||||
HB_INTERNAL void PaintComposite::closurev1 (hb_colrv1_closure_context_t* c) const
|
||||
{
|
||||
|
|
|
|||
|
|
@ -189,7 +189,7 @@ struct CaretValueFormat3
|
|||
friend struct CaretValue;
|
||||
|
||||
hb_position_t get_caret_value (hb_font_t *font, hb_direction_t direction,
|
||||
const VariationStore &var_store) const
|
||||
const ItemVariationStore &var_store) const
|
||||
{
|
||||
return HB_DIRECTION_IS_HORIZONTAL (direction) ?
|
||||
font->em_scale_x (coordinate) + (this+deviceTable).get_x_delta (font, var_store) :
|
||||
|
|
@ -251,7 +251,7 @@ struct CaretValue
|
|||
hb_position_t get_caret_value (hb_font_t *font,
|
||||
hb_direction_t direction,
|
||||
hb_codepoint_t glyph_id,
|
||||
const VariationStore &var_store) const
|
||||
const ItemVariationStore &var_store) const
|
||||
{
|
||||
switch (u.format) {
|
||||
case 1: return u.format1.get_caret_value (font, direction);
|
||||
|
|
@ -316,7 +316,7 @@ struct LigGlyph
|
|||
unsigned get_lig_carets (hb_font_t *font,
|
||||
hb_direction_t direction,
|
||||
hb_codepoint_t glyph_id,
|
||||
const VariationStore &var_store,
|
||||
const ItemVariationStore &var_store,
|
||||
unsigned start_offset,
|
||||
unsigned *caret_count /* IN/OUT */,
|
||||
hb_position_t *caret_array /* OUT */) const
|
||||
|
|
@ -372,7 +372,7 @@ struct LigCaretList
|
|||
unsigned int get_lig_carets (hb_font_t *font,
|
||||
hb_direction_t direction,
|
||||
hb_codepoint_t glyph_id,
|
||||
const VariationStore &var_store,
|
||||
const ItemVariationStore &var_store,
|
||||
unsigned int start_offset,
|
||||
unsigned int *caret_count /* IN/OUT */,
|
||||
hb_position_t *caret_array /* OUT */) const
|
||||
|
|
@ -609,7 +609,7 @@ struct GDEFVersion1_2
|
|||
* definitions--from beginning of GDEF
|
||||
* header (may be NULL). Introduced
|
||||
* in version 0x00010002. */
|
||||
Offset32To<VariationStore>
|
||||
Offset32To<ItemVariationStore>
|
||||
varStore; /* Offset to the table of Item Variation
|
||||
* Store--from beginning of GDEF
|
||||
* header (may be NULL). Introduced
|
||||
|
|
@ -663,21 +663,16 @@ struct GDEFVersion1_2
|
|||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
out->version.major = version.major;
|
||||
out->version.minor = version.minor;
|
||||
bool subset_glyphclassdef = out->glyphClassDef.serialize_subset (c, glyphClassDef, this, nullptr, false, true);
|
||||
bool subset_attachlist = out->attachList.serialize_subset (c, attachList, this);
|
||||
bool subset_markattachclassdef = out->markAttachClassDef.serialize_subset (c, markAttachClassDef, this, nullptr, false, true);
|
||||
|
||||
bool subset_markglyphsetsdef = false;
|
||||
// Push var store first (if it's needed) so that it's last in the
|
||||
// serialization order. Some font consumers assume that varstore runs to
|
||||
// the end of the GDEF table.
|
||||
// See: https://github.com/harfbuzz/harfbuzz/issues/4636
|
||||
auto snapshot_version0 = c->serializer->snapshot ();
|
||||
if (version.to_int () >= 0x00010002u)
|
||||
{
|
||||
if (unlikely (!c->serializer->embed (markGlyphSetsDef))) return_trace (false);
|
||||
subset_markglyphsetsdef = out->markGlyphSetsDef.serialize_subset (c, markGlyphSetsDef, this);
|
||||
}
|
||||
if (unlikely (version.to_int () >= 0x00010002u && !c->serializer->embed (markGlyphSetsDef)))
|
||||
return_trace (false);
|
||||
|
||||
bool subset_varstore = false;
|
||||
unsigned varstore_index = (unsigned) -1;
|
||||
auto snapshot_version2 = c->serializer->snapshot ();
|
||||
if (version.to_int () >= 0x00010003u)
|
||||
{
|
||||
|
|
@ -690,20 +685,37 @@ struct GDEFVersion1_2
|
|||
{
|
||||
item_variations_t item_vars;
|
||||
if (item_vars.instantiate (this+varStore, c->plan, true, true,
|
||||
c->plan->gdef_varstore_inner_maps.as_array ()))
|
||||
c->plan->gdef_varstore_inner_maps.as_array ())) {
|
||||
subset_varstore = out->varStore.serialize_serialize (c->serializer,
|
||||
item_vars.has_long_word (),
|
||||
c->plan->axis_tags,
|
||||
item_vars.get_region_list (),
|
||||
item_vars.get_vardata_encodings ());
|
||||
varstore_index = c->serializer->last_added_child_index();
|
||||
}
|
||||
remap_varidx_after_instantiation (item_vars.get_varidx_map (),
|
||||
c->plan->layout_variation_idx_delta_map);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
subset_varstore = out->varStore.serialize_subset (c, varStore, this, c->plan->gdef_varstore_inner_maps.as_array ());
|
||||
varstore_index = c->serializer->last_added_child_index();
|
||||
}
|
||||
}
|
||||
|
||||
out->version.major = version.major;
|
||||
out->version.minor = version.minor;
|
||||
|
||||
if (!subset_varstore && version.to_int () >= 0x00010002u) {
|
||||
c->serializer->revert (snapshot_version2);
|
||||
}
|
||||
|
||||
bool subset_markglyphsetsdef = false;
|
||||
if (version.to_int () >= 0x00010002u)
|
||||
{
|
||||
subset_markglyphsetsdef = out->markGlyphSetsDef.serialize_subset (c, markGlyphSetsDef, this);
|
||||
}
|
||||
|
||||
if (subset_varstore)
|
||||
{
|
||||
|
|
@ -711,14 +723,20 @@ struct GDEFVersion1_2
|
|||
c->plan->has_gdef_varstore = true;
|
||||
} else if (subset_markglyphsetsdef) {
|
||||
out->version.minor = 2;
|
||||
c->serializer->revert (snapshot_version2);
|
||||
} else {
|
||||
out->version.minor = 0;
|
||||
c->serializer->revert (snapshot_version0);
|
||||
}
|
||||
|
||||
bool subset_glyphclassdef = out->glyphClassDef.serialize_subset (c, glyphClassDef, this, nullptr, false, true);
|
||||
bool subset_attachlist = out->attachList.serialize_subset (c, attachList, this);
|
||||
bool subset_markattachclassdef = out->markAttachClassDef.serialize_subset (c, markAttachClassDef, this, nullptr, false, true);
|
||||
bool subset_ligcaretlist = out->ligCaretList.serialize_subset (c, ligCaretList, this);
|
||||
|
||||
if (subset_varstore && varstore_index != (unsigned) -1) {
|
||||
c->serializer->repack_last(varstore_index);
|
||||
}
|
||||
|
||||
return_trace (subset_glyphclassdef || subset_attachlist ||
|
||||
subset_ligcaretlist || subset_markattachclassdef ||
|
||||
(out->version.to_int () >= 0x00010002u && subset_markglyphsetsdef) ||
|
||||
|
|
@ -884,14 +902,14 @@ struct GDEF
|
|||
default: return false;
|
||||
}
|
||||
}
|
||||
const VariationStore &get_var_store () const
|
||||
const ItemVariationStore &get_var_store () const
|
||||
{
|
||||
switch (u.version.major) {
|
||||
case 1: return u.version.to_int () >= 0x00010003u ? this+u.version1.varStore : Null(VariationStore);
|
||||
case 1: return u.version.to_int () >= 0x00010003u ? this+u.version1.varStore : Null(ItemVariationStore);
|
||||
#ifndef HB_NO_BEYOND_64K
|
||||
case 2: return this+u.version2.varStore;
|
||||
#endif
|
||||
default: return Null(VariationStore);
|
||||
default: return Null(ItemVariationStore);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1004,47 +1022,6 @@ struct GDEF
|
|||
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
|
||||
{ get_lig_caret_list ().collect_variation_indices (c); }
|
||||
|
||||
void remap_layout_variation_indices (const hb_set_t *layout_variation_indices,
|
||||
const hb_vector_t<int>& normalized_coords,
|
||||
bool calculate_delta, /* not pinned at default */
|
||||
bool no_variations, /* all axes pinned */
|
||||
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map /* OUT */) const
|
||||
{
|
||||
if (!has_var_store ()) return;
|
||||
const VariationStore &var_store = get_var_store ();
|
||||
float *store_cache = var_store.create_cache ();
|
||||
|
||||
unsigned new_major = 0, new_minor = 0;
|
||||
unsigned last_major = (layout_variation_indices->get_min ()) >> 16;
|
||||
for (unsigned idx : layout_variation_indices->iter ())
|
||||
{
|
||||
int delta = 0;
|
||||
if (calculate_delta)
|
||||
delta = roundf (var_store.get_delta (idx, normalized_coords.arrayZ,
|
||||
normalized_coords.length, store_cache));
|
||||
|
||||
if (no_variations)
|
||||
{
|
||||
layout_variation_idx_delta_map->set (idx, hb_pair_t<unsigned, int> (HB_OT_LAYOUT_NO_VARIATIONS_INDEX, delta));
|
||||
continue;
|
||||
}
|
||||
|
||||
uint16_t major = idx >> 16;
|
||||
if (major >= var_store.get_sub_table_count ()) break;
|
||||
if (major != last_major)
|
||||
{
|
||||
new_minor = 0;
|
||||
++new_major;
|
||||
}
|
||||
|
||||
unsigned new_idx = (new_major << 16) + new_minor;
|
||||
layout_variation_idx_delta_map->set (idx, hb_pair_t<unsigned, int> (new_idx, delta));
|
||||
++new_minor;
|
||||
last_major = major;
|
||||
}
|
||||
var_store.destroy_cache (store_cache);
|
||||
}
|
||||
|
||||
protected:
|
||||
union {
|
||||
FixedVersion<> version; /* Version identifier */
|
||||
|
|
|
|||
|
|
@ -324,17 +324,8 @@ struct PairPosFormat2_4 : ValueBase
|
|||
}
|
||||
}
|
||||
|
||||
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map;
|
||||
|
||||
auto it =
|
||||
+ hb_iter (this+coverage)
|
||||
| hb_filter (glyphset)
|
||||
| hb_map_retains_sorting (glyph_map)
|
||||
;
|
||||
|
||||
out->coverage.serialize_serialize (c->serializer, it);
|
||||
return_trace (out->class1Count && out->class2Count && bool (it));
|
||||
bool ret = out->coverage.serialize_subset(c, coverage, this);
|
||||
return_trace (out->class1Count && out->class2Count && ret);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -116,7 +116,7 @@ struct ValueFormat : HBUINT16
|
|||
|
||||
if (!use_x_device && !use_y_device) return ret;
|
||||
|
||||
const VariationStore &store = c->var_store;
|
||||
const ItemVariationStore &store = c->var_store;
|
||||
auto *cache = c->var_store_cache;
|
||||
|
||||
/* pixel -> fractional pixel */
|
||||
|
|
|
|||
|
|
@ -90,8 +90,17 @@ struct Ligature
|
|||
|
||||
unsigned int total_component_count = 0;
|
||||
|
||||
if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return false;
|
||||
unsigned match_positions_stack[4];
|
||||
unsigned *match_positions = match_positions_stack;
|
||||
if (unlikely (count > ARRAY_LENGTH (match_positions_stack)))
|
||||
{
|
||||
match_positions = (unsigned *) hb_malloc (hb_max (count, 1u) * sizeof (unsigned));
|
||||
if (unlikely (!match_positions))
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
unsigned int match_end = 0;
|
||||
unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
|
||||
|
||||
if (likely (!match_input (c, count,
|
||||
&component[1],
|
||||
|
|
@ -102,6 +111,8 @@ struct Ligature
|
|||
&total_component_count)))
|
||||
{
|
||||
c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
|
||||
if (match_positions != match_positions_stack)
|
||||
hb_free (match_positions);
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
|
|
@ -145,6 +156,8 @@ struct Ligature
|
|||
pos);
|
||||
}
|
||||
|
||||
if (match_positions != match_positions_stack)
|
||||
hb_free (match_positions);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
|
|
|
|||
modules/juce_graphics/fonts/harfbuzz/OT/Var/VARC/VARC.cc (new file, 346 lines)
|
|
@ -0,0 +1,346 @@
|
|||
#include "VARC.hh"
|
||||
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
|
||||
#include "../../../hb-draw.hh"
|
||||
#include "../../../hb-geometry.hh"
|
||||
#include "../../../hb-ot-layout-common.hh"
|
||||
#include "../../../hb-ot-layout-gdef-table.hh"
|
||||
|
||||
namespace OT {
|
||||
|
||||
//namespace Var {
|
||||
|
||||
|
||||
struct hb_transforming_pen_context_t
|
||||
{
|
||||
hb_transform_t transform;
|
||||
hb_draw_funcs_t *dfuncs;
|
||||
void *data;
|
||||
hb_draw_state_t *st;
|
||||
};
|
||||
|
||||
static void
|
||||
hb_transforming_pen_move_to (hb_draw_funcs_t *dfuncs HB_UNUSED,
|
||||
void *data,
|
||||
hb_draw_state_t *st,
|
||||
float to_x, float to_y,
|
||||
void *user_data HB_UNUSED)
|
||||
{
|
||||
hb_transforming_pen_context_t *c = (hb_transforming_pen_context_t *) data;
|
||||
|
||||
c->transform.transform_point (to_x, to_y);
|
||||
|
||||
c->dfuncs->move_to (c->data, *c->st, to_x, to_y);
|
||||
}
|
||||
|
||||
static void
|
||||
hb_transforming_pen_line_to (hb_draw_funcs_t *dfuncs HB_UNUSED,
|
||||
void *data,
|
||||
hb_draw_state_t *st,
|
||||
float to_x, float to_y,
|
||||
void *user_data HB_UNUSED)
|
||||
{
|
||||
hb_transforming_pen_context_t *c = (hb_transforming_pen_context_t *) data;
|
||||
|
||||
c->transform.transform_point (to_x, to_y);
|
||||
|
||||
c->dfuncs->line_to (c->data, *c->st, to_x, to_y);
|
||||
}
|
||||
|
||||
static void
|
||||
hb_transforming_pen_quadratic_to (hb_draw_funcs_t *dfuncs HB_UNUSED,
|
||||
void *data,
|
||||
hb_draw_state_t *st,
|
||||
float control_x, float control_y,
|
||||
float to_x, float to_y,
|
||||
void *user_data HB_UNUSED)
|
||||
{
|
||||
hb_transforming_pen_context_t *c = (hb_transforming_pen_context_t *) data;
|
||||
|
||||
c->transform.transform_point (control_x, control_y);
|
||||
c->transform.transform_point (to_x, to_y);
|
||||
|
||||
c->dfuncs->quadratic_to (c->data, *c->st, control_x, control_y, to_x, to_y);
|
||||
}
|
||||
|
||||
static void
|
||||
hb_transforming_pen_cubic_to (hb_draw_funcs_t *dfuncs HB_UNUSED,
|
||||
void *data,
|
||||
hb_draw_state_t *st,
|
||||
float control1_x, float control1_y,
|
||||
float control2_x, float control2_y,
|
||||
float to_x, float to_y,
|
||||
void *user_data HB_UNUSED)
|
||||
{
|
||||
hb_transforming_pen_context_t *c = (hb_transforming_pen_context_t *) data;
|
||||
|
||||
c->transform.transform_point (control1_x, control1_y);
|
||||
c->transform.transform_point (control2_x, control2_y);
|
||||
c->transform.transform_point (to_x, to_y);
|
||||
|
||||
c->dfuncs->cubic_to (c->data, *c->st, control1_x, control1_y, control2_x, control2_y, to_x, to_y);
|
||||
}
|
||||
|
||||
static void
|
||||
hb_transforming_pen_close_path (hb_draw_funcs_t *dfuncs HB_UNUSED,
|
||||
void *data,
|
||||
hb_draw_state_t *st,
|
||||
void *user_data HB_UNUSED)
|
||||
{
|
||||
hb_transforming_pen_context_t *c = (hb_transforming_pen_context_t *) data;
|
||||
|
||||
c->dfuncs->close_path (c->data, *c->st);
|
||||
}
|
||||
|
||||
static inline void free_static_transforming_pen_funcs ();
|
||||
|
||||
static struct hb_transforming_pen_funcs_lazy_loader_t : hb_draw_funcs_lazy_loader_t<hb_transforming_pen_funcs_lazy_loader_t>
|
||||
{
|
||||
static hb_draw_funcs_t *create ()
|
||||
{
|
||||
hb_draw_funcs_t *funcs = hb_draw_funcs_create ();
|
||||
|
||||
hb_draw_funcs_set_move_to_func (funcs, hb_transforming_pen_move_to, nullptr, nullptr);
|
||||
hb_draw_funcs_set_line_to_func (funcs, hb_transforming_pen_line_to, nullptr, nullptr);
|
||||
hb_draw_funcs_set_quadratic_to_func (funcs, hb_transforming_pen_quadratic_to, nullptr, nullptr);
|
||||
hb_draw_funcs_set_cubic_to_func (funcs, hb_transforming_pen_cubic_to, nullptr, nullptr);
|
||||
hb_draw_funcs_set_close_path_func (funcs, hb_transforming_pen_close_path, nullptr, nullptr);
|
||||
|
||||
hb_draw_funcs_make_immutable (funcs);
|
||||
|
||||
hb_atexit (free_static_transforming_pen_funcs);
|
||||
|
||||
return funcs;
|
||||
}
|
||||
} static_transforming_pen_funcs;
|
||||
|
||||
static inline
|
||||
void free_static_transforming_pen_funcs ()
|
||||
{
|
||||
static_transforming_pen_funcs.free_instance ();
|
||||
}
|
||||
|
||||
static hb_draw_funcs_t *
|
||||
hb_transforming_pen_get_funcs ()
|
||||
{
|
||||
return static_transforming_pen_funcs.get_unconst ();
|
||||
}
|
||||
|
||||
|
||||
hb_ubytes_t
|
||||
VarComponent::get_path_at (hb_font_t *font,
|
||||
hb_codepoint_t parent_gid,
|
||||
hb_draw_session_t &draw_session,
|
||||
hb_array_t<const int> coords,
|
||||
hb_ubytes_t total_record,
|
||||
hb_set_t *visited,
|
||||
signed *edges_left,
|
||||
signed depth_left,
|
||||
VarRegionList::cache_t *cache) const
|
||||
{
|
||||
const unsigned char *end = total_record.arrayZ + total_record.length;
|
||||
const unsigned char *record = total_record.arrayZ;
|
||||
|
||||
auto &VARC = *font->face->table.VARC;
|
||||
auto &varStore = &VARC+VARC.varStore;
|
||||
auto instancer = MultiItemVarStoreInstancer(&varStore, nullptr, coords, cache);
|
||||
|
||||
#define READ_UINT32VAR(name) \
|
||||
HB_STMT_START { \
|
||||
if (unlikely (unsigned (end - record) < HBUINT32VAR::min_size)) return hb_ubytes_t (); \
|
||||
hb_barrier (); \
|
||||
auto &varint = * (const HBUINT32VAR *) record; \
|
||||
unsigned size = varint.get_size (); \
|
||||
if (unlikely (unsigned (end - record) < size)) return hb_ubytes_t (); \
|
||||
name = (uint32_t) varint; \
|
||||
record += size; \
|
||||
} HB_STMT_END
|
||||
|
||||
uint32_t flags;
|
||||
READ_UINT32VAR (flags);
|
||||
|
||||
// gid
|
||||
|
||||
hb_codepoint_t gid = 0;
|
||||
if (flags & (unsigned) flags_t::GID_IS_24BIT)
|
||||
{
|
||||
if (unlikely (unsigned (end - record) < HBGlyphID24::static_size))
|
||||
return hb_ubytes_t ();
|
||||
hb_barrier ();
|
||||
gid = * (const HBGlyphID24 *) record;
|
||||
record += HBGlyphID24::static_size;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (unlikely (unsigned (end - record) < HBGlyphID16::static_size))
|
||||
return hb_ubytes_t ();
|
||||
hb_barrier ();
|
||||
gid = * (const HBGlyphID16 *) record;
|
||||
record += HBGlyphID16::static_size;
|
||||
}
|
||||
|
||||
// Condition
|
||||
bool show = true;
|
||||
if (flags & (unsigned) flags_t::HAVE_CONDITION)
|
||||
{
|
||||
unsigned conditionIndex;
|
||||
READ_UINT32VAR (conditionIndex);
|
||||
const auto &condition = (&VARC+VARC.conditionList)[conditionIndex];
|
||||
show = condition.evaluate (coords.arrayZ, coords.length, &instancer);
|
||||
}
|
||||
|
||||
// Axis values
|
||||
|
||||
hb_vector_t<unsigned> axisIndices;
|
||||
hb_vector_t<float> axisValues;
|
||||
if (flags & (unsigned) flags_t::HAVE_AXES)
|
||||
{
|
||||
unsigned axisIndicesIndex;
|
||||
READ_UINT32VAR (axisIndicesIndex);
|
||||
axisIndices = (&VARC+VARC.axisIndicesList)[axisIndicesIndex];
|
||||
axisValues.resize (axisIndices.length);
|
||||
const HBUINT8 *p = (const HBUINT8 *) record;
|
||||
TupleValues::decompile (p, axisValues, (const HBUINT8 *) end);
|
||||
record += (const unsigned char *) p - record;
|
||||
}
|
||||
|
||||
// Apply variations if any
|
||||
if (flags & (unsigned) flags_t::AXIS_VALUES_HAVE_VARIATION)
|
||||
{
|
||||
uint32_t axisValuesVarIdx;
|
||||
READ_UINT32VAR (axisValuesVarIdx);
|
||||
if (show && coords && !axisValues.in_error ())
|
||||
varStore.get_delta (axisValuesVarIdx, coords, axisValues.as_array (), cache);
|
||||
}
|
||||
|
||||
auto component_coords = coords;
|
||||
/* Copying coords is expensive; so we have put an arbitrary
|
||||
* limit on the max number of coords for now. */
|
||||
if ((flags & (unsigned) flags_t::RESET_UNSPECIFIED_AXES) ||
|
||||
coords.length > HB_VAR_COMPOSITE_MAX_AXES)
|
||||
component_coords = hb_array<int> (font->coords, font->num_coords);
|
||||
|
||||
// Transform
|
||||
|
||||
uint32_t transformVarIdx = VarIdx::NO_VARIATION;
|
||||
if (flags & (unsigned) flags_t::TRANSFORM_HAS_VARIATION)
|
||||
READ_UINT32VAR (transformVarIdx);
|
||||
|
||||
#define PROCESS_TRANSFORM_COMPONENTS \
|
||||
HB_STMT_START { \
|
||||
PROCESS_TRANSFORM_COMPONENT (FWORD, HAVE_TRANSLATE_X, translateX); \
|
||||
PROCESS_TRANSFORM_COMPONENT (FWORD, HAVE_TRANSLATE_Y, translateY); \
|
||||
PROCESS_TRANSFORM_COMPONENT (F4DOT12, HAVE_ROTATION, rotation); \
|
||||
PROCESS_TRANSFORM_COMPONENT (F6DOT10, HAVE_SCALE_X, scaleX); \
|
||||
PROCESS_TRANSFORM_COMPONENT (F6DOT10, HAVE_SCALE_Y, scaleY); \
|
||||
PROCESS_TRANSFORM_COMPONENT (F4DOT12, HAVE_SKEW_X, skewX); \
|
||||
PROCESS_TRANSFORM_COMPONENT (F4DOT12, HAVE_SKEW_Y, skewY); \
|
||||
PROCESS_TRANSFORM_COMPONENT (FWORD, HAVE_TCENTER_X, tCenterX); \
|
||||
PROCESS_TRANSFORM_COMPONENT (FWORD, HAVE_TCENTER_Y, tCenterY); \
|
||||
} HB_STMT_END
|
||||
|
||||
hb_transform_decomposed_t transform;
|
||||
|
||||
// Read transform components
|
||||
#define PROCESS_TRANSFORM_COMPONENT(type, flag, name) \
|
||||
if (flags & (unsigned) flags_t::flag) \
|
||||
{ \
|
||||
static_assert (type::static_size == HBINT16::static_size, ""); \
|
||||
if (unlikely (unsigned (end - record) < HBINT16::static_size)) \
|
||||
return hb_ubytes_t (); \
|
||||
hb_barrier (); \
|
||||
transform.name = * (const HBINT16 *) record; \
|
||||
record += HBINT16::static_size; \
|
||||
}
|
||||
PROCESS_TRANSFORM_COMPONENTS;
|
||||
#undef PROCESS_TRANSFORM_COMPONENT
|
||||
|
||||
// Read reserved records
|
||||
unsigned i = flags & (unsigned) flags_t::RESERVED_MASK;
|
||||
while (i)
|
||||
{
|
||||
HB_UNUSED uint32_t discard;
|
||||
READ_UINT32VAR (discard);
|
||||
i &= i - 1;
|
||||
}
|
||||
|
||||
/* Parsing is over now. */
|
||||
|
||||
if (show)
|
||||
{
|
||||
// Only use coord_setter if there's actually any axis overrides.
|
||||
coord_setter_t coord_setter (axisIndices ? component_coords : hb_array<int> ());
|
||||
// Go backwards, to reduce coord_setter vector reallocations.
|
||||
for (unsigned i = axisIndices.length; i; i--)
|
||||
coord_setter[axisIndices[i - 1]] = axisValues[i - 1];
|
||||
if (axisIndices)
|
||||
component_coords = coord_setter.get_coords ();
|
||||
|
||||
// Apply transform variations if any
|
||||
if (transformVarIdx != VarIdx::NO_VARIATION && coords)
|
||||
{
|
||||
float transformValues[9];
|
||||
unsigned numTransformValues = 0;
|
||||
#define PROCESS_TRANSFORM_COMPONENT(type, flag, name) \
|
||||
if (flags & (unsigned) flags_t::flag) \
|
||||
transformValues[numTransformValues++] = transform.name;
|
||||
PROCESS_TRANSFORM_COMPONENTS;
|
||||
#undef PROCESS_TRANSFORM_COMPONENT
|
||||
varStore.get_delta (transformVarIdx, coords, hb_array (transformValues, numTransformValues), cache);
|
||||
numTransformValues = 0;
|
||||
#define PROCESS_TRANSFORM_COMPONENT(type, flag, name) \
|
||||
if (flags & (unsigned) flags_t::flag) \
|
||||
transform.name = transformValues[numTransformValues++];
|
||||
PROCESS_TRANSFORM_COMPONENTS;
|
||||
#undef PROCESS_TRANSFORM_COMPONENT
|
||||
}
|
||||
|
||||
// Divide them by their divisors
|
||||
#define PROCESS_TRANSFORM_COMPONENT(type, flag, name) \
|
||||
if (flags & (unsigned) flags_t::flag) \
|
||||
{ \
|
||||
HBINT16 int_v; \
|
||||
int_v = roundf (transform.name); \
|
||||
type typed_v = * (const type *) &int_v; \
|
||||
float float_v = (float) typed_v; \
|
||||
transform.name = float_v; \
|
||||
}
|
||||
PROCESS_TRANSFORM_COMPONENTS;
|
||||
#undef PROCESS_TRANSFORM_COMPONENT
|
||||
|
||||
if (!(flags & (unsigned) flags_t::HAVE_SCALE_Y))
|
||||
transform.scaleY = transform.scaleX;
|
||||
|
||||
// Scale the transform by the font's scale
|
||||
float x_scale = font->x_multf;
|
||||
float y_scale = font->y_multf;
|
||||
transform.translateX *= x_scale;
|
||||
transform.translateY *= y_scale;
|
||||
transform.tCenterX *= x_scale;
|
||||
transform.tCenterY *= y_scale;
|
||||
|
||||
// Build a transforming pen to apply the transform.
|
||||
hb_draw_funcs_t *transformer_funcs = hb_transforming_pen_get_funcs ();
|
||||
hb_transforming_pen_context_t context {transform.to_transform (),
|
||||
draw_session.funcs,
|
||||
draw_session.draw_data,
|
||||
&draw_session.st};
|
||||
hb_draw_session_t transformer_session {transformer_funcs, &context};
|
||||
|
||||
VARC.get_path_at (font, gid,
|
||||
transformer_session, component_coords,
|
||||
parent_gid,
|
||||
visited, edges_left, depth_left - 1);
|
||||
}
|
||||
|
||||
#undef PROCESS_TRANSFORM_COMPONENTS
|
||||
#undef READ_UINT32VAR
|
||||
|
||||
return hb_ubytes_t (record, end - record);
|
||||
}
|
||||
|
||||
//} // namespace Var
|
||||
} // namespace OT
|
||||
|
||||
#endif
|
||||
193
modules/juce_graphics/fonts/harfbuzz/OT/Var/VARC/VARC.hh
Normal file
|
|
@ -0,0 +1,193 @@
|
|||
#ifndef OT_VAR_VARC_VARC_HH
|
||||
#define OT_VAR_VARC_VARC_HH
|
||||
|
||||
#include "../../../hb-ot-layout-common.hh"
|
||||
#include "../../../hb-ot-glyf-table.hh"
|
||||
#include "../../../hb-ot-cff2-table.hh"
|
||||
#include "../../../hb-ot-cff1-table.hh"
|
||||
|
||||
#include "coord-setter.hh"
|
||||
|
||||
namespace OT {
|
||||
|
||||
//namespace Var {
|
||||
|
||||
/*
|
||||
* VARC -- Variable Composites
|
||||
* https://github.com/harfbuzz/boring-expansion-spec/blob/main/VARC.md
|
||||
*/
|
||||
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
|
||||
struct VarComponent
|
||||
{
|
||||
enum class flags_t : uint32_t
|
||||
{
|
||||
RESET_UNSPECIFIED_AXES = 1u << 0,
|
||||
HAVE_AXES = 1u << 1,
|
||||
AXIS_VALUES_HAVE_VARIATION = 1u << 2,
|
||||
TRANSFORM_HAS_VARIATION = 1u << 3,
|
||||
HAVE_TRANSLATE_X = 1u << 4,
|
||||
HAVE_TRANSLATE_Y = 1u << 5,
|
||||
HAVE_ROTATION = 1u << 6,
|
||||
HAVE_CONDITION = 1u << 7,
|
||||
HAVE_SCALE_X = 1u << 8,
|
||||
HAVE_SCALE_Y = 1u << 9,
|
||||
HAVE_TCENTER_X = 1u << 10,
|
||||
HAVE_TCENTER_Y = 1u << 11,
|
||||
GID_IS_24BIT = 1u << 12,
|
||||
HAVE_SKEW_X = 1u << 13,
|
||||
HAVE_SKEW_Y = 1u << 14,
|
||||
RESERVED_MASK = ~((1u << 15) - 1),
|
||||
};
|
||||
|
||||
HB_INTERNAL hb_ubytes_t
|
||||
get_path_at (hb_font_t *font,
|
||||
hb_codepoint_t parent_gid,
|
||||
hb_draw_session_t &draw_session,
|
||||
hb_array_t<const int> coords,
|
||||
hb_ubytes_t record,
|
||||
hb_set_t *visited,
|
||||
signed *edges_left,
|
||||
signed depth_left,
|
||||
VarRegionList::cache_t *cache = nullptr) const;
|
||||
};
|
||||
|
||||
struct VarCompositeGlyph
|
||||
{
|
||||
static void
|
||||
get_path_at (hb_font_t *font,
|
||||
hb_codepoint_t glyph,
|
||||
hb_draw_session_t &draw_session,
|
||||
hb_array_t<const int> coords,
|
||||
hb_ubytes_t record,
|
||||
hb_set_t *visited,
|
||||
signed *edges_left,
|
||||
signed depth_left,
|
||||
VarRegionList::cache_t *cache = nullptr)
|
||||
{
|
||||
while (record)
|
||||
{
|
||||
const VarComponent &comp = * (const VarComponent *) (record.arrayZ);
|
||||
record = comp.get_path_at (font, glyph,
|
||||
draw_session, coords,
|
||||
record,
|
||||
visited, edges_left, depth_left, cache);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
HB_MARK_AS_FLAG_T (VarComponent::flags_t);
|
||||
|
||||
struct VARC
|
||||
{
|
||||
friend struct VarComponent;
|
||||
|
||||
static constexpr hb_tag_t tableTag = HB_TAG ('V', 'A', 'R', 'C');
|
||||
|
||||
bool
|
||||
get_path_at (hb_font_t *font,
|
||||
hb_codepoint_t glyph,
|
||||
hb_draw_session_t &draw_session,
|
||||
hb_array_t<const int> coords,
|
||||
hb_codepoint_t parent_glyph = HB_CODEPOINT_INVALID,
|
||||
hb_set_t *visited = nullptr,
|
||||
signed *edges_left = nullptr,
|
||||
signed depth_left = HB_MAX_NESTING_LEVEL) const
|
||||
{
|
||||
hb_set_t stack_set;
|
||||
if (visited == nullptr)
|
||||
visited = &stack_set;
|
||||
signed stack_edges = HB_MAX_GRAPH_EDGE_COUNT;
|
||||
if (edges_left == nullptr)
|
||||
edges_left = &stack_edges;
|
||||
|
||||
// Don't recurse on the same glyph.
|
||||
unsigned idx = glyph == parent_glyph ?
|
||||
NOT_COVERED :
|
||||
(this+coverage).get_coverage (glyph);
|
||||
if (idx == NOT_COVERED)
|
||||
{
|
||||
if (!font->face->table.glyf->get_path_at (font, glyph, draw_session, coords))
|
||||
#ifndef HB_NO_CFF
|
||||
if (!font->face->table.cff2->get_path_at (font, glyph, draw_session, coords))
|
||||
if (!font->face->table.cff1->get_path (font, glyph, draw_session)) // Doesn't have variations
|
||||
#endif
|
||||
return false;
|
||||
return true;
|
||||
}
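/* Fallback order when the glyph is not covered by VARC (or is a direct
 * self-reference): glyf with the component coords, then CFF2 with coords,
 * then plain CFF1, which carries no variation data; only if all of them
 * fail does the function report failure. */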
|
||||
|
||||
if (depth_left <= 0)
|
||||
return true;
|
||||
|
||||
if (*edges_left <= 0)
|
||||
return true;
|
||||
(*edges_left)--;
|
||||
|
||||
if (visited->has (glyph) || visited->in_error ())
|
||||
return true;
|
||||
visited->add (glyph);
|
||||
|
||||
hb_ubytes_t record = (this+glyphRecords)[idx];
|
||||
|
||||
VarRegionList::cache_t *cache = record.length >= 64 ? // Heuristic
|
||||
(this+varStore).create_cache ()
|
||||
: nullptr;
|
||||
|
||||
VarCompositeGlyph::get_path_at (font, glyph,
|
||||
draw_session, coords,
|
||||
record,
|
||||
visited, edges_left, depth_left,
|
||||
cache);
|
||||
|
||||
(this+varStore).destroy_cache (cache);
|
||||
|
||||
visited->del (glyph);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
get_path (hb_font_t *font, hb_codepoint_t gid, hb_draw_session_t &draw_session) const
|
||||
{ return get_path_at (font, gid, draw_session, hb_array (font->coords, font->num_coords)); }
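/* From the public API side this is reached through the font's draw path; a
 * caller would typically just request the outline with hb_font_draw_glyph ()
 * (sketch; funcs is assumed to be an hb_draw_funcs_t with the usual
 * move_to/line_to/quadratic_to/cubic_to/close_path callbacks set): */
static void draw_outline (hb_font_t *font, hb_codepoint_t gid,
                          hb_draw_funcs_t *funcs, void *user_data)
{
  hb_font_draw_glyph (font, gid, funcs, user_data);
}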
|
||||
|
||||
bool paint_glyph (hb_font_t *font, hb_codepoint_t gid, hb_paint_funcs_t *funcs, void *data, hb_color_t foreground) const
|
||||
{
|
||||
funcs->push_clip_glyph (data, gid, font);
|
||||
funcs->color (data, true, foreground);
|
||||
funcs->pop_clip (data);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (version.sanitize (c) &&
|
||||
hb_barrier () &&
|
||||
version.major == 1 &&
|
||||
coverage.sanitize (c, this) &&
|
||||
varStore.sanitize (c, this) &&
|
||||
conditionList.sanitize (c, this) &&
|
||||
axisIndicesList.sanitize (c, this) &&
|
||||
glyphRecords.sanitize (c, this));
|
||||
}
|
||||
|
||||
protected:
|
||||
FixedVersion<> version; /* Version identifier */
|
||||
Offset32To<Coverage> coverage;
|
||||
Offset32To<MultiItemVariationStore> varStore;
|
||||
Offset32To<ConditionList> conditionList;
|
||||
Offset32To<TupleList> axisIndicesList;
|
||||
Offset32To<CFF2Index/*Of<VarCompositeGlyph>*/> glyphRecords;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (24);
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
//}
|
||||
|
||||
}
|
||||
|
||||
#endif /* OT_VAR_VARC_VARC_HH */
|
||||
|
|
@ -1,22 +1,22 @@
|
|||
#ifndef OT_GLYF_COORD_SETTER_HH
|
||||
#define OT_GLYF_COORD_SETTER_HH
|
||||
#ifndef OT_VAR_VARC_COORD_SETTER_HH
|
||||
#define OT_VAR_VARC_COORD_SETTER_HH
|
||||
|
||||
|
||||
#include "../../hb.hh"
|
||||
#include "../../../hb.hh"
|
||||
|
||||
|
||||
namespace OT {
|
||||
namespace glyf_impl {
|
||||
//namespace Var {
|
||||
|
||||
|
||||
struct coord_setter_t
|
||||
{
|
||||
coord_setter_t (hb_array_t<int> coords) :
|
||||
coord_setter_t (hb_array_t<const int> coords) :
|
||||
coords (coords) {}
|
||||
|
||||
int& operator [] (unsigned idx)
|
||||
{
|
||||
if (unlikely (idx >= HB_GLYF_VAR_COMPOSITE_MAX_AXES))
|
||||
if (unlikely (idx >= HB_VAR_COMPOSITE_MAX_AXES))
|
||||
return Crap(int);
|
||||
if (coords.length < idx + 1)
|
||||
coords.resize (idx + 1);
|
||||
|
|
@ -30,7 +30,8 @@ struct coord_setter_t
|
|||
};
|
||||
|
||||
|
||||
} /* namespace glyf_impl */
|
||||
} /* namespace OT */
|
||||
//} // namespace Var
|
||||
|
||||
#endif /* OT_GLYF_COORD_SETTER_HH */
|
||||
} // namespace OT
|
||||
|
||||
#endif /* OT_VAR_VARC_COORD_SETTER_HH */
|
||||
|
|
@ -240,7 +240,8 @@ struct CompositeGlyphRecord
|
|||
}
|
||||
if (is_anchored ()) tx = ty = 0;
|
||||
|
||||
trans.init ((float) tx, (float) ty);
|
||||
/* set is_end_point flag to true, used by IUP delta optimization */
|
||||
trans.init ((float) tx, (float) ty, true);
|
||||
|
||||
{
|
||||
const F2DOT14 *points = (const F2DOT14 *) p;
|
||||
|
|
|
|||
|
|
@ -7,8 +7,6 @@
|
|||
#include "GlyphHeader.hh"
|
||||
#include "SimpleGlyph.hh"
|
||||
#include "CompositeGlyph.hh"
|
||||
#include "VarCompositeGlyph.hh"
|
||||
#include "coord-setter.hh"
|
||||
|
||||
|
||||
namespace OT {
|
||||
|
|
@ -33,9 +31,6 @@ struct Glyph
|
|||
EMPTY,
|
||||
SIMPLE,
|
||||
COMPOSITE,
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
VAR_COMPOSITE,
|
||||
#endif
|
||||
};
|
||||
|
||||
public:
|
||||
|
|
@ -44,22 +39,10 @@ struct Glyph
|
|||
if (type != COMPOSITE) return composite_iter_t ();
|
||||
return CompositeGlyph (*header, bytes).iter ();
|
||||
}
|
||||
var_composite_iter_t get_var_composite_iterator () const
|
||||
{
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
if (type != VAR_COMPOSITE) return var_composite_iter_t ();
|
||||
return VarCompositeGlyph (*header, bytes).iter ();
|
||||
#else
|
||||
return var_composite_iter_t ();
|
||||
#endif
|
||||
}
|
||||
|
||||
const hb_bytes_t trim_padding () const
|
||||
{
|
||||
switch (type) {
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
case VAR_COMPOSITE: return VarCompositeGlyph (*header, bytes).trim_padding ();
|
||||
#endif
|
||||
case COMPOSITE: return CompositeGlyph (*header, bytes).trim_padding ();
|
||||
case SIMPLE: return SimpleGlyph (*header, bytes).trim_padding ();
|
||||
case EMPTY: return bytes;
|
||||
|
|
@ -70,9 +53,6 @@ struct Glyph
|
|||
void drop_hints ()
|
||||
{
|
||||
switch (type) {
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
case VAR_COMPOSITE: return; // No hinting
|
||||
#endif
|
||||
case COMPOSITE: CompositeGlyph (*header, bytes).drop_hints (); return;
|
||||
case SIMPLE: SimpleGlyph (*header, bytes).drop_hints (); return;
|
||||
case EMPTY: return;
|
||||
|
|
@ -82,9 +62,6 @@ struct Glyph
|
|||
void set_overlaps_flag ()
|
||||
{
|
||||
switch (type) {
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
case VAR_COMPOSITE: return; // No overlaps flag
|
||||
#endif
|
||||
case COMPOSITE: CompositeGlyph (*header, bytes).set_overlaps_flag (); return;
|
||||
case SIMPLE: SimpleGlyph (*header, bytes).set_overlaps_flag (); return;
|
||||
case EMPTY: return;
|
||||
|
|
@ -94,15 +71,15 @@ struct Glyph
|
|||
void drop_hints_bytes (hb_bytes_t &dest_start, hb_bytes_t &dest_end) const
|
||||
{
|
||||
switch (type) {
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
case VAR_COMPOSITE: return; // No hinting
|
||||
#endif
|
||||
case COMPOSITE: CompositeGlyph (*header, bytes).drop_hints_bytes (dest_start); return;
|
||||
case SIMPLE: SimpleGlyph (*header, bytes).drop_hints_bytes (dest_start, dest_end); return;
|
||||
case EMPTY: return;
|
||||
}
|
||||
}
|
||||
|
||||
bool is_composite () const
|
||||
{ return type == COMPOSITE; }
|
||||
|
||||
bool get_all_points_without_var (const hb_face_t *face,
|
||||
contour_point_vector_t &points /* OUT */) const
|
||||
{
|
||||
|
|
@ -117,14 +94,6 @@ struct Glyph
|
|||
if (unlikely (!item.get_points (points))) return false;
|
||||
break;
|
||||
}
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
case VAR_COMPOSITE:
|
||||
{
|
||||
for (auto &item : get_var_composite_iterator ())
|
||||
if (unlikely (!item.get_points (points))) return false;
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
case EMPTY:
|
||||
break;
|
||||
}
|
||||
|
|
@ -300,13 +269,6 @@ struct Glyph
|
|||
{
|
||||
switch (type)
|
||||
{
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
case VAR_COMPOSITE:
|
||||
// TODO
|
||||
dest_end = hb_bytes_t ();
|
||||
break;
|
||||
#endif
|
||||
|
||||
case COMPOSITE:
|
||||
if (!CompositeGlyph (*header, bytes).compile_bytes_with_deltas (dest_start,
|
||||
points_with_deltas,
|
||||
|
|
@ -349,7 +311,7 @@ struct Glyph
|
|||
bool shift_points_hori = true,
|
||||
bool use_my_metrics = true,
|
||||
bool phantom_only = false,
|
||||
hb_array_t<int> coords = hb_array_t<int> (),
|
||||
hb_array_t<const int> coords = hb_array_t<const int> (),
|
||||
hb_map_t *current_glyphs = nullptr,
|
||||
unsigned int depth = 0,
|
||||
unsigned *edge_count = nullptr) const
|
||||
|
|
@ -357,7 +319,7 @@ struct Glyph
|
|||
if (unlikely (depth > HB_MAX_NESTING_LEVEL)) return false;
|
||||
unsigned stack_edge_count = 0;
|
||||
if (!edge_count) edge_count = &stack_edge_count;
|
||||
if (unlikely (*edge_count > HB_GLYF_MAX_EDGE_COUNT)) return false;
|
||||
if (unlikely (*edge_count > HB_MAX_GRAPH_EDGE_COUNT)) return false;
|
||||
(*edge_count)++;
|
||||
|
||||
hb_map_t current_glyphs_stack;
|
||||
|
|
@ -391,14 +353,6 @@ struct Glyph
|
|||
if (unlikely (!item.get_points (points))) return false;
|
||||
break;
|
||||
}
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
case VAR_COMPOSITE:
|
||||
{
|
||||
for (auto &item : get_var_composite_iterator ())
|
||||
if (unlikely (!item.get_points (points))) return false;
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
case EMPTY:
|
||||
break;
|
||||
}
|
||||
|
|
@ -539,81 +493,6 @@ struct Glyph
|
|||
}
|
||||
all_points.extend (phantoms);
|
||||
} break;
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
case VAR_COMPOSITE:
|
||||
{
|
||||
hb_array_t<contour_point_t> points_left = points.as_array ();
|
||||
for (auto &item : get_var_composite_iterator ())
|
||||
{
|
||||
hb_codepoint_t item_gid = item.get_gid ();
|
||||
|
||||
if (unlikely (current_glyphs->has (item_gid)))
|
||||
continue;
|
||||
|
||||
current_glyphs->add (item_gid);
|
||||
|
||||
unsigned item_num_points = item.get_num_points ();
|
||||
hb_array_t<contour_point_t> record_points = points_left.sub_array (0, item_num_points);
|
||||
assert (record_points.length == item_num_points);
|
||||
|
||||
auto component_coords = coords;
|
||||
/* Copying coords is expensive, so we have put an arbitrary
|
||||
* limit on the max number of coords for now. */
|
||||
if (item.is_reset_unspecified_axes () ||
|
||||
coords.length > HB_GLYF_VAR_COMPOSITE_MAX_AXES)
|
||||
component_coords = hb_array<int> ();
|
||||
|
||||
coord_setter_t coord_setter (component_coords);
|
||||
item.set_variations (coord_setter, record_points);
|
||||
|
||||
unsigned old_count = all_points.length;
|
||||
|
||||
if (unlikely ((!phantom_only || (use_my_metrics && item.is_use_my_metrics ())) &&
|
||||
!glyf_accelerator.glyph_for_gid (item_gid)
|
||||
.get_points (font,
|
||||
glyf_accelerator,
|
||||
all_points,
|
||||
points_with_deltas,
|
||||
head_maxp_info,
|
||||
nullptr,
|
||||
shift_points_hori,
|
||||
use_my_metrics,
|
||||
phantom_only,
|
||||
coord_setter.get_coords (),
|
||||
current_glyphs,
|
||||
depth + 1,
|
||||
edge_count)))
|
||||
{
|
||||
current_glyphs->del (item_gid);
|
||||
return false;
|
||||
}
|
||||
|
||||
auto comp_points = all_points.as_array ().sub_array (old_count);
|
||||
|
||||
/* Apply component transformation */
|
||||
if (comp_points) // Empty in case of phantom_only
|
||||
item.transform_points (record_points, comp_points);
|
||||
|
||||
/* Copy phantom points from component if USE_MY_METRICS flag set */
|
||||
if (use_my_metrics && item.is_use_my_metrics ())
|
||||
for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
|
||||
phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];
|
||||
|
||||
all_points.resize (all_points.length - PHANTOM_COUNT);
|
||||
|
||||
if (all_points.length > HB_GLYF_MAX_POINTS)
|
||||
{
|
||||
current_glyphs->del (item_gid);
|
||||
return false;
|
||||
}
|
||||
|
||||
points_left += item_num_points;
|
||||
|
||||
current_glyphs->del (item_gid);
|
||||
}
|
||||
all_points.extend (phantoms);
|
||||
} break;
|
||||
#endif
|
||||
case EMPTY:
|
||||
all_points.extend (phantoms);
|
||||
break;
|
||||
|
|
@ -624,7 +503,7 @@ struct Glyph
|
|||
/* Undocumented rasterizer behavior:
|
||||
* Shift points horizontally by the updated left side bearing
|
||||
*/
|
||||
int v = -phantoms[PHANTOM_LEFT].x;
|
||||
float v = -phantoms[PHANTOM_LEFT].x;
|
||||
if (v)
|
||||
for (auto &point : all_points)
|
||||
point.x += v;
|
||||
|
|
@ -658,10 +537,7 @@ struct Glyph
|
|||
int num_contours = header->numberOfContours;
|
||||
if (unlikely (num_contours == 0)) type = EMPTY;
|
||||
else if (num_contours > 0) type = SIMPLE;
|
||||
else if (num_contours == -1) type = COMPOSITE;
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
else if (num_contours == -2) type = VAR_COMPOSITE;
|
||||
#endif
|
||||
else if (num_contours <= -1) type = COMPOSITE;
|
||||
else type = EMPTY; // Spec deviation; the spec says COMPOSITE, but this is not seen in the wild.
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -53,23 +53,12 @@ struct SubsetGlyph
|
|||
if (plan->new_gid_for_old_gid (_.get_gid(), &new_gid))
|
||||
const_cast<CompositeGlyphRecord &> (_).set_gid (new_gid);
|
||||
}
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
for (auto &_ : Glyph (dest_glyph).get_var_composite_iterator ())
|
||||
{
|
||||
hb_codepoint_t new_gid;
|
||||
if (plan->new_gid_for_old_gid (_.get_gid(), &new_gid))
|
||||
const_cast<VarCompositeGlyphRecord &> (_).set_gid (new_gid);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef HB_NO_BEYOND_64K
|
||||
auto it = Glyph (dest_glyph).get_composite_iterator ();
|
||||
if (it)
|
||||
{
|
||||
/* lower GID24 to GID16 in components if possible.
|
||||
*
|
||||
* TODO: VarComposite. Not as critical, since VarComposite supports
|
||||
* gid24 from the first version. */
|
||||
/* lower GID24 to GID16 in components if possible. */
|
||||
char *p = it ? (char *) &*it : nullptr;
|
||||
char *q = p;
|
||||
const char *end = dest_glyph.arrayZ + dest_glyph.length;
|
||||
|
|
|
|||
|
|
@ -1,401 +0,0 @@
|
|||
#ifndef OT_GLYF_VARCOMPOSITEGLYPH_HH
|
||||
#define OT_GLYF_VARCOMPOSITEGLYPH_HH
|
||||
|
||||
|
||||
#include "../../hb-open-type.hh"
|
||||
#include "coord-setter.hh"
|
||||
|
||||
|
||||
namespace OT {
|
||||
namespace glyf_impl {
|
||||
|
||||
|
||||
struct VarCompositeGlyphRecord
|
||||
{
|
||||
protected:
|
||||
enum var_composite_glyph_flag_t
|
||||
{
|
||||
USE_MY_METRICS = 0x0001,
|
||||
AXIS_INDICES_ARE_SHORT = 0x0002,
|
||||
UNIFORM_SCALE = 0x0004,
|
||||
HAVE_TRANSLATE_X = 0x0008,
|
||||
HAVE_TRANSLATE_Y = 0x0010,
|
||||
HAVE_ROTATION = 0x0020,
|
||||
HAVE_SCALE_X = 0x0040,
|
||||
HAVE_SCALE_Y = 0x0080,
|
||||
HAVE_SKEW_X = 0x0100,
|
||||
HAVE_SKEW_Y = 0x0200,
|
||||
HAVE_TCENTER_X = 0x0400,
|
||||
HAVE_TCENTER_Y = 0x0800,
|
||||
GID_IS_24BIT = 0x1000,
|
||||
AXES_HAVE_VARIATION = 0x2000,
|
||||
RESET_UNSPECIFIED_AXES = 0x4000,
|
||||
};
|
||||
|
||||
public:
|
||||
|
||||
unsigned int get_size () const
|
||||
{
|
||||
unsigned fl = flags;
|
||||
unsigned int size = min_size;
|
||||
|
||||
unsigned axis_width = (fl & AXIS_INDICES_ARE_SHORT) ? 4 : 3;
|
||||
size += numAxes * axis_width;
|
||||
|
||||
if (fl & GID_IS_24BIT) size += 1;
|
||||
|
||||
// 2 bytes each for the following flags
|
||||
fl = fl & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y |
|
||||
HAVE_ROTATION |
|
||||
HAVE_SCALE_X | HAVE_SCALE_Y |
|
||||
HAVE_SKEW_X | HAVE_SKEW_Y |
|
||||
HAVE_TCENTER_X | HAVE_TCENTER_Y);
|
||||
size += hb_popcount (fl) * 2;
|
||||
|
||||
return size;
|
||||
}
|
||||
|
||||
bool has_more () const { return true; }
|
||||
|
||||
bool is_use_my_metrics () const { return flags & USE_MY_METRICS; }
|
||||
bool is_reset_unspecified_axes () const { return flags & RESET_UNSPECIFIED_AXES; }
|
||||
|
||||
hb_codepoint_t get_gid () const
|
||||
{
|
||||
if (flags & GID_IS_24BIT)
|
||||
return * (const HBGlyphID24 *) &pad;
|
||||
else
|
||||
return * (const HBGlyphID16 *) &pad;
|
||||
}
|
||||
|
||||
void set_gid (hb_codepoint_t gid)
|
||||
{
|
||||
if (flags & GID_IS_24BIT)
|
||||
* (HBGlyphID24 *) &pad = gid;
|
||||
else
|
||||
* (HBGlyphID16 *) &pad = gid;
|
||||
}
|
||||
|
||||
unsigned get_numAxes () const
|
||||
{
|
||||
return numAxes;
|
||||
}
|
||||
|
||||
unsigned get_num_points () const
|
||||
{
|
||||
unsigned fl = flags;
|
||||
unsigned num = 0;
|
||||
if (fl & AXES_HAVE_VARIATION) num += numAxes;
|
||||
|
||||
/* Hopefully faster code, relying on the value of the flags. */
|
||||
fl = (((fl & (HAVE_TRANSLATE_Y | HAVE_SCALE_Y | HAVE_SKEW_Y | HAVE_TCENTER_Y)) >> 1) | fl) &
|
||||
(HAVE_TRANSLATE_X | HAVE_ROTATION | HAVE_SCALE_X | HAVE_SKEW_X | HAVE_TCENTER_X);
|
||||
num += hb_popcount (fl);
|
||||
return num;
|
||||
|
||||
/* Slower but more readable code. */
|
||||
if (fl & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y)) num++;
|
||||
if (fl & HAVE_ROTATION) num++;
|
||||
if (fl & (HAVE_SCALE_X | HAVE_SCALE_Y)) num++;
|
||||
if (fl & (HAVE_SKEW_X | HAVE_SKEW_Y)) num++;
|
||||
if (fl & (HAVE_TCENTER_X | HAVE_TCENTER_Y)) num++;
|
||||
return num;
|
||||
}
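/* Worked example of the fast path above: with
 *   fl = HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y | HAVE_ROTATION | HAVE_SCALE_X
 * each *_Y flag is its *_X flag shifted left by one, so the fold maps
 * TRANSLATE_Y onto TRANSLATE_X, the mask keeps TRANSLATE_X, ROTATION and
 * SCALE_X, and hb_popcount () yields 3, i.e. one pseudo-point per transform
 * group, matching the "slower but more readable" version kept after the
 * return (plus numAxes when AXES_HAVE_VARIATION is set). */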
|
||||
|
||||
void transform_points (hb_array_t<const contour_point_t> record_points,
|
||||
hb_array_t<contour_point_t> points) const
|
||||
{
|
||||
float matrix[4];
|
||||
contour_point_t trans;
|
||||
|
||||
get_transformation_from_points (record_points.arrayZ, matrix, trans);
|
||||
|
||||
auto arrayZ = points.arrayZ;
|
||||
unsigned count = points.length;
|
||||
|
||||
if (matrix[0] != 1.f || matrix[1] != 0.f ||
|
||||
matrix[2] != 0.f || matrix[3] != 1.f)
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
arrayZ[i].transform (matrix);
|
||||
|
||||
if (trans.x != 0.f || trans.y != 0.f)
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
arrayZ[i].translate (trans);
|
||||
}
|
||||
|
||||
static inline void transform (float (&matrix)[4], contour_point_t &trans,
|
||||
float (other)[6])
|
||||
{
|
||||
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L268
|
||||
float xx1 = other[0];
|
||||
float xy1 = other[1];
|
||||
float yx1 = other[2];
|
||||
float yy1 = other[3];
|
||||
float dx1 = other[4];
|
||||
float dy1 = other[5];
|
||||
float xx2 = matrix[0];
|
||||
float xy2 = matrix[1];
|
||||
float yx2 = matrix[2];
|
||||
float yy2 = matrix[3];
|
||||
float dx2 = trans.x;
|
||||
float dy2 = trans.y;
|
||||
|
||||
matrix[0] = xx1*xx2 + xy1*yx2;
|
||||
matrix[1] = xx1*xy2 + xy1*yy2;
|
||||
matrix[2] = yx1*xx2 + yy1*yx2;
|
||||
matrix[3] = yx1*xy2 + yy1*yy2;
|
||||
trans.x = xx2*dx1 + yx2*dy1 + dx2;
|
||||
trans.y = xy2*dx1 + yy2*dy1 + dy2;
|
||||
}
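/* translate () below is the special case of the general transform () above
 * with other = {1, 0, 0, 1, translateX, translateY}: substituting leaves the
 * 2x2 matrix untouched and only advances trans, which is exactly what the
 * hand-inlined version computes. */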
|
||||
|
||||
static void translate (float (&matrix)[4], contour_point_t &trans,
|
||||
float translateX, float translateY)
|
||||
{
|
||||
if (!translateX && !translateY)
|
||||
return;
|
||||
|
||||
trans.x += matrix[0] * translateX + matrix[2] * translateY;
|
||||
trans.y += matrix[1] * translateX + matrix[3] * translateY;
|
||||
}
|
||||
|
||||
static void scale (float (&matrix)[4], contour_point_t &trans,
|
||||
float scaleX, float scaleY)
|
||||
{
|
||||
if (scaleX == 1.f && scaleY == 1.f)
|
||||
return;
|
||||
|
||||
matrix[0] *= scaleX;
|
||||
matrix[1] *= scaleX;
|
||||
matrix[2] *= scaleY;
|
||||
matrix[3] *= scaleY;
|
||||
}
|
||||
|
||||
static void rotate (float (&matrix)[4], contour_point_t &trans,
|
||||
float rotation)
|
||||
{
|
||||
if (!rotation)
|
||||
return;
|
||||
|
||||
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L240
|
||||
rotation = rotation * HB_PI;
|
||||
float c;
|
||||
float s;
|
||||
#ifdef HAVE_SINCOSF
|
||||
sincosf (rotation, &s, &c);
|
||||
#else
|
||||
c = cosf (rotation);
|
||||
s = sinf (rotation);
|
||||
#endif
|
||||
float other[6] = {c, s, -s, c, 0.f, 0.f};
|
||||
transform (matrix, trans, other);
|
||||
}
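/* Example: the rotation field is stored in half-turns, so a decoded value of
 * 0.5 becomes 0.5 * HB_PI radians here; sincosf/cosf+sinf then give c = 0,
 * s = 1, and `other` = {0, 1, -1, 0, 0, 0}, a quarter-turn rotation. */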
|
||||
|
||||
static void skew (float (&matrix)[4], contour_point_t &trans,
|
||||
float skewX, float skewY)
|
||||
{
|
||||
if (!skewX && !skewY)
|
||||
return;
|
||||
|
||||
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L255
|
||||
skewX = skewX * HB_PI;
|
||||
skewY = skewY * HB_PI;
|
||||
float other[6] = {1.f,
|
||||
skewY ? tanf (skewY) : 0.f,
|
||||
skewX ? tanf (skewX) : 0.f,
|
||||
1.f,
|
||||
0.f, 0.f};
|
||||
transform (matrix, trans, other);
|
||||
}
|
||||
|
||||
bool get_points (contour_point_vector_t &points) const
|
||||
{
|
||||
unsigned num_points = get_num_points ();
|
||||
|
||||
points.alloc (points.length + num_points + 4); // For phantom points
|
||||
if (unlikely (!points.resize (points.length + num_points, false))) return false;
|
||||
contour_point_t *rec_points = points.arrayZ + (points.length - num_points);
|
||||
hb_memset (rec_points, 0, num_points * sizeof (rec_points[0]));
|
||||
|
||||
unsigned fl = flags;
|
||||
|
||||
unsigned num_axes = numAxes;
|
||||
unsigned axis_width = (fl & AXIS_INDICES_ARE_SHORT) ? 2 : 1;
|
||||
unsigned axes_size = num_axes * axis_width;
|
||||
|
||||
const F2DOT14 *q = (const F2DOT14 *) (axes_size +
|
||||
(fl & GID_IS_24BIT ? 3 : 2) +
|
||||
(const HBUINT8 *) &pad);
|
||||
|
||||
unsigned count = num_axes;
|
||||
if (fl & AXES_HAVE_VARIATION)
|
||||
{
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
rec_points++->x = q++->to_int ();
|
||||
}
|
||||
else
|
||||
q += count;
|
||||
|
||||
const HBUINT16 *p = (const HBUINT16 *) q;
|
||||
|
||||
if (fl & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y))
|
||||
{
|
||||
int translateX = (fl & HAVE_TRANSLATE_X) ? * (const FWORD *) p++ : 0;
|
||||
int translateY = (fl & HAVE_TRANSLATE_Y) ? * (const FWORD *) p++ : 0;
|
||||
rec_points->x = translateX;
|
||||
rec_points->y = translateY;
|
||||
rec_points++;
|
||||
}
|
||||
if (fl & HAVE_ROTATION)
|
||||
{
|
||||
int rotation = (fl & HAVE_ROTATION) ? ((const F4DOT12 *) p++)->to_int () : 0;
|
||||
rec_points->x = rotation;
|
||||
rec_points++;
|
||||
}
|
||||
if (fl & (HAVE_SCALE_X | HAVE_SCALE_Y))
|
||||
{
|
||||
int scaleX = (fl & HAVE_SCALE_X) ? ((const F6DOT10 *) p++)->to_int () : 1 << 10;
|
||||
int scaleY = (fl & HAVE_SCALE_Y) ? ((const F6DOT10 *) p++)->to_int () : 1 << 10;
|
||||
if ((fl & UNIFORM_SCALE) && !(fl & HAVE_SCALE_Y))
|
||||
scaleY = scaleX;
|
||||
rec_points->x = scaleX;
|
||||
rec_points->y = scaleY;
|
||||
rec_points++;
|
||||
}
|
||||
if (fl & (HAVE_SKEW_X | HAVE_SKEW_Y))
|
||||
{
|
||||
int skewX = (fl & HAVE_SKEW_X) ? ((const F4DOT12 *) p++)->to_int () : 0;
|
||||
int skewY = (fl & HAVE_SKEW_Y) ? ((const F4DOT12 *) p++)->to_int () : 0;
|
||||
rec_points->x = skewX;
|
||||
rec_points->y = skewY;
|
||||
rec_points++;
|
||||
}
|
||||
if (fl & (HAVE_TCENTER_X | HAVE_TCENTER_Y))
|
||||
{
|
||||
int tCenterX = (fl & HAVE_TCENTER_X) ? * (const FWORD *) p++ : 0;
|
||||
int tCenterY = (fl & HAVE_TCENTER_Y) ? * (const FWORD *) p++ : 0;
|
||||
rec_points->x = tCenterX;
|
||||
rec_points->y = tCenterY;
|
||||
rec_points++;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
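/* Note on the encoding above: transform values are parked in contour points
 * in their raw fixed-point units so gvar deltas can be applied to them like
 * ordinary coordinates; e.g. the scale default of 1 << 10 is 1.0 in F6DOT10,
 * and get_transformation_from_points () divides by 1 << 10 or 1 << 12 again
 * before building the matrix. */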
|
||||
|
||||
void get_transformation_from_points (const contour_point_t *rec_points,
|
||||
float (&matrix)[4], contour_point_t &trans) const
|
||||
{
|
||||
unsigned fl = flags;
|
||||
|
||||
if (fl & AXES_HAVE_VARIATION)
|
||||
rec_points += numAxes;
|
||||
|
||||
matrix[0] = matrix[3] = 1.f;
|
||||
matrix[1] = matrix[2] = 0.f;
|
||||
trans.init (0.f, 0.f);
|
||||
|
||||
float translateX = 0.f;
|
||||
float translateY = 0.f;
|
||||
float rotation = 0.f;
|
||||
float scaleX = 1.f;
|
||||
float scaleY = 1.f;
|
||||
float skewX = 0.f;
|
||||
float skewY = 0.f;
|
||||
float tCenterX = 0.f;
|
||||
float tCenterY = 0.f;
|
||||
|
||||
if (fl & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y))
|
||||
{
|
||||
translateX = rec_points->x;
|
||||
translateY = rec_points->y;
|
||||
rec_points++;
|
||||
}
|
||||
if (fl & HAVE_ROTATION)
|
||||
{
|
||||
rotation = rec_points->x / (1 << 12);
|
||||
rec_points++;
|
||||
}
|
||||
if (fl & (HAVE_SCALE_X | HAVE_SCALE_Y))
|
||||
{
|
||||
scaleX = rec_points->x / (1 << 10);
|
||||
scaleY = rec_points->y / (1 << 10);
|
||||
rec_points++;
|
||||
}
|
||||
if (fl & (HAVE_SKEW_X | HAVE_SKEW_Y))
|
||||
{
|
||||
skewX = rec_points->x / (1 << 12);
|
||||
skewY = rec_points->y / (1 << 12);
|
||||
rec_points++;
|
||||
}
|
||||
if (fl & (HAVE_TCENTER_X | HAVE_TCENTER_Y))
|
||||
{
|
||||
tCenterX = rec_points->x;
|
||||
tCenterY = rec_points->y;
|
||||
rec_points++;
|
||||
}
|
||||
|
||||
translate (matrix, trans, translateX + tCenterX, translateY + tCenterY);
|
||||
rotate (matrix, trans, rotation);
|
||||
scale (matrix, trans, scaleX, scaleY);
|
||||
skew (matrix, trans, -skewX, skewY);
|
||||
translate (matrix, trans, -tCenterX, -tCenterY);
|
||||
}
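/* The five calls above compose, in fontTools order,
 *   Translate (tx + cx, ty + cy) . Rotate (r) . Scale (sx, sy)
 *     . Skew (-skewX, skewY) . Translate (-cx, -cy)
 * i.e. the component is skewed, scaled and rotated about its transform
 * center (tCenterX, tCenterY) and then translated into place. */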
|
||||
|
||||
void set_variations (coord_setter_t &setter,
|
||||
hb_array_t<contour_point_t> rec_points) const
|
||||
{
|
||||
bool have_variations = flags & AXES_HAVE_VARIATION;
|
||||
unsigned axis_width = (flags & AXIS_INDICES_ARE_SHORT) ? 2 : 1;
|
||||
unsigned num_axes = numAxes;
|
||||
|
||||
const HBUINT8 *p = (const HBUINT8 *) (((HBUINT8 *) &numAxes) + numAxes.static_size + (flags & GID_IS_24BIT ? 3 : 2));
|
||||
const HBUINT16 *q = (const HBUINT16 *) (((HBUINT8 *) &numAxes) + numAxes.static_size + (flags & GID_IS_24BIT ? 3 : 2));
|
||||
|
||||
const F2DOT14 *a = (const F2DOT14 *) ((HBUINT8 *) (axis_width == 1 ? (p + num_axes) : (HBUINT8 *) (q + num_axes)));
|
||||
|
||||
unsigned count = num_axes;
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
{
|
||||
unsigned axis_index = axis_width == 1 ? (unsigned) *p++ : (unsigned) *q++;
|
||||
|
||||
signed v = have_variations ? rec_points.arrayZ[i].x : a++->to_int ();
|
||||
|
||||
v = hb_clamp (v, -(1<<14), (1<<14));
|
||||
setter[axis_index] = v;
|
||||
}
|
||||
}
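/* The clamp above keeps every value inside [-(1 << 14), 1 << 14], i.e. the
 * [-1.0, 1.0] range of normalized axis coordinates in 2.14 fixed point,
 * whether the value came from the variation pseudo-points or straight from
 * the stored F2DOT14 defaults. */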
|
||||
|
||||
protected:
|
||||
HBUINT16 flags;
|
||||
HBUINT8 numAxes;
|
||||
HBUINT16 pad;
|
||||
public:
|
||||
DEFINE_SIZE_MIN (5);
|
||||
};
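/* Record layout implied by the fields above: 2 bytes of flags, 1 byte
 * numAxes, then a 16- or 24-bit glyph id overlapping `pad` (hence
 * DEFINE_SIZE_MIN (5)), followed by the axis indices, the per-axis F2DOT14
 * values, and the optional 2-byte transform fields counted in get_size (). */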
|
||||
|
||||
using var_composite_iter_t = composite_iter_tmpl<VarCompositeGlyphRecord>;
|
||||
|
||||
struct VarCompositeGlyph
|
||||
{
|
||||
const GlyphHeader &header;
|
||||
hb_bytes_t bytes;
|
||||
VarCompositeGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) :
|
||||
header (header_), bytes (bytes_) {}
|
||||
|
||||
var_composite_iter_t iter () const
|
||||
{ return var_composite_iter_t (bytes, &StructAfter<VarCompositeGlyphRecord, GlyphHeader> (header)); }
|
||||
|
||||
const hb_bytes_t trim_padding () const
|
||||
{
|
||||
unsigned length = GlyphHeader::static_size;
|
||||
for (auto &comp : iter ())
|
||||
length += comp.get_size ();
|
||||
return bytes.sub_array (0, length);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
} /* namespace glyf_impl */
|
||||
} /* namespace OT */
|
||||
|
||||
|
||||
#endif /* OT_GLYF_VARCOMPOSITEGLYPH_HH */
|
||||
|
|
@ -38,7 +38,7 @@ _write_loca (IteratorIn&& it,
|
|||
|
||||
unsigned padded_size = *it++;
|
||||
offset += padded_size;
|
||||
DEBUG_MSG (SUBSET, nullptr, "loca entry gid %u offset %u padded-size %u", gid, offset, padded_size);
|
||||
DEBUG_MSG (SUBSET, nullptr, "loca entry gid %" PRIu32 " offset %u padded-size %u", gid, offset, padded_size);
|
||||
value = offset >> right_shift;
|
||||
*dest++ = value;
|
||||
|
||||
|
|
|
|||
|
|
@ -205,8 +205,12 @@ struct glyf_accelerator_t
|
|||
|
||||
protected:
|
||||
template<typename T>
|
||||
bool get_points (hb_font_t *font, hb_codepoint_t gid, T consumer) const
|
||||
bool get_points (hb_font_t *font, hb_codepoint_t gid, T consumer,
|
||||
hb_array_t<const int> coords = hb_array_t<const int> ()) const
|
||||
{
|
||||
if (!coords)
|
||||
coords = hb_array (font->coords, font->num_coords);
|
||||
|
||||
if (gid >= num_glyphs) return false;
|
||||
|
||||
/* Making this allocfree is not that easy
|
||||
|
|
@ -216,7 +220,7 @@ struct glyf_accelerator_t
|
|||
contour_point_vector_t all_points;
|
||||
|
||||
bool phantom_only = !consumer.is_consuming_contour_points ();
|
||||
if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, nullptr, nullptr, nullptr, true, true, phantom_only)))
|
||||
if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, nullptr, nullptr, nullptr, true, true, phantom_only, coords)))
|
||||
return false;
|
||||
|
||||
unsigned count = all_points.length;
|
||||
|
|
@ -408,6 +412,11 @@ struct glyf_accelerator_t
|
|||
get_path (hb_font_t *font, hb_codepoint_t gid, hb_draw_session_t &draw_session) const
|
||||
{ return get_points (font, gid, glyf_impl::path_builder_t (font, draw_session)); }
|
||||
|
||||
bool
|
||||
get_path_at (hb_font_t *font, hb_codepoint_t gid, hb_draw_session_t &draw_session,
|
||||
hb_array_t<const int> coords) const
|
||||
{ return get_points (font, gid, glyf_impl::path_builder_t (font, draw_session), coords); }
|
||||
|
||||
#ifndef HB_NO_VAR
|
||||
const gvar_accelerator_t *gvar;
|
||||
#endif
|
||||
|
|
|
|||
65
modules/juce_graphics/fonts/harfbuzz/failing-alloc.c
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Copyright © 2020 Ebrahim Byagowi
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
int alloc_state = 0;
|
||||
|
||||
__attribute__((no_sanitize("integer")))
|
||||
static int fastrand ()
|
||||
{
|
||||
if (!alloc_state) return 1;
|
||||
/* Based on https://software.intel.com/content/www/us/en/develop/articles/fast-random-number-generator-on-the-intel-pentiumr-4-processor.html */
|
||||
alloc_state = (214013 * alloc_state + 2531011);
|
||||
return (alloc_state >> 16) & 0x7FFF;
|
||||
}
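/* The multiplier/increment pair 214013 / 2531011 is the classic MSVC rand ()
 * linear congruential generator; taking bits 16..30 of the state gives a
 * value in [0, 32767], and a zero alloc_state short-circuits to 1 so that
 * allocations always succeed until failure injection is switched on. */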
|
||||
|
||||
void* hb_malloc_impl (size_t size)
|
||||
{
|
||||
return (fastrand () % 16) ? malloc (size) : NULL;
|
||||
}
|
||||
|
||||
void* hb_calloc_impl (size_t nmemb, size_t size)
|
||||
{
|
||||
return (fastrand () % 16) ? calloc (nmemb, size) : NULL;
|
||||
}
|
||||
|
||||
void* hb_realloc_impl (void *ptr, size_t size)
|
||||
{
|
||||
return (fastrand () % 16) ? realloc (ptr, size) : NULL;
|
||||
}
|
||||
|
||||
void hb_free_impl (void *ptr)
|
||||
{
|
||||
return free (ptr);
|
||||
}
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
64
modules/juce_graphics/fonts/harfbuzz/harfbuzz-subset.cc
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
#include "OT/Var/VARC/VARC.cc"
|
||||
#include "graph/gsubgpos-context.cc"
|
||||
#include "hb-aat-layout.cc"
|
||||
#include "hb-aat-map.cc"
|
||||
#include "hb-blob.cc"
|
||||
#include "hb-buffer-serialize.cc"
|
||||
#include "hb-buffer-verify.cc"
|
||||
#include "hb-buffer.cc"
|
||||
#include "hb-common.cc"
|
||||
#include "hb-draw.cc"
|
||||
#include "hb-face-builder.cc"
|
||||
#include "hb-face.cc"
|
||||
#include "hb-fallback-shape.cc"
|
||||
#include "hb-font.cc"
|
||||
#include "hb-map.cc"
|
||||
#include "hb-number.cc"
|
||||
#include "hb-ot-cff1-table.cc"
|
||||
#include "hb-ot-cff2-table.cc"
|
||||
#include "hb-ot-color.cc"
|
||||
#include "hb-ot-face.cc"
|
||||
#include "hb-ot-font.cc"
|
||||
#include "hb-ot-layout.cc"
|
||||
#include "hb-ot-map.cc"
|
||||
#include "hb-ot-math.cc"
|
||||
#include "hb-ot-meta.cc"
|
||||
#include "hb-ot-metrics.cc"
|
||||
#include "hb-ot-name.cc"
|
||||
#include "hb-ot-shape-fallback.cc"
|
||||
#include "hb-ot-shape-normalize.cc"
|
||||
#include "hb-ot-shape.cc"
|
||||
#include "hb-ot-shaper-arabic.cc"
|
||||
#include "hb-ot-shaper-default.cc"
|
||||
#include "hb-ot-shaper-hangul.cc"
|
||||
#include "hb-ot-shaper-hebrew.cc"
|
||||
#include "hb-ot-shaper-indic-table.cc"
|
||||
#include "hb-ot-shaper-indic.cc"
|
||||
#include "hb-ot-shaper-khmer.cc"
|
||||
#include "hb-ot-shaper-myanmar.cc"
|
||||
#include "hb-ot-shaper-syllabic.cc"
|
||||
#include "hb-ot-shaper-thai.cc"
|
||||
#include "hb-ot-shaper-use.cc"
|
||||
#include "hb-ot-shaper-vowel-constraints.cc"
|
||||
#include "hb-ot-tag.cc"
|
||||
#include "hb-ot-var.cc"
|
||||
#include "hb-outline.cc"
|
||||
#include "hb-paint-extents.cc"
|
||||
#include "hb-paint.cc"
|
||||
#include "hb-set.cc"
|
||||
#include "hb-shape-plan.cc"
|
||||
#include "hb-shape.cc"
|
||||
#include "hb-shaper.cc"
|
||||
#include "hb-static.cc"
|
||||
#include "hb-style.cc"
|
||||
#include "hb-subset-cff-common.cc"
|
||||
#include "hb-subset-cff1.cc"
|
||||
#include "hb-subset-cff2.cc"
|
||||
#include "hb-subset-input.cc"
|
||||
#include "hb-subset-instancer-iup.cc"
|
||||
#include "hb-subset-instancer-solver.cc"
|
||||
#include "hb-subset-plan.cc"
|
||||
#include "hb-subset-repacker.cc"
|
||||
#include "hb-subset.cc"
|
||||
#include "hb-ucd.cc"
|
||||
#include "hb-unicode.cc"
|
||||
|
|
@ -1,3 +1,4 @@
|
|||
#include "OT/Var/VARC/VARC.cc"
|
||||
#include "hb-aat-layout.cc"
|
||||
#include "hb-aat-map.cc"
|
||||
#include "hb-blob.cc"
|
||||
|
|
|
|||
|
|
@ -39,6 +39,7 @@ namespace AAT {
|
|||
|
||||
using namespace OT;
|
||||
|
||||
#define HB_AAT_BUFFER_DIGEST_THRESHOLD 32
|
||||
|
||||
struct ankr;
|
||||
|
||||
|
|
@ -46,8 +47,9 @@ struct hb_aat_apply_context_t :
|
|||
hb_dispatch_context_t<hb_aat_apply_context_t, bool, HB_DEBUG_APPLY>
|
||||
{
|
||||
const char *get_name () { return "APPLY"; }
|
||||
template <typename T>
|
||||
return_t dispatch (const T &obj) { return obj.apply (this); }
|
||||
template <typename T, typename ...Ts>
|
||||
return_t dispatch (const T &obj, Ts&&... ds)
|
||||
{ return obj.apply (this, std::forward<Ts> (ds)...); }
|
||||
static return_t default_return_value () { return false; }
|
||||
bool stop_sublookup_iteration (return_t r) const { return r; }
|
||||
|
||||
|
|
@ -59,6 +61,10 @@ struct hb_aat_apply_context_t :
|
|||
const ankr *ankr_table;
|
||||
const OT::GDEF *gdef_table;
|
||||
const hb_sorted_vector_t<hb_aat_map_t::range_flags_t> *range_flags = nullptr;
|
||||
hb_set_digest_t buffer_digest = hb_set_digest_t::full ();
|
||||
hb_set_digest_t machine_glyph_set = hb_set_digest_t::full ();
|
||||
hb_set_digest_t left_set = hb_set_digest_t::full ();
|
||||
hb_set_digest_t right_set = hb_set_digest_t::full ();
|
||||
hb_mask_t subtable_flags = 0;
|
||||
|
||||
/* Unused. For debug tracing only. */
|
||||
|
|
@ -81,6 +87,8 @@ struct hb_aat_apply_context_t :
|
|||
* Lookup Table
|
||||
*/
|
||||
|
||||
enum { DELETED_GLYPH = 0xFFFF };
|
||||
|
||||
template <typename T> struct Lookup;
|
||||
|
||||
template <typename T>
|
||||
|
|
@ -95,6 +103,12 @@ struct LookupFormat0
|
|||
return &arrayZ[glyph_id];
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs, unsigned num_glyphs) const
|
||||
{
|
||||
glyphs.add_range (0, num_glyphs - 1);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -123,6 +137,14 @@ struct LookupSegmentSingle
|
|||
int cmp (hb_codepoint_t g) const
|
||||
{ return g < first ? -1 : g <= last ? 0 : +1 ; }
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs) const
|
||||
{
|
||||
if (first == DELETED_GLYPH)
|
||||
return;
|
||||
glyphs.add_range (first, last);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -153,6 +175,14 @@ struct LookupFormat2
|
|||
return v ? &v->value : nullptr;
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs) const
|
||||
{
|
||||
unsigned count = segments.get_length ();
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
segments[i].collect_glyphs (glyphs);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -184,6 +214,14 @@ struct LookupSegmentArray
|
|||
return first <= glyph_id && glyph_id <= last ? &(base+valuesZ)[glyph_id - first] : nullptr;
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs) const
|
||||
{
|
||||
if (first == DELETED_GLYPH)
|
||||
return;
|
||||
glyphs.add_range (first, last);
|
||||
}
|
||||
|
||||
int cmp (hb_codepoint_t g) const
|
||||
{ return g < first ? -1 : g <= last ? 0 : +1; }
|
||||
|
||||
|
|
@ -226,6 +264,14 @@ struct LookupFormat4
|
|||
return v ? v->get_value (glyph_id, this) : nullptr;
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs) const
|
||||
{
|
||||
unsigned count = segments.get_length ();
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
segments[i].collect_glyphs (glyphs);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -254,6 +300,14 @@ struct LookupSingle
|
|||
|
||||
int cmp (hb_codepoint_t g) const { return glyph.cmp (g); }
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs) const
|
||||
{
|
||||
if (glyph == DELETED_GLYPH)
|
||||
return;
|
||||
glyphs.add (glyph);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -283,6 +337,14 @@ struct LookupFormat6
|
|||
return v ? &v->value : nullptr;
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs) const
|
||||
{
|
||||
unsigned count = entries.get_length ();
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
entries[i].collect_glyphs (glyphs);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -314,6 +376,16 @@ struct LookupFormat8
|
|||
&valueArrayZ[glyph_id - firstGlyph] : nullptr;
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs) const
|
||||
{
|
||||
if (unlikely (!glyphCount))
|
||||
return;
|
||||
if (firstGlyph == DELETED_GLYPH)
|
||||
return;
|
||||
glyphs.add_range (firstGlyph, firstGlyph + glyphCount - 1);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -358,6 +430,16 @@ struct LookupFormat10
|
|||
return v;
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs) const
|
||||
{
|
||||
if (unlikely (!glyphCount))
|
||||
return;
|
||||
if (firstGlyph == DELETED_GLYPH)
|
||||
return;
|
||||
glyphs.add_range (firstGlyph, firstGlyph + glyphCount - 1);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -406,6 +488,20 @@ struct Lookup
|
|||
}
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs, unsigned int num_glyphs) const
|
||||
{
|
||||
switch (u.format) {
|
||||
case 0: u.format0.collect_glyphs (glyphs, num_glyphs); return;
|
||||
case 2: u.format2.collect_glyphs (glyphs); return;
|
||||
case 4: u.format4.collect_glyphs (glyphs); return;
|
||||
case 6: u.format6.collect_glyphs (glyphs); return;
|
||||
case 8: u.format8.collect_glyphs (glyphs); return;
|
||||
case 10: u.format10.collect_glyphs (glyphs); return;
|
||||
default:return;
|
||||
}
|
||||
}
|
||||
|
||||
typename T::type get_class (hb_codepoint_t glyph_id,
|
||||
unsigned int num_glyphs,
|
||||
unsigned int outOfRange) const
|
||||
|
|
@ -460,8 +556,6 @@ struct Lookup
|
|||
};
|
||||
DECLARE_NULL_NAMESPACE_BYTES_TEMPLATE1 (AAT, Lookup, 2);
|
||||
|
||||
enum { DELETED_GLYPH = 0xFFFF };
|
||||
|
||||
/*
|
||||
* (Extended) State Table
|
||||
*/
|
||||
|
|
@ -512,6 +606,14 @@ struct Entry<void>
|
|||
DEFINE_SIZE_STATIC (4);
|
||||
};
|
||||
|
||||
enum Class
|
||||
{
|
||||
CLASS_END_OF_TEXT = 0,
|
||||
CLASS_OUT_OF_BOUNDS = 1,
|
||||
CLASS_DELETED_GLYPH = 2,
|
||||
CLASS_END_OF_LINE = 3,
|
||||
};
|
||||
|
||||
template <typename Types, typename Extra>
|
||||
struct StateTable
|
||||
{
|
||||
|
|
@ -524,21 +626,24 @@ struct StateTable
|
|||
STATE_START_OF_TEXT = 0,
|
||||
STATE_START_OF_LINE = 1,
|
||||
};
|
||||
enum Class
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs, unsigned num_glyphs) const
|
||||
{
|
||||
CLASS_END_OF_TEXT = 0,
|
||||
CLASS_OUT_OF_BOUNDS = 1,
|
||||
CLASS_DELETED_GLYPH = 2,
|
||||
CLASS_END_OF_LINE = 3,
|
||||
};
|
||||
(this+classTable).collect_glyphs (glyphs, num_glyphs);
|
||||
}
|
||||
|
||||
int new_state (unsigned int newState) const
|
||||
{ return Types::extended ? newState : ((int) newState - (int) stateArrayTable) / (int) nClasses; }
|
||||
|
||||
unsigned int get_class (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
|
||||
template <typename set_t>
|
||||
unsigned int get_class (hb_codepoint_t glyph_id,
|
||||
unsigned int num_glyphs,
|
||||
const set_t &glyphs) const
|
||||
{
|
||||
if (unlikely (glyph_id == DELETED_GLYPH)) return CLASS_DELETED_GLYPH;
|
||||
return (this+classTable).get_class (glyph_id, num_glyphs, 1);
|
||||
if (!glyphs[glyph_id]) return CLASS_OUT_OF_BOUNDS;
|
||||
return (this+classTable).get_class (glyph_id, num_glyphs, CLASS_OUT_OF_BOUNDS);
|
||||
}
|
||||
|
||||
const Entry<Extra> *get_entries () const
|
||||
|
|
@ -547,7 +652,7 @@ struct StateTable
|
|||
const Entry<Extra> &get_entry (int state, unsigned int klass) const
|
||||
{
|
||||
if (unlikely (klass >= nClasses))
|
||||
klass = StateTable::CLASS_OUT_OF_BOUNDS;
|
||||
klass = CLASS_OUT_OF_BOUNDS;
|
||||
|
||||
const HBUSHORT *states = (this+stateArrayTable).arrayZ;
|
||||
const Entry<Extra> *entries = (this+entryTable).arrayZ;
|
||||
|
|
@ -690,6 +795,15 @@ struct ClassTable
|
|||
{
|
||||
return get_class (glyph_id, outOfRange);
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &glyphs, unsigned num_glyphs) const
|
||||
{
|
||||
for (unsigned i = 0; i < classArray.len; i++)
|
||||
if (classArray.arrayZ[i] != CLASS_OUT_OF_BOUNDS)
|
||||
glyphs.add (firstGlyph + i);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -703,6 +817,38 @@ struct ClassTable
|
|||
DEFINE_SIZE_ARRAY (4, classArray);
|
||||
};
|
||||
|
||||
struct SubtableGlyphCoverage
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned subtable_count) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
||||
if (unlikely (!c->check_array (&subtableOffsets, subtable_count)))
|
||||
return_trace (false);
|
||||
|
||||
unsigned bytes = (c->get_num_glyphs () + CHAR_BIT - 1) / CHAR_BIT;
|
||||
for (unsigned i = 0; i < subtable_count; i++)
|
||||
{
|
||||
uint32_t offset = (uint32_t) subtableOffsets[i];
|
||||
if (offset == 0 || offset == 0xFFFFFFFF)
|
||||
continue;
|
||||
if (unlikely (!subtableOffsets[i].sanitize (c, this, bytes)))
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
protected:
|
||||
UnsizedArrayOf<NNOffset32To<UnsizedArrayOf<HBUINT8>>> subtableOffsets;
|
||||
/* Array of offsets from the beginning of the
|
||||
* subtable glyph coverage table to the glyph
|
||||
* coverage bitfield for a given subtable; there
|
||||
* is one offset for each subtable in the chain */
|
||||
/* UnsizedArrayOf<HBUINT8> coverageBitfields; *//* The individual coverage bitfields. */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (0, subtableOffsets);
|
||||
};
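/* Each per-subtable coverage bitfield carries one bit per glyph, hence the
 * (num_glyphs + CHAR_BIT - 1) / CHAR_BIT byte count checked above.  A lookup
 * into such a bitfield would be roughly the following (bit numbering within
 * the byte is assumed here, not taken from the spec): */
static inline bool coverage_has_glyph (const uint8_t *bits, unsigned glyph_id)
{
  return (bits[glyph_id / 8] >> (glyph_id % 8)) & 1;
}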
|
||||
|
||||
struct ObsoleteTypes
|
||||
{
|
||||
static constexpr bool extended = false;
|
||||
|
|
@ -779,15 +925,23 @@ struct StateTableDriver
|
|||
using EntryT = Entry<EntryData>;
|
||||
|
||||
StateTableDriver (const StateTableT &machine_,
|
||||
hb_buffer_t *buffer_,
|
||||
hb_face_t *face_) :
|
||||
machine (machine_),
|
||||
buffer (buffer_),
|
||||
num_glyphs (face_->get_num_glyphs ()) {}
|
||||
|
||||
template <typename context_t>
|
||||
bool is_idempotent_on_all_out_of_bounds (context_t *c, hb_aat_apply_context_t *ac)
|
||||
{
|
||||
const auto entry = machine.get_entry (StateTableT::STATE_START_OF_TEXT, CLASS_OUT_OF_BOUNDS);
|
||||
return !c->is_actionable (ac->buffer, this, entry) &&
|
||||
machine.new_state (entry.newState) == StateTableT::STATE_START_OF_TEXT;
|
||||
}
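/* Rationale for the helper above: a glyph outside the machine's collected
 * set maps to CLASS_OUT_OF_BOUNDS.  If, from START_OF_TEXT, that class
 * triggers no action and transitions straight back to START_OF_TEXT, such
 * glyphs can never affect the machine, so callers may skip the subtable
 * whenever the buffer digest cannot intersect its glyph set. */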
|
||||
|
||||
template <typename context_t>
|
||||
void drive (context_t *c, hb_aat_apply_context_t *ac)
|
||||
{
|
||||
hb_buffer_t *buffer = ac->buffer;
|
||||
|
||||
if (!c->in_place)
|
||||
buffer->clear_output ();
|
||||
|
||||
|
|
@ -822,9 +976,9 @@ struct StateTableDriver
|
|||
}
|
||||
}
|
||||
|
||||
unsigned int klass = buffer->idx < buffer->len ?
|
||||
machine.get_class (buffer->cur().codepoint, num_glyphs) :
|
||||
(unsigned) StateTableT::CLASS_END_OF_TEXT;
|
||||
unsigned int klass = likely (buffer->idx < buffer->len) ?
|
||||
machine.get_class (buffer->cur().codepoint, num_glyphs, ac->machine_glyph_set) :
|
||||
(unsigned) CLASS_END_OF_TEXT;
|
||||
DEBUG_MSG (APPLY, nullptr, "c%u at %u", klass, buffer->idx);
|
||||
const EntryT &entry = machine.get_entry (state, klass);
|
||||
const int next_state = machine.new_state (entry.newState);
|
||||
|
|
@ -861,11 +1015,11 @@ struct StateTableDriver
|
|||
const auto is_safe_to_break_extra = [&]()
|
||||
{
|
||||
/* 2c. */
|
||||
const auto wouldbe_entry = machine.get_entry(StateTableT::STATE_START_OF_TEXT, klass);
|
||||
const auto &wouldbe_entry = machine.get_entry(StateTableT::STATE_START_OF_TEXT, klass);
|
||||
|
||||
/* 2c'. */
|
||||
if (c->is_actionable (this, wouldbe_entry))
|
||||
return false;
|
||||
if (c->is_actionable (buffer, this, wouldbe_entry))
|
||||
return false;
|
||||
|
||||
/* 2c". */
|
||||
return next_state == machine.new_state(wouldbe_entry.newState)
|
||||
|
|
@ -875,7 +1029,7 @@ struct StateTableDriver
|
|||
const auto is_safe_to_break = [&]()
|
||||
{
|
||||
/* 1. */
|
||||
if (c->is_actionable (this, entry))
|
||||
if (c->is_actionable (buffer, this, entry))
|
||||
return false;
|
||||
|
||||
/* 2. */
|
||||
|
|
@ -888,13 +1042,13 @@ struct StateTableDriver
|
|||
return false;
|
||||
|
||||
/* 3. */
|
||||
return !c->is_actionable (this, machine.get_entry (state, StateTableT::CLASS_END_OF_TEXT));
|
||||
return !c->is_actionable (buffer, this, machine.get_entry (state, CLASS_END_OF_TEXT));
|
||||
};
|
||||
|
||||
if (!is_safe_to_break () && buffer->backtrack_len () && buffer->idx < buffer->len)
|
||||
buffer->unsafe_to_break_from_outbuffer (buffer->backtrack_len () - 1, buffer->idx + 1);
|
||||
|
||||
c->transition (this, entry);
|
||||
c->transition (buffer, this, entry);
|
||||
|
||||
state = next_state;
|
||||
DEBUG_MSG (APPLY, nullptr, "s%d", state);
|
||||
|
|
@ -912,7 +1066,6 @@ struct StateTableDriver
|
|||
|
||||
public:
|
||||
const StateTableT &machine;
|
||||
hb_buffer_t *buffer;
|
||||
unsigned int num_glyphs;
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@
|
|||
|
||||
#include "hb-kern.hh"
|
||||
#include "hb-aat-layout-ankr-table.hh"
|
||||
#include "hb-set-digest.hh"
|
||||
|
||||
/*
|
||||
* kerx -- Extended Kerning
|
||||
|
|
@ -82,7 +83,7 @@ struct KernPair
|
|||
return_trace (c->check_struct (this));
|
||||
}
|
||||
|
||||
protected:
|
||||
public:
|
||||
HBGlyphID16 left;
|
||||
HBGlyphID16 right;
|
||||
FWORD value;
|
||||
|
|
@ -106,10 +107,14 @@ struct KerxSubTableFormat0
|
|||
TRACE_APPLY (this);
|
||||
|
||||
if (!c->plan->requested_kerning)
|
||||
return false;
|
||||
return_trace (false);
|
||||
|
||||
if (header.coverage & header.Backwards)
|
||||
return false;
|
||||
return_trace (false);
|
||||
|
||||
if (!(c->buffer_digest.may_have (c->left_set) &&
|
||||
c->buffer_digest.may_have (c->right_set)))
|
||||
return_trace (false);
|
||||
|
||||
accelerator_t accel (*this, c);
|
||||
hb_kern_machine_t<accelerator_t> machine (accel, header.coverage & header.CrossStream);
|
||||
|
|
@ -118,6 +123,16 @@ struct KerxSubTableFormat0
|
|||
return_trace (true);
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &left_set, set_t &right_set, unsigned num_glyphs) const
|
||||
{
|
||||
for (const KernPair& pair : pairs)
|
||||
{
|
||||
left_set.add (pair.left);
|
||||
right_set.add (pair.right);
|
||||
}
|
||||
}
|
||||
|
||||
struct accelerator_t
|
||||
{
|
||||
const KerxSubTableFormat0 &table;
|
||||
|
|
@ -128,7 +143,10 @@ struct KerxSubTableFormat0
|
|||
table (table_), c (c_) {}
|
||||
|
||||
int get_kerning (hb_codepoint_t left, hb_codepoint_t right) const
|
||||
{ return table.get_kerning (left, right, c); }
|
||||
{
|
||||
if (!c->left_set[left] || !c->right_set[right]) return 0;
|
||||
return table.get_kerning (left, right, c);
|
||||
}
|
||||
};
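/* The pattern introduced here (and mirrored in the other kerx/kern formats)
 * is a two-level filter: apply () bails out early when the buffer digest
 * cannot intersect the subtable's left/right sets,
 *
 *   if (!(c->buffer_digest.may_have (c->left_set) &&
 *         c->buffer_digest.may_have (c->right_set)))
 *     return_trace (false);
 *
 * and get_kerning () rejects individual pairs before the binary search. */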
|
||||
|
||||
|
||||
|
|
@ -228,13 +246,14 @@ struct KerxSubTableFormat1
|
|||
depth (0),
|
||||
crossStream (table->header.coverage & table->header.CrossStream) {}
|
||||
|
||||
bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
bool is_actionable (hb_buffer_t *buffer HB_UNUSED,
|
||||
StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
const Entry<EntryData> &entry)
|
||||
{ return Format1EntryT::performAction (entry); }
|
||||
void transition (StateTableDriver<Types, EntryData> *driver,
|
||||
void transition (hb_buffer_t *buffer,
|
||||
StateTableDriver<Types, EntryData> *driver,
|
||||
const Entry<EntryData> &entry)
|
||||
{
|
||||
hb_buffer_t *buffer = driver->buffer;
|
||||
unsigned int flags = entry.flags;
|
||||
|
||||
if (flags & Format1EntryT::Reset)
|
||||
|
|
@ -351,7 +370,13 @@ struct KerxSubTableFormat1
|
|||
|
||||
driver_context_t dc (this, c);
|
||||
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->font->face);
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->font->face);
|
||||
|
||||
if (driver.is_idempotent_on_all_out_of_bounds (&dc, c) &&
|
||||
!(c->buffer_digest.may_have (c->left_set) &&
|
||||
c->buffer_digest.may_have (c->right_set)))
|
||||
return_trace (false);
|
||||
|
||||
driver.drive (&dc, c);
|
||||
|
||||
return_trace (true);
|
||||
|
|
@ -365,12 +390,21 @@ struct KerxSubTableFormat1
|
|||
machine.sanitize (c)));
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &left_set, set_t &right_set, unsigned num_glyphs) const
|
||||
{
|
||||
set_t set;
|
||||
machine.collect_glyphs (set, num_glyphs);
|
||||
left_set.union_ (set);
|
||||
right_set.union_ (set);
|
||||
}
|
||||
|
||||
protected:
|
||||
KernSubTableHeader header;
|
||||
StateTable<Types, EntryData> machine;
|
||||
NNOffsetTo<UnsizedArrayOf<FWORD>, HBUINT> kernAction;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 5 * sizeof (HBUINT));
|
||||
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + (StateTable<Types, EntryData>::static_size + HBUINT::static_size));
|
||||
};
|
||||
|
||||
template <typename KernSubTableHeader>
|
||||
|
|
@ -401,10 +435,14 @@ struct KerxSubTableFormat2
|
|||
TRACE_APPLY (this);
|
||||
|
||||
if (!c->plan->requested_kerning)
|
||||
return false;
|
||||
return_trace (false);
|
||||
|
||||
if (header.coverage & header.Backwards)
|
||||
return false;
|
||||
return_trace (false);
|
||||
|
||||
if (!(c->buffer_digest.may_have (c->left_set) &&
|
||||
c->buffer_digest.may_have (c->right_set)))
|
||||
return_trace (false);
|
||||
|
||||
accelerator_t accel (*this, c);
|
||||
hb_kern_machine_t<accelerator_t> machine (accel, header.coverage & header.CrossStream);
|
||||
|
|
@ -413,6 +451,13 @@ struct KerxSubTableFormat2
|
|||
return_trace (true);
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &left_set, set_t &right_set, unsigned num_glyphs) const
|
||||
{
|
||||
(this+leftClassTable).collect_glyphs (left_set, num_glyphs);
|
||||
(this+rightClassTable).collect_glyphs (right_set, num_glyphs);
|
||||
}
|
||||
|
||||
struct accelerator_t
|
||||
{
|
||||
const KerxSubTableFormat2 &table;
|
||||
|
|
@ -423,7 +468,10 @@ struct KerxSubTableFormat2
|
|||
table (table_), c (c_) {}
|
||||
|
||||
int get_kerning (hb_codepoint_t left, hb_codepoint_t right) const
|
||||
{ return table.get_kerning (left, right, c); }
|
||||
{
|
||||
if (!c->left_set[left] || !c->right_set[right]) return 0;
|
||||
return table.get_kerning (left, right, c);
|
||||
}
|
||||
};
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
|
@ -493,14 +541,14 @@ struct KerxSubTableFormat4
|
|||
mark_set (false),
|
||||
mark (0) {}
|
||||
|
||||
bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
bool is_actionable (hb_buffer_t *buffer HB_UNUSED,
|
||||
StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
const Entry<EntryData> &entry)
|
||||
{ return entry.data.ankrActionIndex != 0xFFFF; }
|
||||
void transition (StateTableDriver<Types, EntryData> *driver,
|
||||
void transition (hb_buffer_t *buffer,
|
||||
StateTableDriver<Types, EntryData> *driver,
|
||||
const Entry<EntryData> &entry)
|
||||
{
|
||||
hb_buffer_t *buffer = driver->buffer;
|
||||
|
||||
if (mark_set && entry.data.ankrActionIndex != 0xFFFF && buffer->idx < buffer->len)
|
||||
{
|
||||
hb_glyph_position_t &o = buffer->cur_pos();
|
||||
|
|
@ -600,7 +648,13 @@ struct KerxSubTableFormat4
|
|||
|
||||
driver_context_t dc (this, c);
|
||||
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->font->face);
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->font->face);
|
||||
|
||||
if (driver.is_idempotent_on_all_out_of_bounds (&dc, c) &&
|
||||
!(c->buffer_digest.may_have (c->left_set) &&
|
||||
c->buffer_digest.may_have (c->right_set)))
|
||||
return_trace (false);
|
||||
|
||||
driver.drive (&dc, c);
|
||||
|
||||
return_trace (true);
|
||||
|
|
@ -614,12 +668,21 @@ struct KerxSubTableFormat4
|
|||
machine.sanitize (c)));
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &left_set, set_t &right_set, unsigned num_glyphs) const
|
||||
{
|
||||
set_t set;
|
||||
machine.collect_glyphs (set, num_glyphs);
|
||||
left_set.union_ (set);
|
||||
right_set.union_ (set);
|
||||
}
|
||||
|
||||
protected:
|
||||
KernSubTableHeader header;
|
||||
StateTable<Types, EntryData> machine;
|
||||
HBUINT32 flags;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 20);
|
||||
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + (StateTable<Types, EntryData>::static_size + HBUINT32::static_size));
|
||||
};
|
||||
|
||||
template <typename KernSubTableHeader>
|
||||
|
|
@ -638,7 +701,7 @@ struct KerxSubTableFormat6
|
|||
unsigned int num_glyphs = c->sanitizer.get_num_glyphs ();
|
||||
if (is_long ())
|
||||
{
|
||||
const typename U::Long &t = u.l;
|
||||
const auto &t = u.l;
|
||||
unsigned int l = (this+t.rowIndexTable).get_value_or_null (left, num_glyphs);
|
||||
unsigned int r = (this+t.columnIndexTable).get_value_or_null (right, num_glyphs);
|
||||
unsigned int offset = l + r;
|
||||
|
|
@ -651,7 +714,7 @@ struct KerxSubTableFormat6
|
|||
}
|
||||
else
|
||||
{
|
||||
const typename U::Short &t = u.s;
|
||||
const auto &t = u.s;
|
||||
unsigned int l = (this+t.rowIndexTable).get_value_or_null (left, num_glyphs);
|
||||
unsigned int r = (this+t.columnIndexTable).get_value_or_null (right, num_glyphs);
|
||||
unsigned int offset = l + r;
|
||||
|
|
@ -667,10 +730,14 @@ struct KerxSubTableFormat6
|
|||
TRACE_APPLY (this);
|
||||
|
||||
if (!c->plan->requested_kerning)
|
||||
return false;
|
||||
return_trace (false);
|
||||
|
||||
if (header.coverage & header.Backwards)
|
||||
return false;
|
||||
return_trace (false);
|
||||
|
||||
if (!(c->buffer_digest.may_have (c->left_set) &&
|
||||
c->buffer_digest.may_have (c->right_set)))
|
||||
return_trace (false);
|
||||
|
||||
accelerator_t accel (*this, c);
|
||||
hb_kern_machine_t<accelerator_t> machine (accel, header.coverage & header.CrossStream);
|
||||
|
|
@ -698,6 +765,23 @@ struct KerxSubTableFormat6
|
|||
c->check_range (this, vector))));
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &left_set, set_t &right_set, unsigned num_glyphs) const
|
||||
{
|
||||
if (is_long ())
|
||||
{
|
||||
const auto &t = u.l;
|
||||
(this+t.rowIndexTable).collect_glyphs (left_set, num_glyphs);
|
||||
(this+t.columnIndexTable).collect_glyphs (right_set, num_glyphs);
|
||||
}
|
||||
else
|
||||
{
|
||||
const auto &t = u.s;
|
||||
(this+t.rowIndexTable).collect_glyphs (left_set, num_glyphs);
|
||||
(this+t.columnIndexTable).collect_glyphs (right_set, num_glyphs);
|
||||
}
|
||||
}
|
||||
|
||||
struct accelerator_t
|
||||
{
|
||||
const KerxSubTableFormat6 &table;
|
||||
|
|
@ -708,7 +792,10 @@ struct KerxSubTableFormat6
|
|||
table (table_), c (c_) {}
|
||||
|
||||
int get_kerning (hb_codepoint_t left, hb_codepoint_t right) const
|
||||
{ return table.get_kerning (left, right, c); }
|
||||
{
|
||||
if (!c->left_set[left] || !c->right_set[right]) return 0;
|
||||
return table.get_kerning (left, right, c);
|
||||
}
|
||||
};
|
||||
|
||||
protected:
|
||||
|
|
@ -794,6 +881,20 @@ struct KerxSubTable
|
|||
}
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &left_set, set_t &right_set, unsigned num_glyphs) const
|
||||
{
|
||||
unsigned int subtable_type = get_type ();
|
||||
switch (subtable_type) {
|
||||
case 0: u.format0.collect_glyphs (left_set, right_set, num_glyphs); return;
|
||||
case 1: u.format1.collect_glyphs (left_set, right_set, num_glyphs); return;
|
||||
case 2: u.format2.collect_glyphs (left_set, right_set, num_glyphs); return;
|
||||
case 4: u.format4.collect_glyphs (left_set, right_set, num_glyphs); return;
|
||||
case 6: u.format6.collect_glyphs (left_set, right_set, num_glyphs); return;
|
||||
default: return;
|
||||
}
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -824,6 +925,8 @@ struct KerxSubTable
|
|||
* The 'kerx' Table
|
||||
*/
|
||||
|
||||
using kern_accelerator_data_t = hb_vector_t<hb_pair_t<hb_set_digest_t, hb_set_digest_t>>;
|
||||
|
||||
template <typename T>
|
||||
struct KerxTable
|
||||
{
|
||||
|
|
@ -840,6 +943,9 @@ struct KerxTable
|
|||
{
|
||||
if (st->get_type () == 1)
|
||||
return true;
|
||||
|
||||
// TODO: What about format 4? What's this API used for anyway?
|
||||
|
||||
st = &StructAfter<SubTable> (*st);
|
||||
}
|
||||
return false;
|
||||
|
|
@ -878,10 +984,16 @@ struct KerxTable
|
|||
return v;
|
||||
}
|
||||
|
||||
bool apply (AAT::hb_aat_apply_context_t *c) const
|
||||
bool apply (AAT::hb_aat_apply_context_t *c,
|
||||
const kern_accelerator_data_t *accel_data = nullptr) const
|
||||
{
|
||||
c->buffer->unsafe_to_concat ();
|
||||
|
||||
if (c->buffer->len < HB_AAT_BUFFER_DIGEST_THRESHOLD)
|
||||
c->buffer_digest = c->buffer->digest ();
|
||||
else
|
||||
c->buffer_digest = hb_set_digest_t::full ();
|
||||
|
||||
typedef typename T::SubTable SubTable;
|
||||
|
||||
bool ret = false;
|
||||
|
|
@ -925,6 +1037,16 @@ struct KerxTable
|
|||
if (reverse)
|
||||
c->buffer->reverse ();
|
||||
|
||||
if (accel_data)
|
||||
{
|
||||
c->left_set = (*accel_data)[i].first;
|
||||
c->right_set = (*accel_data)[i].second;
|
||||
}
|
||||
else
|
||||
{
|
||||
c->left_set = c->right_set = hb_set_digest_t::full ();
|
||||
}
|
||||
|
||||
{
|
||||
/* See comment in sanitize() for conditional here. */
|
||||
hb_sanitize_with_object_t with (&c->sanitizer, i < count - 1 ? st : (const SubTable *) nullptr);
|
||||
|
|
@ -977,8 +1099,61 @@ struct KerxTable
|
|||
st = &StructAfter<SubTable> (*st);
|
||||
}
|
||||
|
||||
unsigned majorVersion = thiz()->version;
|
||||
if (sizeof (thiz()->version) == 4)
|
||||
majorVersion = majorVersion >> 16;
|
||||
if (majorVersion >= 3)
|
||||
{
|
||||
const SubtableGlyphCoverage *coverage = (const SubtableGlyphCoverage *) st;
|
||||
if (!coverage->sanitize (c, count))
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
kern_accelerator_data_t create_accelerator_data (unsigned num_glyphs) const
|
||||
{
|
||||
kern_accelerator_data_t accel_data;
|
||||
|
||||
typedef typename T::SubTable SubTable;
|
||||
|
||||
const SubTable *st = &thiz()->firstSubTable;
|
||||
unsigned int count = thiz()->tableCount;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
hb_set_digest_t left_set, right_set;
|
||||
st->collect_glyphs (left_set, right_set, num_glyphs);
|
||||
accel_data.push (hb_pair (left_set, right_set));
|
||||
st = &StructAfter<SubTable> (*st);
|
||||
}
|
||||
|
||||
return accel_data;
|
||||
}
|
||||
|
||||
struct accelerator_t
|
||||
{
|
||||
accelerator_t (hb_face_t *face)
|
||||
{
|
||||
hb_sanitize_context_t sc;
|
||||
this->table = sc.reference_table<T> (face);
|
||||
this->accel_data = this->table->create_accelerator_data (face->get_num_glyphs ());
|
||||
}
|
||||
~accelerator_t ()
|
||||
{
|
||||
this->table.destroy ();
|
||||
}
|
||||
|
||||
hb_blob_t *get_blob () const { return table.get_blob (); }
|
||||
|
||||
bool apply (AAT::hb_aat_apply_context_t *c) const
|
||||
{
|
||||
return table->apply (c, &accel_data);
|
||||
}
|
||||
|
||||
hb_blob_ptr_t<T> table;
|
||||
kern_accelerator_data_t accel_data;
|
||||
};
|
||||
};
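A rough sketch of the gatekeeping idea the accelerator enables (purely illustrative: DigestT stands in for hb_set_digest_t, and the surrounding helper and type names are placeholders, not the real driver code): each subtable's left/right glyph digests are collected once by create_accelerator_data(), and apply() can then skip any subtable whose glyphs cannot occur in the buffer.

template <typename DigestT, typename SubTableT, typename ContextT>
static bool
apply_if_may_match (const SubTableT &st,
                    const DigestT &buffer_digest,
                    const DigestT &left_set,
                    const DigestT &right_set,
                    ContextT *c)
{
  if (!(buffer_digest.may_have (left_set) &&
        buffer_digest.may_have (right_set)))
    return false;        /* no glyph of this subtable can be in the buffer */
  return st.apply (c);   /* otherwise run the subtable as before */
}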
|
||||
|
||||
struct kerx : KerxTable<kerx>
|
||||
|
|
@ -1007,8 +1182,10 @@ struct kerx : KerxTable<kerx>
|
|||
DEFINE_SIZE_MIN (8);
|
||||
};
|
||||
|
||||
struct kerx_accelerator_t : kerx::accelerator_t {
|
||||
kerx_accelerator_t (hb_face_t *face) : kerx::accelerator_t (face) {}
|
||||
};
|
||||
|
||||
} /* namespace AAT */
|
||||
|
||||
|
||||
#endif /* HB_AAT_LAYOUT_KERX_TABLE_HH */
|
||||
|
|
|
|||
|
|
@ -74,15 +74,16 @@ struct RearrangementSubtable
|
|||
ret (false),
|
||||
start (0), end (0) {}
|
||||
|
||||
bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
const Entry<EntryData> &entry)
|
||||
bool is_actionable (hb_buffer_t *buffer HB_UNUSED,
|
||||
StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
const Entry<EntryData> &entry) const
|
||||
{
|
||||
return (entry.flags & Verb) && start < end;
|
||||
}
|
||||
void transition (StateTableDriver<Types, EntryData> *driver,
|
||||
void transition (hb_buffer_t *buffer,
|
||||
StateTableDriver<Types, EntryData> *driver,
|
||||
const Entry<EntryData> &entry)
|
||||
{
|
||||
hb_buffer_t *buffer = driver->buffer;
|
||||
unsigned int flags = entry.flags;
|
||||
|
||||
if (flags & MarkFirst)
|
||||
|
|
@ -168,7 +169,12 @@ struct RearrangementSubtable
|
|||
|
||||
driver_context_t dc (this);
|
||||
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->face);
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->face);
|
||||
|
||||
if (driver.is_idempotent_on_all_out_of_bounds (&dc, c) &&
|
||||
!c->buffer_digest.may_have (c->machine_glyph_set))
|
||||
return_trace (false);
|
||||
|
||||
driver.drive (&dc, c);
|
||||
|
||||
return_trace (dc.ret);
|
||||
|
|
@ -180,10 +186,10 @@ struct RearrangementSubtable
|
|||
return_trace (machine.sanitize (c));
|
||||
}
|
||||
|
||||
protected:
|
||||
public:
|
||||
StateTable<Types, EntryData> machine;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (16);
|
||||
DEFINE_SIZE_STATIC ((StateTable<Types, EntryData>::static_size));
|
||||
};
|
||||
|
||||
template <typename Types>
|
||||
|
|
@ -223,21 +229,19 @@ struct ContextualSubtable
|
|||
table (table_),
|
||||
subs (table+table->substitutionTables) {}
|
||||
|
||||
bool is_actionable (StateTableDriver<Types, EntryData> *driver,
|
||||
const Entry<EntryData> &entry)
|
||||
bool is_actionable (hb_buffer_t *buffer,
|
||||
StateTableDriver<Types, EntryData> *driver,
|
||||
const Entry<EntryData> &entry) const
|
||||
{
|
||||
hb_buffer_t *buffer = driver->buffer;
|
||||
|
||||
if (buffer->idx == buffer->len && !mark_set)
|
||||
return false;
|
||||
|
||||
return entry.data.markIndex != 0xFFFF || entry.data.currentIndex != 0xFFFF;
|
||||
}
|
||||
void transition (StateTableDriver<Types, EntryData> *driver,
|
||||
void transition (hb_buffer_t *buffer,
|
||||
StateTableDriver<Types, EntryData> *driver,
|
||||
const Entry<EntryData> &entry)
|
||||
{
|
||||
hb_buffer_t *buffer = driver->buffer;
|
||||
|
||||
/* Looks like CoreText applies neither mark nor current substitution for
|
||||
* end-of-text if mark was not explicitly set. */
|
||||
if (buffer->idx == buffer->len && !mark_set)
|
||||
|
|
@ -268,6 +272,7 @@ struct ContextualSubtable
|
|||
{
|
||||
buffer->unsafe_to_break (mark, hb_min (buffer->idx + 1, buffer->len));
|
||||
buffer->info[mark].codepoint = *replacement;
|
||||
c->buffer_digest.add (*replacement);
|
||||
if (has_glyph_classes)
|
||||
_hb_glyph_info_set_glyph_props (&buffer->info[mark],
|
||||
gdef.get_glyph_props (*replacement));
|
||||
|
|
@ -297,6 +302,7 @@ struct ContextualSubtable
|
|||
if (replacement)
|
||||
{
|
||||
buffer->info[idx].codepoint = *replacement;
|
||||
c->buffer_digest.add (*replacement);
|
||||
if (has_glyph_classes)
|
||||
_hb_glyph_info_set_glyph_props (&buffer->info[idx],
|
||||
gdef.get_glyph_props (*replacement));
|
||||
|
|
@ -328,7 +334,12 @@ struct ContextualSubtable
|
|||
|
||||
driver_context_t dc (this, c);
|
||||
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->face);
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->face);
|
||||
|
||||
if (driver.is_idempotent_on_all_out_of_bounds (&dc, c) &&
|
||||
!c->buffer_digest.may_have (c->machine_glyph_set))
|
||||
return_trace (false);
|
||||
|
||||
driver.drive (&dc, c);
|
||||
|
||||
return_trace (dc.ret);
|
||||
|
|
@ -361,13 +372,14 @@ struct ContextualSubtable
|
|||
return_trace (substitutionTables.sanitize (c, this, num_lookups));
|
||||
}
|
||||
|
||||
protected:
|
||||
public:
|
||||
StateTable<Types, EntryData>
|
||||
machine;
|
||||
protected:
|
||||
NNOffsetTo<UnsizedListOfOffset16To<Lookup<HBGlyphID16>, HBUINT, void, false>, HBUINT>
|
||||
substitutionTables;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (20);
|
||||
DEFINE_SIZE_STATIC ((StateTable<Types, EntryData>::static_size + HBUINT::static_size));
|
||||
};
|
||||
|
||||
|
||||
|
|
@ -464,16 +476,16 @@ struct LigatureSubtable
|
|||
ligature (table+table->ligature),
|
||||
match_length (0) {}
|
||||
|
||||
bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
const Entry<EntryData> &entry)
|
||||
bool is_actionable (hb_buffer_t *buffer HB_UNUSED,
|
||||
StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
const Entry<EntryData> &entry) const
|
||||
{
|
||||
return LigatureEntryT::performAction (entry);
|
||||
}
|
||||
void transition (StateTableDriver<Types, EntryData> *driver,
|
||||
void transition (hb_buffer_t *buffer,
|
||||
StateTableDriver<Types, EntryData> *driver,
|
||||
const Entry<EntryData> &entry)
|
||||
{
|
||||
hb_buffer_t *buffer = driver->buffer;
|
||||
|
||||
DEBUG_MSG (APPLY, nullptr, "Ligature transition at %u", buffer->idx);
|
||||
if (entry.flags & LigatureEntryT::SetComponent)
|
||||
{
|
||||
|
|
@ -552,6 +564,7 @@ struct LigatureSubtable
|
|||
{
|
||||
DEBUG_MSG (APPLY, nullptr, "Skipping ligature component");
|
||||
if (unlikely (!buffer->move_to (match_positions[--match_length % ARRAY_LENGTH (match_positions)]))) return;
|
||||
buffer->cur().unicode_props() |= UPROPS_MASK_IGNORABLE;
|
||||
if (unlikely (!buffer->replace_glyph (DELETED_GLYPH))) return;
|
||||
}
|
||||
|
||||
|
|
@ -584,7 +597,12 @@ struct LigatureSubtable
|
|||
|
||||
driver_context_t dc (this, c);
|
||||
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->face);
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->face);
|
||||
|
||||
if (driver.is_idempotent_on_all_out_of_bounds (&dc, c) &&
|
||||
!c->buffer_digest.may_have (c->machine_glyph_set))
|
||||
return_trace (false);
|
||||
|
||||
driver.drive (&dc, c);
|
||||
|
||||
return_trace (dc.ret);
|
||||
|
|
@ -599,9 +617,10 @@ struct LigatureSubtable
|
|||
ligAction && component && ligature);
|
||||
}
|
||||
|
||||
protected:
|
||||
public:
|
||||
StateTable<Types, EntryData>
|
||||
machine;
|
||||
protected:
|
||||
NNOffsetTo<UnsizedArrayOf<HBUINT32>, HBUINT>
|
||||
ligAction; /* Offset to the ligature action table. */
|
||||
NNOffsetTo<UnsizedArrayOf<HBUINT16>, HBUINT>
|
||||
|
|
@ -609,7 +628,7 @@ struct LigatureSubtable
|
|||
NNOffsetTo<UnsizedArrayOf<HBGlyphID16>, HBUINT>
|
||||
ligature; /* Offset to the actual ligature lists. */
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (28);
|
||||
DEFINE_SIZE_STATIC ((StateTable<Types, EntryData>::static_size + 3 * HBUINT::static_size));
|
||||
};
|
||||
|
||||
template <typename Types>
|
||||
|
|
@ -652,6 +671,7 @@ struct NoncontextualSubtable
|
|||
if (replacement)
|
||||
{
|
||||
info[i].codepoint = *replacement;
|
||||
c->buffer_digest.add (*replacement);
|
||||
if (has_glyph_classes)
|
||||
_hb_glyph_info_set_glyph_props (&info[i],
|
||||
gdef.get_glyph_props (*replacement));
|
||||
|
|
@ -753,16 +773,17 @@ struct InsertionSubtable
|
|||
mark (0),
|
||||
insertionAction (table+table->insertionAction) {}
|
||||
|
||||
bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
const Entry<EntryData> &entry)
|
||||
bool is_actionable (hb_buffer_t *buffer HB_UNUSED,
|
||||
StateTableDriver<Types, EntryData> *driver HB_UNUSED,
|
||||
const Entry<EntryData> &entry) const
|
||||
{
|
||||
return (entry.flags & (CurrentInsertCount | MarkedInsertCount)) &&
|
||||
(entry.data.currentInsertIndex != 0xFFFF ||entry.data.markedInsertIndex != 0xFFFF);
|
||||
}
|
||||
void transition (StateTableDriver<Types, EntryData> *driver,
|
||||
void transition (hb_buffer_t *buffer,
|
||||
StateTableDriver<Types, EntryData> *driver,
|
||||
const Entry<EntryData> &entry)
|
||||
{
|
||||
hb_buffer_t *buffer = driver->buffer;
|
||||
unsigned int flags = entry.flags;
|
||||
|
||||
unsigned mark_loc = buffer->out_len;
|
||||
|
|
@ -785,6 +806,9 @@ struct InsertionSubtable
|
|||
if (unlikely (!buffer->copy_glyph ())) return;
|
||||
/* TODO We ignore KashidaLike setting. */
|
||||
if (unlikely (!buffer->replace_glyphs (0, count, glyphs))) return;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
c->buffer_digest.add (glyphs[i]);
|
||||
ret = true;
|
||||
if (buffer->idx < buffer->len && !before)
|
||||
buffer->skip_glyph ();
|
||||
|
||||
|
|
@ -849,7 +873,12 @@ struct InsertionSubtable
|
|||
|
||||
driver_context_t dc (this, c);
|
||||
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->face);
|
||||
StateTableDriver<Types, EntryData> driver (machine, c->face);
|
||||
|
||||
if (driver.is_idempotent_on_all_out_of_bounds (&dc, c) &&
|
||||
!c->buffer_digest.may_have (c->machine_glyph_set))
|
||||
return_trace (false);
|
||||
|
||||
driver.drive (&dc, c);
|
||||
|
||||
return_trace (dc.ret);
|
||||
|
|
@ -864,14 +893,15 @@ struct InsertionSubtable
|
|||
insertionAction);
|
||||
}
|
||||
|
||||
protected:
|
||||
public:
|
||||
StateTable<Types, EntryData>
|
||||
machine;
|
||||
protected:
|
||||
NNOffsetTo<UnsizedArrayOf<HBGlyphID16>, HBUINT>
|
||||
insertionAction; /* Byte offset from stateHeader to the start of
|
||||
* the insertion glyph table. */
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (20);
|
||||
DEFINE_SIZE_STATIC ((StateTable<Types, EntryData>::static_size + HBUINT::static_size));
|
||||
};
|
||||
|
||||
|
||||
|
|
@ -895,6 +925,89 @@ struct Feature
|
|||
DEFINE_SIZE_STATIC (12);
|
||||
};
|
||||
|
||||
|
||||
struct hb_accelerate_subtables_context_t :
|
||||
hb_dispatch_context_t<hb_accelerate_subtables_context_t>
|
||||
{
|
||||
struct hb_applicable_t
|
||||
{
|
||||
friend struct hb_accelerate_subtables_context_t;
|
||||
friend struct hb_aat_layout_lookup_accelerator_t;
|
||||
|
||||
public:
|
||||
hb_set_digest_t digest;
|
||||
|
||||
template <typename T>
|
||||
auto init_ (const T &obj_, unsigned num_glyphs, hb_priority<1>) HB_AUTO_RETURN
|
||||
(
|
||||
obj_.machine.collect_glyphs (this->digest, num_glyphs)
|
||||
)
|
||||
|
||||
template <typename T>
|
||||
void init_ (const T &obj_, unsigned num_glyphs, hb_priority<0>)
|
||||
{
|
||||
digest = digest.full ();
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
void init (const T &obj_, unsigned num_glyphs)
|
||||
{
|
||||
init_ (obj_, num_glyphs, hb_prioritize);
|
||||
}
|
||||
};
|
||||
|
||||
/* Dispatch interface. */
|
||||
template <typename T>
|
||||
return_t dispatch (const T &obj)
|
||||
{
|
||||
hb_applicable_t *entry = &array[i++];
|
||||
|
||||
entry->init (obj, num_glyphs);
|
||||
|
||||
return hb_empty_t ();
|
||||
}
|
||||
static return_t default_return_value () { return hb_empty_t (); }
|
||||
|
||||
bool stop_sublookup_iteration (return_t r) const { return false; }
|
||||
|
||||
hb_accelerate_subtables_context_t (hb_applicable_t *array_, unsigned num_glyphs_) :
|
||||
hb_dispatch_context_t<hb_accelerate_subtables_context_t> (),
|
||||
array (array_), num_glyphs (num_glyphs_) {}
|
||||
|
||||
hb_applicable_t *array;
|
||||
unsigned num_glyphs;
|
||||
unsigned i = 0;
|
||||
};
|
||||
|
||||
struct hb_aat_layout_chain_accelerator_t
|
||||
{
|
||||
template <typename TChain>
|
||||
static hb_aat_layout_chain_accelerator_t *create (const TChain &chain, unsigned num_glyphs)
|
||||
{
|
||||
unsigned count = chain.get_subtable_count ();
|
||||
|
||||
unsigned size = sizeof (hb_aat_layout_chain_accelerator_t) -
|
||||
HB_VAR_ARRAY * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t) +
|
||||
count * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t);
|
||||
|
||||
/* The following is a calloc because when we are collecting subtables,
|
||||
* some of them might be invalid and hence not collected; as a result,
|
||||
* we might not fill in all the count entries of the subtables array.
|
||||
* Zeroing it allows the set digest to gatekeep it without having to
|
||||
* initialize it further. */
|
||||
auto *thiz = (hb_aat_layout_chain_accelerator_t *) hb_calloc (1, size);
|
||||
if (unlikely (!thiz))
|
||||
return nullptr;
|
||||
|
||||
hb_accelerate_subtables_context_t c_accelerate_subtables (thiz->subtables, num_glyphs);
|
||||
chain.dispatch (&c_accelerate_subtables);
|
||||
|
||||
return thiz;
|
||||
}
|
||||
|
||||
hb_accelerate_subtables_context_t::hb_applicable_t subtables[HB_VAR_ARRAY];
|
||||
};
|
||||
|
||||
template <typename Types>
|
||||
struct ChainSubtable
|
||||
{
|
||||
|
|
@ -986,6 +1099,8 @@ struct Chain
|
|||
{
|
||||
typedef typename Types::HBUINT HBUINT;
|
||||
|
||||
unsigned get_subtable_count () const { return subtableCount; }
|
||||
|
||||
hb_mask_t compile_flags (const hb_aat_map_builder_t *map) const
|
||||
{
|
||||
hb_mask_t flags = defaultFlags;
|
||||
|
|
@ -1026,7 +1141,8 @@ struct Chain
|
|||
return flags;
|
||||
}
|
||||
|
||||
void apply (hb_aat_apply_context_t *c) const
|
||||
void apply (hb_aat_apply_context_t *c,
|
||||
const hb_aat_layout_chain_accelerator_t *accel) const
|
||||
{
|
||||
const ChainSubtable<Types> *subtable = &StructAfter<ChainSubtable<Types>> (featureZ.as_array (featureCount));
|
||||
unsigned int count = subtableCount;
|
||||
|
|
@ -1038,6 +1154,7 @@ struct Chain
|
|||
hb_map ([&subtable] (const hb_aat_map_t::range_flags_t _) -> bool { return subtable->subFeatureFlags & (_.flags); })))
|
||||
goto skip;
|
||||
c->subtable_flags = subtable->subFeatureFlags;
|
||||
c->machine_glyph_set = accel ? accel->subtables[i].digest : hb_set_digest_t::full ();
|
||||
|
||||
if (!(subtable->get_coverage() & ChainSubtable<Types>::AllDirections) &&
|
||||
HB_DIRECTION_IS_VERTICAL (c->buffer->props.direction) !=
|
||||
|
|
@ -1099,7 +1216,22 @@ struct Chain
|
|||
|
||||
unsigned int get_size () const { return length; }
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int version HB_UNUSED) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
const ChainSubtable<Types> *subtable = &StructAfter<ChainSubtable<Types>> (featureZ.as_array (featureCount));
|
||||
unsigned int count = subtableCount;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
typename context_t::return_t ret = subtable->dispatch (c, std::forward<Ts> (ds)...);
|
||||
if (c->stop_sublookup_iteration (ret))
|
||||
return ret;
|
||||
subtable = &StructAfter<ChainSubtable<Types>> (*subtable);
|
||||
}
|
||||
return c->default_return_value ();
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int version) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (!(length.sanitize (c) &&
|
||||
|
|
@ -1121,6 +1253,13 @@ struct Chain
|
|||
subtable = &StructAfter<ChainSubtable<Types>> (*subtable);
|
||||
}
|
||||
|
||||
if (version >= 3)
|
||||
{
|
||||
const SubtableGlyphCoverage *coverage = (const SubtableGlyphCoverage *) subtable;
|
||||
if (!coverage->sanitize (c, count))
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
|
|
@ -1132,7 +1271,7 @@ struct Chain
|
|||
|
||||
UnsizedArrayOf<Feature> featureZ; /* Features. */
|
||||
/*ChainSubtable firstSubtable;*//* Subtables. */
|
||||
/*subtableGlyphCoverageArray*/ /* Only if version >= 3. We don't use. */
|
||||
/*SubtableGlyphCoverage coverages*//* Only if version >= 3. */
|
||||
|
||||
public:
|
||||
DEFINE_SIZE_MIN (8 + 2 * sizeof (HBUINT));
|
||||
|
|
@ -1143,13 +1282,69 @@ struct Chain
|
|||
* The 'mort'/'morx' Table
|
||||
*/
|
||||
|
||||
template <typename Types, hb_tag_t TAG>
|
||||
template <typename T, typename Types, hb_tag_t TAG>
|
||||
struct mortmorx
|
||||
{
|
||||
static constexpr hb_tag_t tableTag = TAG;
|
||||
|
||||
bool has_data () const { return version != 0; }
|
||||
|
||||
struct accelerator_t
|
||||
{
|
||||
accelerator_t (hb_face_t *face)
|
||||
{
|
||||
hb_sanitize_context_t sc;
|
||||
this->table = sc.reference_table<T> (face);
|
||||
|
||||
this->chain_count = table->get_chain_count ();
|
||||
|
||||
this->accels = (hb_atomic_ptr_t<hb_aat_layout_chain_accelerator_t> *) hb_calloc (this->chain_count, sizeof (*accels));
|
||||
if (unlikely (!this->accels))
|
||||
{
|
||||
this->chain_count = 0;
|
||||
this->table.destroy ();
|
||||
this->table = hb_blob_get_empty ();
|
||||
}
|
||||
}
|
||||
~accelerator_t ()
|
||||
{
|
||||
for (unsigned int i = 0; i < this->chain_count; i++)
|
||||
hb_free (this->accels[i]);
|
||||
hb_free (this->accels);
|
||||
this->table.destroy ();
|
||||
}
|
||||
|
||||
hb_blob_t *get_blob () const { return table.get_blob (); }
|
||||
|
||||
template <typename Chain>
|
||||
hb_aat_layout_chain_accelerator_t *get_accel (unsigned chain_index, const Chain &chain, unsigned num_glyphs) const
|
||||
{
|
||||
if (unlikely (chain_index >= chain_count)) return nullptr;
|
||||
|
||||
retry:
|
||||
auto *accel = accels[chain_index].get_acquire ();
|
||||
if (unlikely (!accel))
|
||||
{
|
||||
accel = hb_aat_layout_chain_accelerator_t::create (chain, num_glyphs);
|
||||
if (unlikely (!accel))
|
||||
return nullptr;
|
||||
|
||||
if (unlikely (!accels[chain_index].cmpexch (nullptr, accel)))
|
||||
{
|
||||
hb_free (accel);
|
||||
goto retry;
|
||||
}
|
||||
}
|
||||
|
||||
return accel;
|
||||
}
|
||||
|
||||
hb_blob_ptr_t<T> table;
|
||||
unsigned int chain_count;
|
||||
hb_atomic_ptr_t<hb_aat_layout_chain_accelerator_t> *accels;
|
||||
};
|
||||
|
||||
|
||||
void compile_flags (const hb_aat_map_builder_t *mapper,
|
||||
hb_aat_map_t *map) const
|
||||
{
|
||||
|
|
@ -1166,20 +1361,32 @@ struct mortmorx
|
|||
}
|
||||
}
|
||||
|
||||
unsigned get_chain_count () const
|
||||
{
|
||||
return chainCount;
|
||||
}
|
||||
|
||||
void apply (hb_aat_apply_context_t *c,
|
||||
const hb_aat_map_t &map) const
|
||||
const hb_aat_map_t &map,
|
||||
const accelerator_t &accel) const
|
||||
{
|
||||
if (unlikely (!c->buffer->successful)) return;
|
||||
|
||||
c->buffer->unsafe_to_concat ();
|
||||
|
||||
if (c->buffer->len < HB_AAT_BUFFER_DIGEST_THRESHOLD)
|
||||
c->buffer_digest = c->buffer->digest ();
|
||||
else
|
||||
c->buffer_digest = hb_set_digest_t::full ();
|
||||
|
||||
c->set_lookup_index (0);
|
||||
const Chain<Types> *chain = &firstChain;
|
||||
unsigned int count = chainCount;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
auto *chain_accel = accel.get_accel (i, *chain, c->face->get_num_glyphs ());
|
||||
c->range_flags = &map.chain_flags[i];
|
||||
chain->apply (c);
|
||||
chain->apply (c, chain_accel);
|
||||
if (unlikely (!c->buffer->successful)) return;
|
||||
chain = &StructAfter<Chain<Types>> (*chain);
|
||||
}
|
||||
|
|
@ -1219,8 +1426,15 @@ struct mortmorx
|
|||
DEFINE_SIZE_MIN (8);
|
||||
};
|
||||
|
||||
struct morx : mortmorx<ExtendedTypes, HB_AAT_TAG_morx> {};
|
||||
struct mort : mortmorx<ObsoleteTypes, HB_AAT_TAG_mort> {};
|
||||
struct morx : mortmorx<morx, ExtendedTypes, HB_AAT_TAG_morx> {};
|
||||
struct mort : mortmorx<mort, ObsoleteTypes, HB_AAT_TAG_mort> {};
|
||||
|
||||
struct morx_accelerator_t : morx::accelerator_t {
|
||||
morx_accelerator_t (hb_face_t *face) : morx::accelerator_t (face) {}
|
||||
};
|
||||
struct mort_accelerator_t : mort::accelerator_t {
|
||||
mort_accelerator_t (hb_face_t *face) : mort::accelerator_t (face) {}
|
||||
};
|
||||
|
||||
|
||||
} /* namespace AAT */
|
||||
|
|
|
|||
|
|
@ -211,14 +211,14 @@ void
|
|||
hb_aat_layout_compile_map (const hb_aat_map_builder_t *mapper,
|
||||
hb_aat_map_t *map)
|
||||
{
|
||||
const AAT::morx& morx = *mapper->face->table.morx;
|
||||
const AAT::morx& morx = *mapper->face->table.morx->table;
|
||||
if (morx.has_data ())
|
||||
{
|
||||
morx.compile_flags (mapper, map);
|
||||
return;
|
||||
}
|
||||
|
||||
const AAT::mort& mort = *mapper->face->table.mort;
|
||||
const AAT::mort& mort = *mapper->face->table.mort->table;
|
||||
if (mort.has_data ())
|
||||
{
|
||||
mort.compile_flags (mapper, map);
|
||||
|
|
@ -243,8 +243,8 @@ hb_aat_layout_compile_map (const hb_aat_map_builder_t *mapper,
|
|||
hb_bool_t
|
||||
hb_aat_layout_has_substitution (hb_face_t *face)
|
||||
{
|
||||
return face->table.morx->has_data () ||
|
||||
face->table.mort->has_data ();
|
||||
return face->table.morx->table->has_data () ||
|
||||
face->table.mort->table->has_data ();
|
||||
}
|
||||
|
||||
void
|
||||
|
|
@ -260,26 +260,30 @@ hb_aat_layout_substitute (const hb_ot_shape_plan_t *plan,
|
|||
hb_aat_map_t map;
|
||||
builder.compile (map);
|
||||
|
||||
hb_blob_t *morx_blob = font->face->table.morx.get_blob ();
|
||||
const AAT::morx& morx = *morx_blob->as<AAT::morx> ();
|
||||
if (morx.has_data ())
|
||||
{
|
||||
AAT::hb_aat_apply_context_t c (plan, font, buffer, morx_blob);
|
||||
if (!buffer->message (font, "start table morx")) return;
|
||||
morx.apply (&c, map);
|
||||
(void) buffer->message (font, "end table morx");
|
||||
return;
|
||||
auto &accel = *font->face->table.morx;
|
||||
const AAT::morx& morx = *accel.table;
|
||||
if (morx.has_data ())
|
||||
{
|
||||
AAT::hb_aat_apply_context_t c (plan, font, buffer, accel.get_blob ());
|
||||
if (!buffer->message (font, "start table morx")) return;
|
||||
morx.apply (&c, map, accel);
|
||||
(void) buffer->message (font, "end table morx");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
hb_blob_t *mort_blob = font->face->table.mort.get_blob ();
|
||||
const AAT::mort& mort = *mort_blob->as<AAT::mort> ();
|
||||
if (mort.has_data ())
|
||||
{
|
||||
AAT::hb_aat_apply_context_t c (plan, font, buffer, mort_blob);
|
||||
if (!buffer->message (font, "start table mort")) return;
|
||||
mort.apply (&c, map);
|
||||
(void) buffer->message (font, "end table mort");
|
||||
return;
|
||||
auto &accel = *font->face->table.mort;
|
||||
const AAT::mort& mort = *accel.table;
|
||||
if (mort.has_data ())
|
||||
{
|
||||
AAT::hb_aat_apply_context_t c (plan, font, buffer, accel.get_blob ());
|
||||
if (!buffer->message (font, "start table mort")) return;
|
||||
mort.apply (&c, map, accel);
|
||||
(void) buffer->message (font, "end table mort");
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -322,7 +326,7 @@ hb_aat_layout_remove_deleted_glyphs (hb_buffer_t *buffer)
|
|||
hb_bool_t
|
||||
hb_aat_layout_has_positioning (hb_face_t *face)
|
||||
{
|
||||
return face->table.kerx->has_data ();
|
||||
return face->table.kerx->table->has_data ();
|
||||
}
|
||||
|
||||
void
|
||||
|
|
@ -330,13 +334,12 @@ hb_aat_layout_position (const hb_ot_shape_plan_t *plan,
|
|||
hb_font_t *font,
|
||||
hb_buffer_t *buffer)
|
||||
{
|
||||
hb_blob_t *kerx_blob = font->face->table.kerx.get_blob ();
|
||||
const AAT::kerx& kerx = *kerx_blob->as<AAT::kerx> ();
|
||||
auto &accel = *font->face->table.kerx;
|
||||
|
||||
AAT::hb_aat_apply_context_t c (plan, font, buffer, kerx_blob);
|
||||
AAT::hb_aat_apply_context_t c (plan, font, buffer, accel.get_blob ());
|
||||
if (!buffer->message (font, "start table kerx")) return;
|
||||
c.set_ankr_table (font->face->table.ankr.get ());
|
||||
kerx.apply (&c);
|
||||
accel.apply (&c);
|
||||
(void) buffer->message (font, "end table kerx");
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -202,8 +202,12 @@ struct BEInt<Type, 4>
|
|||
/* Floats. */
|
||||
|
||||
/* We want our rounding towards +infinity. */
|
||||
static inline double
|
||||
_hb_roundf (double x) { return floor (x + .5); }
|
||||
|
||||
static inline float
|
||||
_hb_roundf (float x) { return floorf (x + .5f); }
|
||||
|
||||
#define roundf(x) _hb_roundf(x)
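A quick worked check of the "towards +infinity" wording above, for the half-way cases (ordinary values round as usual):

/* _hb_roundf as defined above is floorf (x + .5f):                     */
/*   _hb_roundf ( 2.5f) ==  3.f   -- same as the C library's roundf     */
/*   _hb_roundf (-2.5f) == -2.f   -- the C library's roundf gives -3.f  */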
|
||||
|
||||
|
||||
|
|
@ -671,7 +675,7 @@ struct hb_pair_t
|
|||
return 0;
|
||||
}
|
||||
|
||||
friend void swap (hb_pair_t& a, hb_pair_t& b)
|
||||
friend void swap (hb_pair_t& a, hb_pair_t& b) noexcept
|
||||
{
|
||||
hb_swap (a.first, b.first);
|
||||
hb_swap (a.second, b.second);
|
||||
|
|
@ -1053,6 +1057,18 @@ _hb_cmp_method (const void *pkey, const void *pval, Ts... ds)
|
|||
return val.cmp (key, ds...);
|
||||
}
|
||||
|
||||
template <typename K, typename V>
|
||||
static int
|
||||
_hb_cmp_operator (const void *pkey, const void *pval)
|
||||
{
|
||||
const K& key = * (const K*) pkey;
|
||||
const V& val = * (const V*) pval;
|
||||
|
||||
if (key < val) return -1;
|
||||
if (key > val) return 1;
|
||||
return 0;
|
||||
}
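The comparator above has the plain int (*)(const void *, const void *) shape, so it can also be handed to libc bsearch. A small sketch, assuming this header is already included and the array is sorted ascending; the helper name find_u16 is made up for illustration.

#include <stdlib.h>   /* bsearch */

static const uint16_t *
find_u16 (unsigned key, const uint16_t *sorted, size_t len)
{
  return (const uint16_t *)
         bsearch (&key, sorted, len, sizeof (uint16_t),
                  _hb_cmp_operator<unsigned, uint16_t>);
}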
|
||||
|
||||
template <typename V, typename K, typename ...Ts>
|
||||
static inline bool
|
||||
hb_bsearch_impl (unsigned *pos, /* Out */
|
||||
|
|
|
|||
|
|
@ -39,10 +39,10 @@ struct hb_bit_set_invertible_t
|
|||
|
||||
hb_bit_set_invertible_t () = default;
|
||||
hb_bit_set_invertible_t (const hb_bit_set_invertible_t& o) = default;
|
||||
hb_bit_set_invertible_t (hb_bit_set_invertible_t&& other) : hb_bit_set_invertible_t () { hb_swap (*this, other); }
|
||||
hb_bit_set_invertible_t (hb_bit_set_invertible_t&& other) noexcept : hb_bit_set_invertible_t () { hb_swap (*this, other); }
|
||||
hb_bit_set_invertible_t& operator= (const hb_bit_set_invertible_t& o) = default;
|
||||
hb_bit_set_invertible_t& operator= (hb_bit_set_invertible_t&& other) { hb_swap (*this, other); return *this; }
|
||||
friend void swap (hb_bit_set_invertible_t &a, hb_bit_set_invertible_t &b)
|
||||
hb_bit_set_invertible_t& operator= (hb_bit_set_invertible_t&& other) noexcept { hb_swap (*this, other); return *this; }
|
||||
friend void swap (hb_bit_set_invertible_t &a, hb_bit_set_invertible_t &b) noexcept
|
||||
{
|
||||
if (likely (!a.s.successful || !b.s.successful))
|
||||
return;
|
||||
|
|
|
|||
|
|
@ -38,10 +38,10 @@ struct hb_bit_set_t
|
|||
~hb_bit_set_t () = default;
|
||||
|
||||
hb_bit_set_t (const hb_bit_set_t& other) : hb_bit_set_t () { set (other, true); }
|
||||
hb_bit_set_t ( hb_bit_set_t&& other) : hb_bit_set_t () { hb_swap (*this, other); }
|
||||
hb_bit_set_t ( hb_bit_set_t&& other) noexcept : hb_bit_set_t () { hb_swap (*this, other); }
|
||||
hb_bit_set_t& operator= (const hb_bit_set_t& other) { set (other); return *this; }
|
||||
hb_bit_set_t& operator= (hb_bit_set_t&& other) { hb_swap (*this, other); return *this; }
|
||||
friend void swap (hb_bit_set_t &a, hb_bit_set_t &b)
|
||||
hb_bit_set_t& operator= (hb_bit_set_t&& other) noexcept { hb_swap (*this, other); return *this; }
|
||||
friend void swap (hb_bit_set_t &a, hb_bit_set_t &b) noexcept
|
||||
{
|
||||
if (likely (!a.successful || !b.successful))
|
||||
return;
|
||||
|
|
|
|||
|
|
@ -598,6 +598,11 @@ _open_resource_fork (const char *file_name, hb_mapped_file_t *file)
|
|||
* Creates a new blob containing the data from the
|
||||
* specified binary font file.
|
||||
*
|
||||
* The filename is passed directly to the system on all platforms,
|
||||
* except on Windows, where the filename is interpreted as UTF-8.
|
||||
* Only if the filename is not valid UTF-8 will it be interpreted
|
||||
* according to the system codepage.
|
||||
*
|
||||
* Returns: An #hb_blob_t pointer with the content of the file,
|
||||
* or hb_blob_get_empty() if failed.
|
||||
*
|
||||
|
|
@ -617,6 +622,11 @@ hb_blob_create_from_file (const char *file_name)
|
|||
* Creates a new blob containing the data from the
|
||||
* specified binary font file.
|
||||
*
|
||||
* The filename is passed directly to the system on all platforms,
|
||||
* except on Windows, where the filename is interpreted as UTF-8.
|
||||
* Only if the filename is not valid UTF-8 will it be interpreted
|
||||
* according to the system codepage.
|
||||
*
|
||||
* Returns: An #hb_blob_t pointer with the content of the file,
|
||||
* or `NULL` if failed.
|
||||
*
|
||||
|
|
@ -672,10 +682,19 @@ fail_without_close:
|
|||
if (unlikely (!file)) return nullptr;
|
||||
|
||||
HANDLE fd;
|
||||
int conversion;
|
||||
unsigned int size = strlen (file_name) + 1;
|
||||
wchar_t * wchar_file_name = (wchar_t *) hb_malloc (sizeof (wchar_t) * size);
|
||||
if (unlikely (!wchar_file_name)) goto fail_without_close;
|
||||
mbstowcs (wchar_file_name, file_name, size);
|
||||
|
||||
/* Assume file name is given in UTF-8 encoding */
|
||||
conversion = MultiByteToWideChar(CP_UTF8, MB_ERR_INVALID_CHARS, file_name, -1, wchar_file_name, size);
|
||||
if (conversion <= 0)
|
||||
{
|
||||
/* Conversion failed due to invalid UTF-8 characters,
|
||||
Repeat conversion based on system code page */
|
||||
mbstowcs(wchar_file_name, file_name, size);
|
||||
}
|
||||
#if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) && WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP)
|
||||
{
|
||||
CREATEFILE2_EXTENDED_PARAMETERS ceparams = { 0 };
|
||||
|
|
|
|||
|
|
@ -149,7 +149,7 @@ buffer_verify_unsafe_to_break (hb_buffer_t *buffer,
|
|||
}
|
||||
assert (text_start < text_end);
|
||||
|
||||
if (0)
|
||||
if (false)
|
||||
printf("start %u end %u text start %u end %u\n", start, end, text_start, text_end);
|
||||
|
||||
hb_buffer_clear_contents (fragment);
|
||||
|
|
@ -288,7 +288,7 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
|
|||
}
|
||||
assert (text_start < text_end);
|
||||
|
||||
if (0)
|
||||
if (false)
|
||||
printf("start %u end %u text start %u end %u\n", start, end, text_start, text_end);
|
||||
|
||||
#if 0
|
||||
|
|
|
|||
|
|
@ -309,6 +309,7 @@ hb_buffer_t::clear ()
|
|||
|
||||
deallocate_var_all ();
|
||||
serial = 0;
|
||||
random_state = 1;
|
||||
scratch_flags = HB_BUFFER_SCRATCH_FLAG_DEFAULT;
|
||||
}
|
||||
|
||||
|
|
@ -1359,6 +1360,49 @@ hb_buffer_get_not_found_glyph (const hb_buffer_t *buffer)
|
|||
return buffer->not_found;
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_buffer_set_random_state:
|
||||
* @buffer: An #hb_buffer_t
|
||||
* @state: the new random state
|
||||
*
|
||||
* Sets the random state of the buffer. The state changes
|
||||
* every time a glyph uses randomness (e.g. the `rand`
|
||||
* OpenType feature). This function together with
|
||||
* hb_buffer_get_random_state() allows for transferring
|
||||
* the current random state to a subsequent buffer, to
|
||||
* get better randomness distribution.
|
||||
*
|
||||
* Defaults to 1, and is reset to 1 when the buffer contents are cleared.
|
||||
* A value of 0 disables randomness during shaping.
|
||||
*
|
||||
* Since: 8.4.0
|
||||
**/
|
||||
void
|
||||
hb_buffer_set_random_state (hb_buffer_t *buffer,
|
||||
unsigned state)
|
||||
{
|
||||
if (unlikely (hb_object_is_immutable (buffer)))
|
||||
return;
|
||||
|
||||
buffer->random_state = state;
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_buffer_get_random_state:
|
||||
* @buffer: An #hb_buffer_t
|
||||
*
|
||||
* See hb_buffer_set_random_state().
|
||||
*
|
||||
* Return value:
|
||||
* The @buffer random state
|
||||
*
|
||||
* Since: 8.4.0
|
||||
**/
|
||||
unsigned
|
||||
hb_buffer_get_random_state (const hb_buffer_t *buffer)
|
||||
{
|
||||
return buffer->random_state;
|
||||
}
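As a usage note for the pair of functions above, here is a minimal sketch. It assumes an hb_font_t created elsewhere; the helper name, the sample strings, and the choice of the OpenType 'rand' feature are illustrative only.

#include <hb.h>

static void
shape_two_runs_with_shared_rand_state (hb_font_t *font)
{
  hb_feature_t rand_feature;
  hb_feature_from_string ("rand", -1, &rand_feature);  /* enable the OpenType 'rand' feature */

  hb_buffer_t *first = hb_buffer_create ();
  hb_buffer_add_utf8 (first, "first run", -1, 0, -1);
  hb_buffer_guess_segment_properties (first);
  hb_shape (font, first, &rand_feature, 1);

  /* Carry the evolved random state into the next buffer so the
   * randomness distribution continues instead of restarting at 1. */
  unsigned state = hb_buffer_get_random_state (first);

  hb_buffer_t *second = hb_buffer_create ();
  hb_buffer_set_random_state (second, state);
  hb_buffer_add_utf8 (second, "second run", -1, 0, -1);
  hb_buffer_guess_segment_properties (second);
  hb_shape (font, second, &rand_feature, 1);

  hb_buffer_destroy (first);
  hb_buffer_destroy (second);
}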
|
||||
|
||||
/**
|
||||
* hb_buffer_clear_contents:
|
||||
|
|
|
|||
|
|
@ -487,6 +487,12 @@ hb_buffer_set_not_found_glyph (hb_buffer_t *buffer,
|
|||
HB_EXTERN hb_codepoint_t
|
||||
hb_buffer_get_not_found_glyph (const hb_buffer_t *buffer);
|
||||
|
||||
HB_EXTERN void
|
||||
hb_buffer_set_random_state (hb_buffer_t *buffer,
|
||||
unsigned state);
|
||||
|
||||
HB_EXTERN unsigned
|
||||
hb_buffer_get_random_state (const hb_buffer_t *buffer);
|
||||
|
||||
/*
|
||||
* Content API.
|
||||
|
|
|
|||
|
|
@ -116,6 +116,7 @@ struct hb_buffer_t
|
|||
|
||||
uint8_t allocated_var_bits;
|
||||
uint8_t serial;
|
||||
uint32_t random_state;
|
||||
hb_buffer_scratch_flags_t scratch_flags; /* Have space-fallback, etc. */
|
||||
unsigned int max_len; /* Maximum allowed len. */
|
||||
int max_ops; /* Maximum allowed operations. */
|
||||
|
|
|
|||
|
|
@ -624,7 +624,6 @@ struct opset_t
|
|||
} else {
|
||||
/* invalid unknown operator */
|
||||
env.clear_args ();
|
||||
env.set_error ();
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -54,8 +54,8 @@ struct top_dict_values_t : dict_values_t<OPSTR>
|
|||
}
|
||||
void fini () { dict_values_t<OPSTR>::fini (); }
|
||||
|
||||
unsigned int charStringsOffset;
|
||||
unsigned int FDArrayOffset;
|
||||
int charStringsOffset;
|
||||
int FDArrayOffset;
|
||||
};
|
||||
|
||||
struct dict_opset_t : opset_t<number_t>
|
||||
|
|
@ -157,11 +157,11 @@ struct top_dict_opset_t : dict_opset_t
|
|||
{
|
||||
switch (op) {
|
||||
case OpCode_CharStrings:
|
||||
dictval.charStringsOffset = env.argStack.pop_uint ();
|
||||
dictval.charStringsOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_FDArray:
|
||||
dictval.FDArrayOffset = env.argStack.pop_uint ();
|
||||
dictval.FDArrayOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_FontMatrix:
|
||||
|
|
|
|||
|
|
@ -76,16 +76,12 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<ELEM, CFF2Subrs>
|
|||
coords = coords_;
|
||||
num_coords = num_coords_;
|
||||
varStore = acc.varStore;
|
||||
seen_blend = false;
|
||||
seen_vsindex_ = false;
|
||||
scalars.init ();
|
||||
do_blend = num_coords && coords && varStore->size;
|
||||
set_ivs (acc.privateDicts[fd].ivs);
|
||||
}
|
||||
|
||||
void fini ()
|
||||
{
|
||||
scalars.fini ();
|
||||
SUPER::fini ();
|
||||
}
|
||||
|
||||
|
|
@ -168,13 +164,13 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<ELEM, CFF2Subrs>
|
|||
protected:
|
||||
const int *coords;
|
||||
unsigned int num_coords;
|
||||
const CFF2VariationStore *varStore;
|
||||
const CFF2ItemVariationStore *varStore;
|
||||
unsigned int region_count;
|
||||
unsigned int ivs;
|
||||
hb_vector_t<float> scalars;
|
||||
bool do_blend;
|
||||
bool seen_vsindex_;
|
||||
bool seen_blend;
|
||||
bool seen_vsindex_ = false;
|
||||
bool seen_blend = false;
|
||||
|
||||
typedef cs_interp_env_t<ELEM, CFF2Subrs> SUPER;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -996,7 +996,7 @@ hb_feature_to_string (hb_feature_t *feature,
|
|||
if (feature->value > 1)
|
||||
{
|
||||
s[len++] = '=';
|
||||
len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->value));
|
||||
len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%" PRIu32, feature->value));
|
||||
}
|
||||
assert (len < ARRAY_LENGTH (s));
|
||||
len = hb_min (len, size - 1);
|
||||
|
|
|
|||
|
|
@ -47,14 +47,10 @@
|
|||
# endif /* !__cplusplus */
|
||||
#endif
|
||||
|
||||
#if defined (_SVR4) || defined (SVR4) || defined (__OpenBSD__) || \
|
||||
defined (_sgi) || defined (__sun) || defined (sun) || \
|
||||
defined (__digital__) || defined (__HP_cc)
|
||||
# include <inttypes.h>
|
||||
#elif defined (_AIX)
|
||||
#if defined (_AIX)
|
||||
# include <sys/inttypes.h>
|
||||
#elif defined (_MSC_VER) && _MSC_VER < 1600
|
||||
/* VS 2010 (_MSC_VER 1600) has stdint.h */
|
||||
/* VS 2010 (_MSC_VER 1600) has stdint.h */
|
||||
typedef __int8 int8_t;
|
||||
typedef unsigned __int8 uint8_t;
|
||||
typedef __int16 int16_t;
|
||||
|
|
@ -63,10 +59,11 @@ typedef __int32 int32_t;
|
|||
typedef unsigned __int32 uint32_t;
|
||||
typedef __int64 int64_t;
|
||||
typedef unsigned __int64 uint64_t;
|
||||
#elif defined (__KERNEL__)
|
||||
# include <linux/types.h>
|
||||
#else
|
||||
#elif defined (_MSC_VER) && _MSC_VER < 1800
|
||||
/* VS 2013 (_MSC_VER 1800) has inttypes.h */
|
||||
# include <stdint.h>
|
||||
#else
|
||||
# include <inttypes.h>
|
||||
#endif
|
||||
|
||||
#if defined(__GNUC__) && ((__GNUC__ > 3) || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
|
||||
|
|
|
|||
|
|
@ -118,6 +118,10 @@
|
|||
#define HB_NO_VAR_COMPOSITES
|
||||
#endif
|
||||
|
||||
#ifdef HB_NO_VAR
|
||||
#define HB_NO_VAR_COMPOSITES
|
||||
#endif
|
||||
|
||||
#ifdef HB_DISABLE_DEPRECATED
|
||||
#define HB_IF_NOT_DEPRECATED(x)
|
||||
#else
|
||||
|
|
|
|||
|
|
@ -27,9 +27,6 @@
|
|||
|
||||
#include "hb.h"
|
||||
|
||||
HB_BEGIN_DECLS
|
||||
HB_END_DECLS
|
||||
|
||||
#ifdef __cplusplus
|
||||
|
||||
#include <functional>
|
||||
|
|
@ -56,15 +53,15 @@ struct shared_ptr
|
|||
|
||||
explicit shared_ptr (T *p = nullptr) : p (p) {}
|
||||
shared_ptr (const shared_ptr &o) : p (v::reference (o.p)) {}
|
||||
shared_ptr (shared_ptr &&o) : p (o.p) { o.p = nullptr; }
|
||||
shared_ptr (shared_ptr &&o) noexcept : p (o.p) { o.p = nullptr; }
|
||||
shared_ptr& operator = (const shared_ptr &o) { if (p != o.p) { destroy (); p = o.p; reference (); } return *this; }
|
||||
shared_ptr& operator = (shared_ptr &&o) { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
|
||||
shared_ptr& operator = (shared_ptr &&o) noexcept { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
|
||||
~shared_ptr () { v::destroy (p); p = nullptr; }
|
||||
|
||||
T* get() const { return p; }
|
||||
|
||||
void swap (shared_ptr &o) { std::swap (p, o.p); }
|
||||
friend void swap (shared_ptr &a, shared_ptr &b) { std::swap (a.p, b.p); }
|
||||
void swap (shared_ptr &o) noexcept { std::swap (p, o.p); }
|
||||
friend void swap (shared_ptr &a, shared_ptr &b) noexcept { std::swap (a.p, b.p); }
|
||||
|
||||
operator T * () const { return p; }
|
||||
T& operator * () const { return *get (); }
|
||||
|
|
@ -98,16 +95,16 @@ struct unique_ptr
|
|||
|
||||
explicit unique_ptr (T *p = nullptr) : p (p) {}
|
||||
unique_ptr (const unique_ptr &o) = delete;
|
||||
unique_ptr (unique_ptr &&o) : p (o.p) { o.p = nullptr; }
|
||||
unique_ptr (unique_ptr &&o) noexcept : p (o.p) { o.p = nullptr; }
|
||||
unique_ptr& operator = (const unique_ptr &o) = delete;
|
||||
unique_ptr& operator = (unique_ptr &&o) { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
|
||||
unique_ptr& operator = (unique_ptr &&o) noexcept { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
|
||||
~unique_ptr () { v::destroy (p); p = nullptr; }
|
||||
|
||||
T* get() const { return p; }
|
||||
T* release () { T* v = p; p = nullptr; return v; }
|
||||
|
||||
void swap (unique_ptr &o) { std::swap (p, o.p); }
|
||||
friend void swap (unique_ptr &a, unique_ptr &b) { std::swap (a.p, b.p); }
|
||||
void swap (unique_ptr &o) noexcept { std::swap (p, o.p); }
|
||||
friend void swap (unique_ptr &a, unique_ptr &b) noexcept { std::swap (a.p, b.p); }
|
||||
|
||||
operator T * () const { return p; }
|
||||
T& operator * () const { return *get (); }
|
||||
|
|
|
|||
|
|
@ -173,7 +173,7 @@ _hb_directwrite_shaper_face_data_create (hb_face_t *face)
|
|||
|
||||
t_DWriteCreateFactory p_DWriteCreateFactory;
|
||||
|
||||
#if defined(__GNUC__)
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wcast-function-type"
|
||||
#endif
|
||||
|
|
@ -181,7 +181,7 @@ _hb_directwrite_shaper_face_data_create (hb_face_t *face)
|
|||
p_DWriteCreateFactory = (t_DWriteCreateFactory)
|
||||
GetProcAddress (data->dwrite_dll, "DWriteCreateFactory");
|
||||
|
||||
#if defined(__GNUC__)
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
#pragma GCC diagnostic pop
|
||||
#endif
|
||||
|
||||
|
|
|
|||
|
|
@ -232,7 +232,7 @@ struct hb_draw_session_t
|
|||
funcs->close_path (draw_data, st);
|
||||
}
|
||||
|
||||
protected:
|
||||
public:
|
||||
float slant;
|
||||
bool not_slanted;
|
||||
hb_draw_funcs_t *funcs;
|
||||
|
|
|
|||
|
|
@ -651,7 +651,7 @@ struct hb_font_t
|
|||
{
|
||||
if (get_glyph_name (glyph, s, size)) return;
|
||||
|
||||
if (size && snprintf (s, size, "gid%u", glyph) < 0)
|
||||
if (size && snprintf (s, size, "gid%" PRIu32, glyph) < 0)
|
||||
*s = '\0';
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -108,7 +108,7 @@ struct hb_ft_paint_context_t
|
|||
hb_map_t current_glyphs;
|
||||
hb_map_t current_layers;
|
||||
int depth_left = HB_MAX_NESTING_LEVEL;
|
||||
int edge_count = HB_COLRV1_MAX_EDGE_COUNT;
|
||||
int edge_count = HB_MAX_GRAPH_EDGE_COUNT;
|
||||
};
|
||||
|
||||
static unsigned
|
||||
|
|
|
|||
|
|
@ -1215,7 +1215,7 @@ hb_ft_face_finalize (void *arg)
|
|||
hb_face_t *
|
||||
hb_ft_face_create_cached (FT_Face ft_face)
|
||||
{
|
||||
if (unlikely (!ft_face->generic.data || ft_face->generic.finalizer != (FT_Generic_Finalizer) hb_ft_face_finalize))
|
||||
if (unlikely (!ft_face->generic.data || ft_face->generic.finalizer != hb_ft_face_finalize))
|
||||
{
|
||||
if (ft_face->generic.finalizer)
|
||||
ft_face->generic.finalizer (ft_face);
|
||||
|
|
|
|||
modules/juce_graphics/fonts/harfbuzz/hb-geometry.hh (new file, 284 lines)
|
|
@ -0,0 +1,284 @@
|
|||
/*
|
||||
* Copyright © 2022 Behdad Esfahbod
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*/
|
||||
#ifndef HB_GEOMETRY_HH
|
||||
#define HB_GEOMETRY_HH
|
||||
|
||||
#include "hb.hh"
|
||||
|
||||
|
||||
struct hb_extents_t
|
||||
{
|
||||
hb_extents_t () {}
|
||||
hb_extents_t (float xmin, float ymin, float xmax, float ymax) :
|
||||
xmin (xmin), ymin (ymin), xmax (xmax), ymax (ymax) {}
|
||||
|
||||
bool is_empty () const { return xmin >= xmax || ymin >= ymax; }
|
||||
bool is_void () const { return xmin > xmax; }
|
||||
|
||||
void union_ (const hb_extents_t &o)
|
||||
{
|
||||
xmin = hb_min (xmin, o.xmin);
|
||||
ymin = hb_min (ymin, o.ymin);
|
||||
xmax = hb_max (xmax, o.xmax);
|
||||
ymax = hb_max (ymax, o.ymax);
|
||||
}
|
||||
|
||||
void intersect (const hb_extents_t &o)
|
||||
{
|
||||
xmin = hb_max (xmin, o.xmin);
|
||||
ymin = hb_max (ymin, o.ymin);
|
||||
xmax = hb_min (xmax, o.xmax);
|
||||
ymax = hb_min (ymax, o.ymax);
|
||||
}
|
||||
|
||||
void
|
||||
add_point (float x, float y)
|
||||
{
|
||||
if (unlikely (is_void ()))
|
||||
{
|
||||
xmin = xmax = x;
|
||||
ymin = ymax = y;
|
||||
}
|
||||
else
|
||||
{
|
||||
xmin = hb_min (xmin, x);
|
||||
ymin = hb_min (ymin, y);
|
||||
xmax = hb_max (xmax, x);
|
||||
ymax = hb_max (ymax, y);
|
||||
}
|
||||
}
|
||||
|
||||
float xmin = 0.f;
|
||||
float ymin = 0.f;
|
||||
float xmax = -1.f;
|
||||
float ymax = -1.f;
|
||||
};
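A note on the defaults above: a freshly constructed hb_extents_t is "void" (xmax < xmin), so the first add_point() sets all four fields instead of taking a min/max against the sentinel values. A tiny sketch (values arbitrary):

hb_extents_t e;            /* void: xmin = 0, xmax = -1 */
e.add_point (3.f, 4.f);    /* degenerate box at (3, 4) */
e.add_point (1.f, 7.f);    /* now xmin = 1, ymin = 4, xmax = 3, ymax = 7 */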
|
||||
|
||||
struct hb_transform_t
|
||||
{
|
||||
hb_transform_t () {}
|
||||
hb_transform_t (float xx, float yx,
|
||||
float xy, float yy,
|
||||
float x0, float y0) :
|
||||
xx (xx), yx (yx), xy (xy), yy (yy), x0 (x0), y0 (y0) {}
|
||||
|
||||
void multiply (const hb_transform_t &o)
|
||||
{
|
||||
/* Copied from cairo, with "o" being "a" there and "this" being "b" there. */
|
||||
hb_transform_t r;
|
||||
|
||||
r.xx = o.xx * xx + o.yx * xy;
|
||||
r.yx = o.xx * yx + o.yx * yy;
|
||||
|
||||
r.xy = o.xy * xx + o.yy * xy;
|
||||
r.yy = o.xy * yx + o.yy * yy;
|
||||
|
||||
r.x0 = o.x0 * xx + o.y0 * xy + x0;
|
||||
r.y0 = o.x0 * yx + o.y0 * yy + y0;
|
||||
|
||||
*this = r;
|
||||
}
|
||||
|
||||
void transform_distance (float &dx, float &dy) const
|
||||
{
|
||||
float new_x = xx * dx + xy * dy;
|
||||
float new_y = yx * dx + yy * dy;
|
||||
dx = new_x;
|
||||
dy = new_y;
|
||||
}
|
||||
|
||||
void transform_point (float &x, float &y) const
|
||||
{
|
||||
transform_distance (x, y);
|
||||
x += x0;
|
||||
y += y0;
|
||||
}
|
||||
|
||||
void transform_extents (hb_extents_t &extents) const
|
||||
{
|
||||
float quad_x[4], quad_y[4];
|
||||
|
||||
quad_x[0] = extents.xmin;
|
||||
quad_y[0] = extents.ymin;
|
||||
quad_x[1] = extents.xmin;
|
||||
quad_y[1] = extents.ymax;
|
||||
quad_x[2] = extents.xmax;
|
||||
quad_y[2] = extents.ymin;
|
||||
quad_x[3] = extents.xmax;
|
||||
quad_y[3] = extents.ymax;
|
||||
|
||||
extents = hb_extents_t {};
|
||||
for (unsigned i = 0; i < 4; i++)
|
||||
{
|
||||
transform_point (quad_x[i], quad_y[i]);
|
||||
extents.add_point (quad_x[i], quad_y[i]);
|
||||
}
|
||||
}
|
||||
|
||||
void transform (const hb_transform_t &o) { multiply (o); }
|
||||
|
||||
void translate (float x, float y)
|
||||
{
|
||||
if (x == 0.f && y == 0.f)
|
||||
return;
|
||||
|
||||
x0 += xx * x + xy * y;
|
||||
y0 += yx * x + yy * y;
|
||||
}
|
||||
|
||||
void scale (float scaleX, float scaleY)
|
||||
{
|
||||
if (scaleX == 1.f && scaleY == 1.f)
|
||||
return;
|
||||
|
||||
xx *= scaleX;
|
||||
yx *= scaleX;
|
||||
xy *= scaleY;
|
||||
yy *= scaleY;
|
||||
}
|
||||
|
||||
void rotate (float rotation)
|
||||
{
|
||||
if (rotation == 0.f)
|
||||
return;
|
||||
|
||||
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L240
|
||||
rotation = rotation * HB_PI;
|
||||
float c;
|
||||
float s;
|
||||
#ifdef HAVE_SINCOSF
|
||||
sincosf (rotation, &s, &c);
|
||||
#else
|
||||
c = cosf (rotation);
|
||||
s = sinf (rotation);
|
||||
#endif
|
||||
auto other = hb_transform_t{c, s, -s, c, 0.f, 0.f};
|
||||
transform (other);
|
||||
}
|
||||
|
||||
void skew (float skewX, float skewY)
|
||||
{
|
||||
if (skewX == 0.f && skewY == 0.f)
|
||||
return;
|
||||
|
||||
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L255
|
||||
skewX = skewX * HB_PI;
|
||||
skewY = skewY * HB_PI;
|
||||
auto other = hb_transform_t{1.f,
|
||||
skewY ? tanf (skewY) : 0.f,
|
||||
skewX ? tanf (skewX) : 0.f,
|
||||
1.f,
|
||||
0.f, 0.f};
|
||||
transform (other);
|
||||
}
|
||||
|
||||
float xx = 1.f;
|
||||
float yx = 0.f;
|
||||
float xy = 0.f;
|
||||
float yy = 1.f;
|
||||
float x0 = 0.f;
|
||||
float y0 = 0.f;
|
||||
};
|
||||
|
||||
struct hb_bounds_t
|
||||
{
|
||||
enum status_t {
|
||||
UNBOUNDED,
|
||||
BOUNDED,
|
||||
EMPTY,
|
||||
};
|
||||
|
||||
hb_bounds_t (status_t status) : status (status) {}
|
||||
hb_bounds_t (const hb_extents_t &extents) :
|
||||
status (extents.is_empty () ? EMPTY : BOUNDED), extents (extents) {}
|
||||
|
||||
void union_ (const hb_bounds_t &o)
|
||||
{
|
||||
if (o.status == UNBOUNDED)
|
||||
status = UNBOUNDED;
|
||||
else if (o.status == BOUNDED)
|
||||
{
|
||||
if (status == EMPTY)
|
||||
*this = o;
|
||||
else if (status == BOUNDED)
|
||||
extents.union_ (o.extents);
|
||||
}
|
||||
}
|
||||
|
||||
void intersect (const hb_bounds_t &o)
|
||||
{
|
||||
if (o.status == EMPTY)
|
||||
status = EMPTY;
|
||||
else if (o.status == BOUNDED)
|
||||
{
|
||||
if (status == UNBOUNDED)
|
||||
*this = o;
|
||||
else if (status == BOUNDED)
|
||||
{
|
||||
extents.intersect (o.extents);
|
||||
if (extents.is_empty ())
|
||||
status = EMPTY;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
status_t status;
|
||||
hb_extents_t extents;
|
||||
};
|
||||
|
||||
struct hb_transform_decomposed_t
|
||||
{
|
||||
float translateX = 0;
|
||||
float translateY = 0;
|
||||
float rotation = 0; // in degrees, counter-clockwise
|
||||
float scaleX = 1;
|
||||
float scaleY = 1;
|
||||
float skewX = 0; // in degrees, counter-clockwise
|
||||
float skewY = 0; // in degrees, counter-clockwise
|
||||
float tCenterX = 0;
|
||||
float tCenterY = 0;
|
||||
|
||||
operator bool () const
|
||||
{
|
||||
return translateX || translateY ||
|
||||
rotation ||
|
||||
scaleX != 1 || scaleY != 1 ||
|
||||
skewX || skewY ||
|
||||
tCenterX || tCenterY;
|
||||
}
|
||||
|
||||
hb_transform_t to_transform () const
|
||||
{
|
||||
hb_transform_t t;
|
||||
t.translate (translateX + tCenterX, translateY + tCenterY);
|
||||
t.rotate (rotation);
|
||||
t.scale (scaleX, scaleY);
|
||||
t.skew (-skewX, skewY);
|
||||
t.translate (-tCenterX, -tCenterY);
|
||||
return t;
|
||||
}
|
||||
};
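To tie the pieces of this header together, a small sketch (internal-only; assumes the header is included from within HarfBuzz, the function name and values are arbitrary): compose a transform from the primitives above and push a point and a box through it.

static void
transform_sketch ()
{
  hb_transform_t t;             /* starts as the identity matrix */
  t.translate (10.f, 20.f);
  t.scale (2.f, 2.f);

  float x = 1.f, y = 1.f;
  t.transform_point (x, y);     /* x == 12, y == 22 with the values above */

  hb_extents_t box (0.f, 0.f, 1.f, 1.f);
  t.transform_extents (box);    /* bounding box of the transformed unit square */
}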
|
||||
|
||||
|
||||
#endif /* HB_GEOMETRY_HH */
|
||||
modules/juce_graphics/fonts/harfbuzz/hb-gobject-structs.cc (new file, 116 lines)
|
|
@ -0,0 +1,116 @@
|
|||
/*
|
||||
* Copyright © 2011 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Google Author(s): Behdad Esfahbod
|
||||
*/
|
||||
|
||||
#include "hb.hh"
|
||||
|
||||
#ifdef HAVE_GOBJECT
|
||||
|
||||
|
||||
/*
|
||||
* SECTION:hb-gobject
|
||||
* @title: hb-gobject
|
||||
* @short_description: GObject integration support
|
||||
* @include: hb-gobject.h
|
||||
*
|
||||
* Support for using HarfBuzz with the GObject library to provide
|
||||
* type data.
|
||||
*
|
||||
* The types and functions listed here are solely a linkage between
|
||||
* HarfBuzz's public data types and the GTypes used by the GObject framework.
|
||||
* HarfBuzz uses GObject introspection to generate its Python bindings
|
||||
* (and potentially other language bindings); client programs should never need
|
||||
* to access the GObject-integration mechanics.
|
||||
*
|
||||
* For client programs using the GNOME and GTK software stack, please see the
|
||||
* GLib and FreeType integration pages.
|
||||
**/
|
||||
|
||||
|
||||
/* g++ didn't like older gtype.h gcc-only code path. */
|
||||
#include <glib.h>
|
||||
#if !GLIB_CHECK_VERSION(2,29,16)
|
||||
#undef __GNUC__
|
||||
#undef __GNUC_MINOR__
|
||||
#define __GNUC__ 2
|
||||
#define __GNUC_MINOR__ 6
|
||||
#endif
|
||||
|
||||
#include "hb-gobject.h"
|
||||
|
||||
#define HB_DEFINE_BOXED_TYPE(name,copy_func,free_func) \
|
||||
GType \
|
||||
hb_gobject_##name##_get_type () \
|
||||
{ \
|
||||
static gsize type_id = 0; \
|
||||
if (g_once_init_enter (&type_id)) { \
|
||||
GType id = g_boxed_type_register_static (g_intern_static_string ("hb_" #name "_t"), \
|
||||
(GBoxedCopyFunc) copy_func, \
|
||||
(GBoxedFreeFunc) free_func); \
|
||||
g_once_init_leave (&type_id, id); \
|
||||
} \
|
||||
return type_id; \
|
||||
}
|
||||
|
||||
#define HB_DEFINE_OBJECT_TYPE(name) \
|
||||
HB_DEFINE_BOXED_TYPE (name, hb_##name##_reference, hb_##name##_destroy)
|
||||
|
||||
#define HB_DEFINE_VALUE_TYPE(name) \
|
||||
static hb_##name##_t *_hb_##name##_reference (const hb_##name##_t *l) \
|
||||
{ \
|
||||
hb_##name##_t *c = (hb_##name##_t *) hb_calloc (1, sizeof (hb_##name##_t)); \
|
||||
if (unlikely (!c)) return nullptr; \
|
||||
*c = *l; \
|
||||
return c; \
|
||||
} \
|
||||
static void _hb_##name##_destroy (hb_##name##_t *l) { hb_free (l); } \
|
||||
HB_DEFINE_BOXED_TYPE (name, _hb_##name##_reference, _hb_##name##_destroy)
|
||||
|
||||
HB_DEFINE_OBJECT_TYPE (buffer)
|
||||
HB_DEFINE_OBJECT_TYPE (blob)
|
||||
HB_DEFINE_OBJECT_TYPE (draw_funcs)
|
||||
HB_DEFINE_OBJECT_TYPE (paint_funcs)
|
||||
HB_DEFINE_OBJECT_TYPE (face)
|
||||
HB_DEFINE_OBJECT_TYPE (font)
|
||||
HB_DEFINE_OBJECT_TYPE (font_funcs)
|
||||
HB_DEFINE_OBJECT_TYPE (set)
|
||||
HB_DEFINE_OBJECT_TYPE (map)
|
||||
HB_DEFINE_OBJECT_TYPE (shape_plan)
|
||||
HB_DEFINE_OBJECT_TYPE (unicode_funcs)
|
||||
HB_DEFINE_VALUE_TYPE (feature)
|
||||
HB_DEFINE_VALUE_TYPE (glyph_info)
|
||||
HB_DEFINE_VALUE_TYPE (glyph_position)
|
||||
HB_DEFINE_VALUE_TYPE (segment_properties)
|
||||
HB_DEFINE_VALUE_TYPE (draw_state)
|
||||
HB_DEFINE_VALUE_TYPE (color_stop)
|
||||
HB_DEFINE_VALUE_TYPE (color_line)
|
||||
HB_DEFINE_VALUE_TYPE (user_data_key)
|
||||
|
||||
HB_DEFINE_VALUE_TYPE (ot_var_axis_info)
|
||||
HB_DEFINE_VALUE_TYPE (ot_math_glyph_variant)
|
||||
HB_DEFINE_VALUE_TYPE (ot_math_glyph_part)
|
||||
|
||||
|
||||
#endif
|
||||
136
modules/juce_graphics/fonts/harfbuzz/hb-gobject-structs.h
Normal file
136
modules/juce_graphics/fonts/harfbuzz/hb-gobject-structs.h
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
/*
|
||||
* Copyright (C) 2011 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Google Author(s): Behdad Esfahbod
|
||||
*/
|
||||
|
||||
#if !defined(HB_GOBJECT_H_IN) && !defined(HB_NO_SINGLE_HEADER_ERROR)
|
||||
#error "Include <hb-gobject.h> instead."
|
||||
#endif
|
||||
|
||||
#ifndef HB_GOBJECT_STRUCTS_H
|
||||
#define HB_GOBJECT_STRUCTS_H
|
||||
|
||||
#include "hb.h"
|
||||
|
||||
#include <glib-object.h>
|
||||
|
||||
HB_BEGIN_DECLS
|
||||
|
||||
|
||||
/* Object types */
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_blob_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_BLOB (hb_gobject_blob_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_buffer_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_BUFFER (hb_gobject_buffer_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_draw_funcs_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_DRAW_FUNCS (hb_gobject_draw_funcs_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_paint_funcs_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_PAINT_FUNCS (hb_gobject_paint_funcs_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_face_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_FACE (hb_gobject_face_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_font_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_FONT (hb_gobject_font_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_font_funcs_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_FONT_FUNCS (hb_gobject_font_funcs_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_set_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_SET (hb_gobject_set_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_map_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_MAP (hb_gobject_map_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_shape_plan_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_SHAPE_PLAN (hb_gobject_shape_plan_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_unicode_funcs_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_UNICODE_FUNCS (hb_gobject_unicode_funcs_get_type ())
|
||||
|
||||
/* Value types */
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_feature_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_FEATURE (hb_gobject_feature_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_glyph_info_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_GLYPH_INFO (hb_gobject_glyph_info_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_glyph_position_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_GLYPH_POSITION (hb_gobject_glyph_position_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_segment_properties_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_SEGMENT_PROPERTIES (hb_gobject_segment_properties_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_draw_state_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_DRAW_STATE (hb_gobject_draw_state_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_color_stop_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_COLOR_STOP (hb_gobject_color_stop_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_color_line_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_COLOR_LINE (hb_gobject_color_line_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_user_data_key_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_USER_DATA_KEY (hb_gobject_user_data_key_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_ot_var_axis_info_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_OT_VAR_AXIS_INFO (hb_gobject_ot_var_axis_info_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_ot_math_glyph_variant_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_OT_MATH_GLYPH_VARIANT (hb_gobject_ot_math_glyph_variant_get_type ())
|
||||
|
||||
HB_EXTERN GType
|
||||
hb_gobject_ot_math_glyph_part_get_type (void);
|
||||
#define HB_GOBJECT_TYPE_OT_MATH_GLYPH_PART (hb_gobject_ot_math_glyph_part_get_type ())
|
||||
|
||||
|
||||
HB_END_DECLS
|
||||
|
||||
#endif /* HB_GOBJECT_H */
|
||||
40
modules/juce_graphics/fonts/harfbuzz/hb-gobject.h
Normal file
40
modules/juce_graphics/fonts/harfbuzz/hb-gobject.h
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
/*
|
||||
* Copyright (C) 2011 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Google Author(s): Behdad Esfahbod
|
||||
*/
|
||||
|
||||
#ifndef HB_GOBJECT_H
|
||||
#define HB_GOBJECT_H
|
||||
#define HB_GOBJECT_H_IN
|
||||
|
||||
#include "hb.h"
|
||||
|
||||
#include "hb-gobject-enums.h"
|
||||
#include "hb-gobject-structs.h"
|
||||
|
||||
HB_BEGIN_DECLS
|
||||
HB_END_DECLS
|
||||
|
||||
#undef HB_GOBJECT_H_IN
|
||||
#endif /* HB_GOBJECT_H */
|
||||
291
modules/juce_graphics/fonts/harfbuzz/hb-icu.cc
Normal file
291
modules/juce_graphics/fonts/harfbuzz/hb-icu.cc
Normal file
|
|
@ -0,0 +1,291 @@
|
|||
/*
|
||||
* Copyright © 2009 Red Hat, Inc.
|
||||
* Copyright © 2009 Keith Stribley
|
||||
* Copyright © 2011 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Red Hat Author(s): Behdad Esfahbod
|
||||
* Google Author(s): Behdad Esfahbod
|
||||
*/
|
||||
|
||||
#include "hb.hh"
|
||||
|
||||
#ifdef HAVE_ICU
|
||||
|
||||
#include "hb-icu.h"
|
||||
|
||||
#include "hb-machinery.hh"
|
||||
|
||||
#include <unicode/uchar.h>
|
||||
#include <unicode/unorm2.h>
|
||||
#include <unicode/ustring.h>
|
||||
#include <unicode/utf16.h>
|
||||
#include <unicode/uversion.h>
|
||||
|
||||
/* ICU extra semicolon, fixed since 65, https://github.com/unicode-org/icu/commit/480bec3 */
|
||||
#if U_ICU_VERSION_MAJOR_NUM < 65 && (defined(__GNUC__) || defined(__clang__))
|
||||
#define HB_ICU_EXTRA_SEMI_IGNORED
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wextra-semi-stmt"
|
||||
#endif
|
||||
|
||||
/**
|
||||
* SECTION:hb-icu
|
||||
* @title: hb-icu
|
||||
* @short_description: ICU integration
|
||||
* @include: hb-icu.h
|
||||
*
|
||||
* Functions for using HarfBuzz with the International Components for Unicode
|
||||
* (ICU) library. HarfBuzz supports using ICU to provide Unicode data, by attaching
|
||||
* ICU functions to the virtual methods in a #hb_unicode_funcs_t function
|
||||
* structure.
|
||||
**/
|
||||
|
||||
/**
|
||||
* hb_icu_script_to_script:
|
||||
* @script: The UScriptCode identifier to query
|
||||
*
|
||||
* Fetches the #hb_script_t script that corresponds to the
|
||||
* specified UScriptCode identifier.
|
||||
*
|
||||
* Return value: the #hb_script_t script found
|
||||
*
|
||||
**/
|
||||
|
||||
hb_script_t
|
||||
hb_icu_script_to_script (UScriptCode script)
|
||||
{
|
||||
if (unlikely (script == USCRIPT_INVALID_CODE))
|
||||
return HB_SCRIPT_INVALID;
|
||||
|
||||
return hb_script_from_string (uscript_getShortName (script), -1);
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_icu_script_from_script:
|
||||
* @script: The #hb_script_t script to query
|
||||
*
|
||||
* Fetches the UScriptCode identifier that corresponds to the
|
||||
* specified #hb_script_t script.
|
||||
*
|
||||
* Return value: the UScriptCode identifier found
|
||||
*
|
||||
**/
|
||||
UScriptCode
|
||||
hb_icu_script_from_script (hb_script_t script)
|
||||
{
|
||||
UScriptCode out = USCRIPT_INVALID_CODE;
|
||||
|
||||
if (unlikely (script == HB_SCRIPT_INVALID))
|
||||
return out;
|
||||
|
||||
UErrorCode icu_err = U_ZERO_ERROR;
|
||||
const unsigned char buf[5] = {HB_UNTAG (script), 0};
|
||||
uscript_getCode ((const char *) buf, &out, 1, &icu_err);
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
|
||||
static hb_unicode_combining_class_t
|
||||
hb_icu_unicode_combining_class (hb_unicode_funcs_t *ufuncs HB_UNUSED,
|
||||
hb_codepoint_t unicode,
|
||||
void *user_data HB_UNUSED)
|
||||
|
||||
{
|
||||
return (hb_unicode_combining_class_t) u_getCombiningClass (unicode);
|
||||
}
|
||||
|
||||
static hb_unicode_general_category_t
|
||||
hb_icu_unicode_general_category (hb_unicode_funcs_t *ufuncs HB_UNUSED,
|
||||
hb_codepoint_t unicode,
|
||||
void *user_data HB_UNUSED)
|
||||
{
|
||||
switch (u_getIntPropertyValue(unicode, UCHAR_GENERAL_CATEGORY))
|
||||
{
|
||||
case U_UNASSIGNED: return HB_UNICODE_GENERAL_CATEGORY_UNASSIGNED;
|
||||
|
||||
case U_UPPERCASE_LETTER: return HB_UNICODE_GENERAL_CATEGORY_UPPERCASE_LETTER;
|
||||
case U_LOWERCASE_LETTER: return HB_UNICODE_GENERAL_CATEGORY_LOWERCASE_LETTER;
|
||||
case U_TITLECASE_LETTER: return HB_UNICODE_GENERAL_CATEGORY_TITLECASE_LETTER;
|
||||
case U_MODIFIER_LETTER: return HB_UNICODE_GENERAL_CATEGORY_MODIFIER_LETTER;
|
||||
case U_OTHER_LETTER: return HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER;
|
||||
|
||||
case U_NON_SPACING_MARK: return HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK;
|
||||
case U_ENCLOSING_MARK: return HB_UNICODE_GENERAL_CATEGORY_ENCLOSING_MARK;
|
||||
case U_COMBINING_SPACING_MARK: return HB_UNICODE_GENERAL_CATEGORY_SPACING_MARK;
|
||||
|
||||
case U_DECIMAL_DIGIT_NUMBER: return HB_UNICODE_GENERAL_CATEGORY_DECIMAL_NUMBER;
|
||||
case U_LETTER_NUMBER: return HB_UNICODE_GENERAL_CATEGORY_LETTER_NUMBER;
|
||||
case U_OTHER_NUMBER: return HB_UNICODE_GENERAL_CATEGORY_OTHER_NUMBER;
|
||||
|
||||
case U_SPACE_SEPARATOR: return HB_UNICODE_GENERAL_CATEGORY_SPACE_SEPARATOR;
|
||||
case U_LINE_SEPARATOR: return HB_UNICODE_GENERAL_CATEGORY_LINE_SEPARATOR;
|
||||
case U_PARAGRAPH_SEPARATOR: return HB_UNICODE_GENERAL_CATEGORY_PARAGRAPH_SEPARATOR;
|
||||
|
||||
case U_CONTROL_CHAR: return HB_UNICODE_GENERAL_CATEGORY_CONTROL;
|
||||
case U_FORMAT_CHAR: return HB_UNICODE_GENERAL_CATEGORY_FORMAT;
|
||||
case U_PRIVATE_USE_CHAR: return HB_UNICODE_GENERAL_CATEGORY_PRIVATE_USE;
|
||||
case U_SURROGATE: return HB_UNICODE_GENERAL_CATEGORY_SURROGATE;
|
||||
|
||||
|
||||
case U_DASH_PUNCTUATION: return HB_UNICODE_GENERAL_CATEGORY_DASH_PUNCTUATION;
|
||||
case U_START_PUNCTUATION: return HB_UNICODE_GENERAL_CATEGORY_OPEN_PUNCTUATION;
|
||||
case U_END_PUNCTUATION: return HB_UNICODE_GENERAL_CATEGORY_CLOSE_PUNCTUATION;
|
||||
case U_CONNECTOR_PUNCTUATION: return HB_UNICODE_GENERAL_CATEGORY_CONNECT_PUNCTUATION;
|
||||
case U_OTHER_PUNCTUATION: return HB_UNICODE_GENERAL_CATEGORY_OTHER_PUNCTUATION;
|
||||
|
||||
case U_MATH_SYMBOL: return HB_UNICODE_GENERAL_CATEGORY_MATH_SYMBOL;
|
||||
case U_CURRENCY_SYMBOL: return HB_UNICODE_GENERAL_CATEGORY_CURRENCY_SYMBOL;
|
||||
case U_MODIFIER_SYMBOL: return HB_UNICODE_GENERAL_CATEGORY_MODIFIER_SYMBOL;
|
||||
case U_OTHER_SYMBOL: return HB_UNICODE_GENERAL_CATEGORY_OTHER_SYMBOL;
|
||||
|
||||
case U_INITIAL_PUNCTUATION: return HB_UNICODE_GENERAL_CATEGORY_INITIAL_PUNCTUATION;
|
||||
case U_FINAL_PUNCTUATION: return HB_UNICODE_GENERAL_CATEGORY_FINAL_PUNCTUATION;
|
||||
}
|
||||
|
||||
return HB_UNICODE_GENERAL_CATEGORY_UNASSIGNED;
|
||||
}
|
||||
|
||||
static hb_codepoint_t
|
||||
hb_icu_unicode_mirroring (hb_unicode_funcs_t *ufuncs HB_UNUSED,
|
||||
hb_codepoint_t unicode,
|
||||
void *user_data HB_UNUSED)
|
||||
{
|
||||
return u_charMirror(unicode);
|
||||
}
|
||||
|
||||
static hb_script_t
|
||||
hb_icu_unicode_script (hb_unicode_funcs_t *ufuncs HB_UNUSED,
|
||||
hb_codepoint_t unicode,
|
||||
void *user_data HB_UNUSED)
|
||||
{
|
||||
UErrorCode status = U_ZERO_ERROR;
|
||||
UScriptCode scriptCode = uscript_getScript(unicode, &status);
|
||||
|
||||
if (unlikely (U_FAILURE (status)))
|
||||
return HB_SCRIPT_UNKNOWN;
|
||||
|
||||
return hb_icu_script_to_script (scriptCode);
|
||||
}
|
||||
|
||||
static hb_bool_t
|
||||
hb_icu_unicode_compose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
|
||||
hb_codepoint_t a,
|
||||
hb_codepoint_t b,
|
||||
hb_codepoint_t *ab,
|
||||
void *user_data)
|
||||
{
|
||||
const UNormalizer2 *normalizer = (const UNormalizer2 *) user_data;
|
||||
UChar32 ret = unorm2_composePair (normalizer, a, b);
|
||||
if (ret < 0) return false;
|
||||
*ab = ret;
|
||||
return true;
|
||||
}
|
||||
|
||||
static hb_bool_t
|
||||
hb_icu_unicode_decompose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
|
||||
hb_codepoint_t ab,
|
||||
hb_codepoint_t *a,
|
||||
hb_codepoint_t *b,
|
||||
void *user_data)
|
||||
{
|
||||
const UNormalizer2 *normalizer = (const UNormalizer2 *) user_data;
|
||||
UChar decomposed[4];
|
||||
int len;
|
||||
UErrorCode icu_err = U_ZERO_ERROR;
|
||||
len = unorm2_getRawDecomposition (normalizer, ab, decomposed,
|
||||
ARRAY_LENGTH (decomposed), &icu_err);
|
||||
if (U_FAILURE (icu_err) || len < 0) return false;
|
||||
|
||||
len = u_countChar32 (decomposed, len);
|
||||
if (len == 1)
|
||||
{
|
||||
U16_GET_UNSAFE (decomposed, 0, *a);
|
||||
*b = 0;
|
||||
return *a != ab;
|
||||
}
|
||||
else if (len == 2)
|
||||
{
|
||||
len = 0;
|
||||
U16_NEXT_UNSAFE (decomposed, len, *a);
|
||||
U16_NEXT_UNSAFE (decomposed, len, *b);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
static inline void free_static_icu_funcs ();
|
||||
|
||||
static struct hb_icu_unicode_funcs_lazy_loader_t : hb_unicode_funcs_lazy_loader_t<hb_icu_unicode_funcs_lazy_loader_t>
|
||||
{
|
||||
static hb_unicode_funcs_t *create ()
|
||||
{
|
||||
void *user_data = nullptr;
|
||||
UErrorCode icu_err = U_ZERO_ERROR;
|
||||
user_data = (void *) unorm2_getNFCInstance (&icu_err);
|
||||
assert (user_data);
|
||||
|
||||
hb_unicode_funcs_t *funcs = hb_unicode_funcs_create (nullptr);
|
||||
|
||||
hb_unicode_funcs_set_combining_class_func (funcs, hb_icu_unicode_combining_class, nullptr, nullptr);
|
||||
hb_unicode_funcs_set_general_category_func (funcs, hb_icu_unicode_general_category, nullptr, nullptr);
|
||||
hb_unicode_funcs_set_mirroring_func (funcs, hb_icu_unicode_mirroring, nullptr, nullptr);
|
||||
hb_unicode_funcs_set_script_func (funcs, hb_icu_unicode_script, nullptr, nullptr);
|
||||
hb_unicode_funcs_set_compose_func (funcs, hb_icu_unicode_compose, user_data, nullptr);
|
||||
hb_unicode_funcs_set_decompose_func (funcs, hb_icu_unicode_decompose, user_data, nullptr);
|
||||
|
||||
hb_unicode_funcs_make_immutable (funcs);
|
||||
|
||||
hb_atexit (free_static_icu_funcs);
|
||||
|
||||
return funcs;
|
||||
}
|
||||
} static_icu_funcs;
|
||||
|
||||
static inline
|
||||
void free_static_icu_funcs ()
|
||||
{
|
||||
static_icu_funcs.free_instance ();
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_icu_get_unicode_funcs:
|
||||
*
|
||||
* Fetches a Unicode-functions structure that is populated
|
||||
* with the appropriate ICU function for each method.
|
||||
*
|
||||
* Return value: (transfer none): a pointer to the #hb_unicode_funcs_t Unicode-functions structure
|
||||
*
|
||||
* Since: 0.9.38
|
||||
**/
|
||||
hb_unicode_funcs_t *
|
||||
hb_icu_get_unicode_funcs ()
|
||||
{
|
||||
return static_icu_funcs.get_unconst ();
|
||||
}
|
||||
|
||||
#ifdef HB_ICU_EXTRA_SEMI_IGNORED
|
||||
#pragma GCC diagnostic pop
|
||||
#endif
|
||||
|
||||
#endif
|
||||
52
modules/juce_graphics/fonts/harfbuzz/hb-icu.h
Normal file
52
modules/juce_graphics/fonts/harfbuzz/hb-icu.h
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
/*
|
||||
* Copyright © 2009 Red Hat, Inc.
|
||||
* Copyright © 2011 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Red Hat Author(s): Behdad Esfahbod
|
||||
* Google Author(s): Behdad Esfahbod
|
||||
*/
|
||||
|
||||
#ifndef HB_ICU_H
|
||||
#define HB_ICU_H
|
||||
|
||||
#include "hb.h"
|
||||
|
||||
#include <unicode/uscript.h>
|
||||
|
||||
HB_BEGIN_DECLS
|
||||
|
||||
|
||||
HB_EXTERN hb_script_t
|
||||
hb_icu_script_to_script (UScriptCode script);
|
||||
|
||||
HB_EXTERN UScriptCode
|
||||
hb_icu_script_from_script (hb_script_t script);
|
||||
|
||||
|
||||
HB_EXTERN hb_unicode_funcs_t *
|
||||
hb_icu_get_unicode_funcs (void);
|
||||
|
||||
|
||||
HB_END_DECLS
|
||||
|
||||
#endif /* HB_ICU_H */
|
||||
|
|
@ -324,6 +324,16 @@ struct hb_is_sink_of
|
|||
(hb_is_source_of(Iter, Item) && Iter::is_sorted_iterator)
|
||||
|
||||
|
||||
struct
|
||||
{
|
||||
template <typename Iterable,
|
||||
hb_requires (hb_is_iterable (Iterable))>
|
||||
unsigned operator () (const Iterable &_) const { return hb_len (hb_iter (_)); }
|
||||
|
||||
unsigned operator () (unsigned _) const { return _; }
|
||||
}
|
||||
HB_FUNCOBJ (hb_len_of);
|
||||
|
||||
/* Range-based 'for' for iterables. */
|
||||
|
||||
template <typename Iterable,
|
||||
|
|
|
|||
|
|
@ -88,25 +88,24 @@
|
|||
#define HB_MAX_LOOKUP_VISIT_COUNT 35000
|
||||
#endif
|
||||
|
||||
#ifndef HB_MAX_GRAPH_EDGE_COUNT
|
||||
#define HB_MAX_GRAPH_EDGE_COUNT 2048
|
||||
#endif
|
||||
|
||||
#ifndef HB_GLYF_VAR_COMPOSITE_MAX_AXES
|
||||
#define HB_GLYF_VAR_COMPOSITE_MAX_AXES 4096
|
||||
#ifndef HB_VAR_COMPOSITE_MAX_AXES
|
||||
#define HB_VAR_COMPOSITE_MAX_AXES 4096
|
||||
#endif
|
||||
|
||||
#ifndef HB_GLYF_MAX_POINTS
|
||||
#define HB_GLYF_MAX_POINTS 20000
|
||||
#endif
|
||||
|
||||
#ifndef HB_GLYF_MAX_EDGE_COUNT
|
||||
#define HB_GLYF_MAX_EDGE_COUNT 1024
|
||||
#define HB_GLYF_MAX_POINTS 200000
|
||||
#endif
|
||||
|
||||
#ifndef HB_CFF_MAX_OPS
|
||||
#define HB_CFF_MAX_OPS 10000
|
||||
#endif
|
||||
|
||||
#ifndef HB_COLRV1_MAX_EDGE_COUNT
|
||||
#define HB_COLRV1_MAX_EDGE_COUNT 65536
|
||||
#ifndef HB_MAX_COMPOSITE_OPERATIONS_PER_GLYPH
|
||||
#define HB_MAX_COMPOSITE_OPERATIONS_PER_GLYPH 64
|
||||
#endif
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -70,9 +70,9 @@ struct hb_hashmap_t
|
|||
|
||||
alloc (o.population); hb_copy (o, *this);
|
||||
}
|
||||
hb_hashmap_t (hb_hashmap_t&& o) : hb_hashmap_t () { hb_swap (*this, o); }
|
||||
hb_hashmap_t (hb_hashmap_t&& o) noexcept : hb_hashmap_t () { hb_swap (*this, o); }
|
||||
hb_hashmap_t& operator= (const hb_hashmap_t& o) { reset (); alloc (o.population); hb_copy (o, *this); return *this; }
|
||||
hb_hashmap_t& operator= (hb_hashmap_t&& o) { hb_swap (*this, o); return *this; }
|
||||
hb_hashmap_t& operator= (hb_hashmap_t&& o) noexcept { hb_swap (*this, o); return *this; }
|
||||
|
||||
hb_hashmap_t (std::initializer_list<hb_pair_t<K, V>> lst) : hb_hashmap_t ()
|
||||
{
|
||||
|
|
@ -137,26 +137,23 @@ struct hb_hashmap_t
|
|||
};
|
||||
|
||||
hb_object_header_t header;
|
||||
unsigned int successful : 1; /* Allocations successful */
|
||||
unsigned int population : 31; /* Not including tombstones. */
|
||||
bool successful; /* Allocations successful */
|
||||
unsigned short max_chain_length;
|
||||
unsigned int population; /* Not including tombstones. */
|
||||
unsigned int occupancy; /* Including tombstones. */
|
||||
unsigned int mask;
|
||||
unsigned int prime;
|
||||
unsigned int max_chain_length;
|
||||
item_t *items;
|
||||
|
||||
friend void swap (hb_hashmap_t& a, hb_hashmap_t& b)
|
||||
friend void swap (hb_hashmap_t& a, hb_hashmap_t& b) noexcept
|
||||
{
|
||||
if (unlikely (!a.successful || !b.successful))
|
||||
return;
|
||||
unsigned tmp = a.population;
|
||||
a.population = b.population;
|
||||
b.population = tmp;
|
||||
//hb_swap (a.population, b.population);
|
||||
hb_swap (a.max_chain_length, b.max_chain_length);
|
||||
hb_swap (a.population, b.population);
|
||||
hb_swap (a.occupancy, b.occupancy);
|
||||
hb_swap (a.mask, b.mask);
|
||||
hb_swap (a.prime, b.prime);
|
||||
hb_swap (a.max_chain_length, b.max_chain_length);
|
||||
hb_swap (a.items, b.items);
|
||||
}
|
||||
void init ()
|
||||
|
|
@ -164,10 +161,10 @@ struct hb_hashmap_t
|
|||
hb_object_init (this);
|
||||
|
||||
successful = true;
|
||||
max_chain_length = 0;
|
||||
population = occupancy = 0;
|
||||
mask = 0;
|
||||
prime = 0;
|
||||
max_chain_length = 0;
|
||||
items = nullptr;
|
||||
}
|
||||
void fini ()
|
||||
|
|
@ -558,7 +555,7 @@ struct hb_map_t : hb_hashmap_t<hb_codepoint_t,
|
|||
~hb_map_t () = default;
|
||||
hb_map_t () : hashmap () {}
|
||||
hb_map_t (const hb_map_t &o) : hashmap ((hashmap &) o) {}
|
||||
hb_map_t (hb_map_t &&o) : hashmap (std::move ((hashmap &) o)) {}
|
||||
hb_map_t (hb_map_t &&o) noexcept : hashmap (std::move ((hashmap &) o)) {}
|
||||
hb_map_t& operator= (const hb_map_t&) = default;
|
||||
hb_map_t& operator= (hb_map_t&&) = default;
|
||||
hb_map_t (std::initializer_list<hb_codepoint_pair_t> lst) : hashmap (lst) {}
|
||||
|
|
|
|||
|
|
@ -325,7 +325,7 @@ retry:
|
|||
hb_user_data_array_t *user_data = obj->header.user_data.get_acquire ();
|
||||
if (unlikely (!user_data))
|
||||
{
|
||||
user_data = (hb_user_data_array_t *) hb_calloc (sizeof (hb_user_data_array_t), 1);
|
||||
user_data = (hb_user_data_array_t *) hb_calloc (1, sizeof (hb_user_data_array_t));
|
||||
if (unlikely (!user_data))
|
||||
return false;
|
||||
user_data->init ();
|
||||
|
|
|
|||
|
|
@ -132,6 +132,89 @@ struct HBUINT15 : HBUINT16
|
|||
DEFINE_SIZE_STATIC (2);
|
||||
};
|
||||
|
||||
/* 32-bit unsigned integer with variable encoding. */
|
||||
struct HBUINT32VAR
|
||||
{
|
||||
unsigned get_size () const
|
||||
{
|
||||
unsigned b0 = v[0];
|
||||
if (b0 < 0x80)
|
||||
return 1;
|
||||
else if (b0 < 0xC0)
|
||||
return 2;
|
||||
else if (b0 < 0xE0)
|
||||
return 3;
|
||||
else if (b0 < 0xF0)
|
||||
return 4;
|
||||
else
|
||||
return 5;
|
||||
}
|
||||
|
||||
static unsigned get_size (uint32_t v)
|
||||
{
|
||||
if (v < 0x80)
|
||||
return 1;
|
||||
else if (v < 0x4000)
|
||||
return 2;
|
||||
else if (v < 0x200000)
|
||||
return 3;
|
||||
else if (v < 0x10000000)
|
||||
return 4;
|
||||
else
|
||||
return 5;
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_range (v, 1) &&
|
||||
hb_barrier () &&
|
||||
c->check_range (v, get_size ()));
|
||||
}
|
||||
|
||||
operator uint32_t () const
|
||||
{
|
||||
unsigned b0 = v[0];
|
||||
if (b0 < 0x80)
|
||||
return b0;
|
||||
else if (b0 < 0xC0)
|
||||
return ((b0 & 0x3F) << 8) | v[1];
|
||||
else if (b0 < 0xE0)
|
||||
return ((b0 & 0x1F) << 16) | (v[1] << 8) | v[2];
|
||||
else if (b0 < 0xF0)
|
||||
return ((b0 & 0x0F) << 24) | (v[1] << 16) | (v[2] << 8) | v[3];
|
||||
else
|
||||
return (v[1] << 24) | (v[2] << 16) | (v[3] << 8) | v[4];
|
||||
}
|
||||
|
||||
static bool serialize (hb_serialize_context_t *c, uint32_t v)
|
||||
{
|
||||
unsigned len = get_size (v);
|
||||
|
||||
unsigned char *buf = c->allocate_size<unsigned char> (len, false);
|
||||
if (unlikely (!buf))
|
||||
return false;
|
||||
|
||||
unsigned char *p = buf + len;
|
||||
for (unsigned i = 0; i < len; i++)
|
||||
{
|
||||
*--p = v & 0xFF;
|
||||
v >>= 8;
|
||||
}
|
||||
|
||||
if (len > 1)
|
||||
buf[0] |= ((1 << (len - 1)) - 1) << (9 - len);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
protected:
|
||||
unsigned char v[1];
|
||||
|
||||
public:
|
||||
DEFINE_SIZE_MIN (1);
|
||||
};
|
||||
|
||||
/* 16-bit signed integer (HBINT16) that describes a quantity in FUnits. */
|
||||
typedef HBINT16 FWORD;
|
||||
|
||||
|
|
@ -149,6 +232,7 @@ struct HBFixed : Type
|
|||
|
||||
operator signed () const = delete;
|
||||
operator unsigned () const = delete;
|
||||
explicit operator float () const { return to_float (); }
|
||||
typename Type::type to_int () const { return Type::v; }
|
||||
void set_int (typename Type::type i ) { Type::v = i; }
|
||||
float to_float (float offset = 0) const { return ((int32_t) Type::v + offset) / shift; }
|
||||
|
|
@ -756,6 +840,7 @@ template <typename Type> using Array32Of = ArrayOf<Type, HBUINT32>;
|
|||
using PString = ArrayOf<HBUINT8, HBUINT8>;
|
||||
|
||||
/* Array of Offset's */
|
||||
template <typename Type> using Array8OfOffset24To = ArrayOf<OffsetTo<Type, HBUINT24>, HBUINT8>;
|
||||
template <typename Type> using Array16OfOffset16To = ArrayOf<OffsetTo<Type, HBUINT16>, HBUINT16>;
|
||||
template <typename Type> using Array16OfOffset32To = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT16>;
|
||||
template <typename Type> using Array32OfOffset32To = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32>;
|
||||
|
|
@ -985,6 +1070,13 @@ struct SortedArrayOf : ArrayOf<Type, LenType>
|
|||
return_trace (ret);
|
||||
}
|
||||
|
||||
SortedArrayOf* copy (hb_serialize_context_t *c) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
SortedArrayOf* out = reinterpret_cast<SortedArrayOf *> (ArrayOf<Type, LenType>::copy (c));
|
||||
return_trace (out);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
Type &bsearch (const T &x, Type ¬_found = Crap (Type))
|
||||
{ return *as_array ().bsearch (x, ¬_found); }
|
||||
|
|
@ -1156,6 +1248,638 @@ struct VarSizedBinSearchArrayOf
|
|||
};
|
||||
|
||||
|
||||
/* CFF INDEX */
|
||||
|
||||
template <typename COUNT>
|
||||
struct CFFIndex
|
||||
{
|
||||
unsigned int offset_array_size () const
|
||||
{ return offSize * (count + 1); }
|
||||
|
||||
template <typename Iterable,
|
||||
hb_requires (hb_is_iterable (Iterable))>
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
const Iterable &iterable,
|
||||
const unsigned *p_data_size = nullptr,
|
||||
unsigned min_off_size = 0)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
unsigned data_size;
|
||||
if (p_data_size)
|
||||
data_size = *p_data_size;
|
||||
else
|
||||
total_size (iterable, &data_size);
|
||||
|
||||
auto it = hb_iter (iterable);
|
||||
if (unlikely (!serialize_header (c, +it, data_size, min_off_size))) return_trace (false);
|
||||
unsigned char *ret = c->allocate_size<unsigned char> (data_size, false);
|
||||
if (unlikely (!ret)) return_trace (false);
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
unsigned len = _.length;
|
||||
if (!len)
|
||||
continue;
|
||||
if (len <= 1)
|
||||
{
|
||||
*ret++ = *_.arrayZ;
|
||||
continue;
|
||||
}
|
||||
hb_memcpy (ret, _.arrayZ, len);
|
||||
ret += len;
|
||||
}
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
template <typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize_header (hb_serialize_context_t *c,
|
||||
Iterator it,
|
||||
unsigned data_size,
|
||||
unsigned min_off_size = 0)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
|
||||
unsigned off_size = (hb_bit_storage (data_size + 1) + 7) / 8;
|
||||
off_size = hb_max(min_off_size, off_size);
|
||||
|
||||
/* serialize CFFIndex header */
|
||||
if (unlikely (!c->extend_min (this))) return_trace (false);
|
||||
this->count = hb_len (it);
|
||||
if (!this->count) return_trace (true);
|
||||
if (unlikely (!c->extend (this->offSize))) return_trace (false);
|
||||
this->offSize = off_size;
|
||||
if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1), false)))
|
||||
return_trace (false);
|
||||
|
||||
/* serialize indices */
|
||||
unsigned int offset = 1;
|
||||
if (HB_OPTIMIZE_SIZE_VAL)
|
||||
{
|
||||
unsigned int i = 0;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
set_offset_at (i++, offset);
|
||||
offset += hb_len_of (_);
|
||||
}
|
||||
set_offset_at (i, offset);
|
||||
}
|
||||
else
|
||||
switch (off_size)
|
||||
{
|
||||
case 1:
|
||||
{
|
||||
HBUINT8 *p = (HBUINT8 *) offsets;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
*p++ = offset;
|
||||
offset += hb_len_of (_);
|
||||
}
|
||||
*p = offset;
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
{
|
||||
HBUINT16 *p = (HBUINT16 *) offsets;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
*p++ = offset;
|
||||
offset += hb_len_of (_);
|
||||
}
|
||||
*p = offset;
|
||||
}
|
||||
break;
|
||||
case 3:
|
||||
{
|
||||
HBUINT24 *p = (HBUINT24 *) offsets;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
*p++ = offset;
|
||||
offset += hb_len_of (_);
|
||||
}
|
||||
*p = offset;
|
||||
}
|
||||
break;
|
||||
case 4:
|
||||
{
|
||||
HBUINT32 *p = (HBUINT32 *) offsets;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
*p++ = offset;
|
||||
offset += hb_len_of (_);
|
||||
}
|
||||
*p = offset;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
assert (offset == data_size + 1);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
template <typename Iterable,
|
||||
hb_requires (hb_is_iterable (Iterable))>
|
||||
static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr, unsigned min_off_size = 0)
|
||||
{
|
||||
auto it = + hb_iter (iterable);
|
||||
if (!it)
|
||||
{
|
||||
if (data_size) *data_size = 0;
|
||||
return min_size;
|
||||
}
|
||||
|
||||
unsigned total = 0;
|
||||
for (const auto &_ : +it)
|
||||
total += hb_len_of (_);
|
||||
|
||||
if (data_size) *data_size = total;
|
||||
|
||||
unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
|
||||
off_size = hb_max(min_off_size, off_size);
|
||||
|
||||
return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total;
|
||||
}
|
||||
|
||||
void set_offset_at (unsigned int index, unsigned int offset)
|
||||
{
|
||||
assert (index <= count);
|
||||
|
||||
unsigned int size = offSize;
|
||||
const HBUINT8 *p = offsets;
|
||||
switch (size)
|
||||
{
|
||||
case 1: ((HBUINT8 *) p)[index] = offset; break;
|
||||
case 2: ((HBUINT16 *) p)[index] = offset; break;
|
||||
case 3: ((HBUINT24 *) p)[index] = offset; break;
|
||||
case 4: ((HBUINT32 *) p)[index] = offset; break;
|
||||
default: return;
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
unsigned int offset_at (unsigned int index) const
|
||||
{
|
||||
assert (index <= count);
|
||||
|
||||
unsigned int size = offSize;
|
||||
const HBUINT8 *p = offsets;
|
||||
switch (size)
|
||||
{
|
||||
case 1: return ((HBUINT8 *) p)[index];
|
||||
case 2: return ((HBUINT16 *) p)[index];
|
||||
case 3: return ((HBUINT24 *) p)[index];
|
||||
case 4: return ((HBUINT32 *) p)[index];
|
||||
default: return 0;
|
||||
}
|
||||
}
|
||||
|
||||
const unsigned char *data_base () const
|
||||
{ return (const unsigned char *) this + min_size + offSize.static_size - 1 + offset_array_size (); }
|
||||
public:
|
||||
|
||||
hb_ubytes_t operator [] (unsigned int index) const
|
||||
{
|
||||
if (unlikely (index >= count)) return hb_ubytes_t ();
|
||||
_hb_compiler_memory_r_barrier ();
|
||||
unsigned offset0 = offset_at (index);
|
||||
unsigned offset1 = offset_at (index + 1);
|
||||
if (unlikely (offset1 < offset0 || offset1 > offset_at (count)))
|
||||
return hb_ubytes_t ();
|
||||
return hb_ubytes_t (data_base () + offset0, offset1 - offset0);
|
||||
}
|
||||
|
||||
unsigned int get_size () const
|
||||
{
|
||||
if (count)
|
||||
return min_size + offSize.static_size + offset_array_size () + (offset_at (count) - 1);
|
||||
return min_size; /* empty CFFIndex contains count only */
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (likely (c->check_struct (this) &&
|
||||
hb_barrier () &&
|
||||
(count == 0 || /* empty INDEX */
|
||||
(count < count + 1u &&
|
||||
c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 &&
|
||||
c->check_array (offsets, offSize, count + 1u) &&
|
||||
c->check_range (data_base (), offset_at (count))))));
|
||||
}
|
||||
|
||||
public:
|
||||
COUNT count; /* Number of object data. Note there are (count+1) offsets */
|
||||
private:
|
||||
HBUINT8 offSize; /* The byte size of each offset in the offsets array. */
|
||||
HBUINT8 offsets[HB_VAR_ARRAY];
|
||||
/* The array of (count + 1) offsets into objects array (1-base). */
|
||||
/* HBUINT8 data[HB_VAR_ARRAY]; Object data */
|
||||
public:
|
||||
DEFINE_SIZE_MIN (COUNT::static_size);
|
||||
};
|
||||
typedef CFFIndex<HBUINT16> CFF1Index;
|
||||
typedef CFFIndex<HBUINT32> CFF2Index;
|
||||
|
||||
|
||||
/* TupleValues */
|
||||
struct TupleValues
|
||||
{
|
||||
enum packed_value_flag_t
|
||||
{
|
||||
VALUES_ARE_ZEROS = 0x80,
|
||||
VALUES_ARE_BYTES = 0x00,
|
||||
VALUES_ARE_WORDS = 0x40,
|
||||
VALUES_ARE_LONGS = 0xC0,
|
||||
VALUES_SIZE_MASK = 0xC0,
|
||||
VALUE_RUN_COUNT_MASK = 0x3F
|
||||
};
|
||||
|
||||
static unsigned compile (hb_array_t<const int> values, /* IN */
|
||||
hb_array_t<char> encoded_bytes /* OUT */)
|
||||
{
|
||||
unsigned num_values = values.length;
|
||||
unsigned encoded_len = 0;
|
||||
unsigned i = 0;
|
||||
while (i < num_values)
|
||||
{
|
||||
int val = values.arrayZ[i];
|
||||
if (val == 0)
|
||||
encoded_len += encode_value_run_as_zeroes (i, encoded_bytes.sub_array (encoded_len), values);
|
||||
else if (val >= -128 && val <= 127)
|
||||
encoded_len += encode_value_run_as_bytes (i, encoded_bytes.sub_array (encoded_len), values);
|
||||
else if (val >= -32768 && val <= 32767)
|
||||
encoded_len += encode_value_run_as_words (i, encoded_bytes.sub_array (encoded_len), values);
|
||||
else
|
||||
encoded_len += encode_value_run_as_longs (i, encoded_bytes.sub_array (encoded_len), values);
|
||||
}
|
||||
return encoded_len;
|
||||
}
|
||||
|
||||
static unsigned encode_value_run_as_zeroes (unsigned& i,
|
||||
hb_array_t<char> encoded_bytes,
|
||||
hb_array_t<const int> values)
|
||||
{
|
||||
unsigned num_values = values.length;
|
||||
unsigned run_length = 0;
|
||||
auto it = encoded_bytes.iter ();
|
||||
unsigned encoded_len = 0;
|
||||
while (i < num_values && values.arrayZ[i] == 0)
|
||||
{
|
||||
i++;
|
||||
run_length++;
|
||||
}
|
||||
|
||||
while (run_length >= 64)
|
||||
{
|
||||
*it++ = char (VALUES_ARE_ZEROS | 63);
|
||||
run_length -= 64;
|
||||
encoded_len++;
|
||||
}
|
||||
|
||||
if (run_length)
|
||||
{
|
||||
*it++ = char (VALUES_ARE_ZEROS | (run_length - 1));
|
||||
encoded_len++;
|
||||
}
|
||||
return encoded_len;
|
||||
}
|
||||
|
||||
static unsigned encode_value_run_as_bytes (unsigned &i,
|
||||
hb_array_t<char> encoded_bytes,
|
||||
hb_array_t<const int> values)
|
||||
{
|
||||
unsigned start = i;
|
||||
unsigned num_values = values.length;
|
||||
while (i < num_values)
|
||||
{
|
||||
int val = values.arrayZ[i];
|
||||
if (val > 127 || val < -128)
|
||||
break;
|
||||
|
||||
/* from fonttools: if there're 2 or more zeros in a sequence,
|
||||
* it is better to start a new run to save bytes. */
|
||||
if (val == 0 && i + 1 < num_values && values.arrayZ[i+1] == 0)
|
||||
break;
|
||||
|
||||
i++;
|
||||
}
|
||||
unsigned run_length = i - start;
|
||||
|
||||
unsigned encoded_len = 0;
|
||||
auto it = encoded_bytes.iter ();
|
||||
|
||||
while (run_length >= 64)
|
||||
{
|
||||
*it++ = (VALUES_ARE_BYTES | 63);
|
||||
encoded_len++;
|
||||
|
||||
for (unsigned j = 0; j < 64; j++)
|
||||
{
|
||||
*it++ = static_cast<char> (values.arrayZ[start + j]);
|
||||
encoded_len++;
|
||||
}
|
||||
|
||||
start += 64;
|
||||
run_length -= 64;
|
||||
}
|
||||
|
||||
if (run_length)
|
||||
{
|
||||
*it++ = (VALUES_ARE_BYTES | (run_length - 1));
|
||||
encoded_len++;
|
||||
|
||||
while (start < i)
|
||||
{
|
||||
*it++ = static_cast<char> (values.arrayZ[start++]);
|
||||
encoded_len++;
|
||||
}
|
||||
}
|
||||
|
||||
return encoded_len;
|
||||
}
|
||||
|
||||
static unsigned encode_value_run_as_words (unsigned &i,
|
||||
hb_array_t<char> encoded_bytes,
|
||||
hb_array_t<const int> values)
|
||||
{
|
||||
unsigned start = i;
|
||||
unsigned num_values = values.length;
|
||||
while (i < num_values)
|
||||
{
|
||||
int val = values.arrayZ[i];
|
||||
|
||||
/* start a new run for a single zero value*/
|
||||
if (val == 0) break;
|
||||
|
||||
/* from fonttools: continue word-encoded run if there's only one
|
||||
* single value in the range [-128, 127] because it is more compact.
|
||||
* Only start a new run when there're 2 continuous such values. */
|
||||
if (val >= -128 && val <= 127 &&
|
||||
i + 1 < num_values &&
|
||||
values.arrayZ[i+1] >= -128 && values.arrayZ[i+1] <= 127)
|
||||
break;
|
||||
|
||||
i++;
|
||||
}
|
||||
|
||||
unsigned run_length = i - start;
|
||||
auto it = encoded_bytes.iter ();
|
||||
unsigned encoded_len = 0;
|
||||
while (run_length >= 64)
|
||||
{
|
||||
*it++ = (VALUES_ARE_WORDS | 63);
|
||||
encoded_len++;
|
||||
|
||||
for (unsigned j = 0; j < 64; j++)
|
||||
{
|
||||
int16_t value_val = values.arrayZ[start + j];
|
||||
*it++ = static_cast<char> (value_val >> 8);
|
||||
*it++ = static_cast<char> (value_val & 0xFF);
|
||||
|
||||
encoded_len += 2;
|
||||
}
|
||||
|
||||
start += 64;
|
||||
run_length -= 64;
|
||||
}
|
||||
|
||||
if (run_length)
|
||||
{
|
||||
*it++ = (VALUES_ARE_WORDS | (run_length - 1));
|
||||
encoded_len++;
|
||||
while (start < i)
|
||||
{
|
||||
int16_t value_val = values.arrayZ[start++];
|
||||
*it++ = static_cast<char> (value_val >> 8);
|
||||
*it++ = static_cast<char> (value_val & 0xFF);
|
||||
|
||||
encoded_len += 2;
|
||||
}
|
||||
}
|
||||
return encoded_len;
|
||||
}
|
||||
|
||||
static unsigned encode_value_run_as_longs (unsigned &i,
|
||||
hb_array_t<char> encoded_bytes,
|
||||
hb_array_t<const int> values)
|
||||
{
|
||||
unsigned start = i;
|
||||
unsigned num_values = values.length;
|
||||
while (i < num_values)
|
||||
{
|
||||
int val = values.arrayZ[i];
|
||||
|
||||
if (val >= -32768 && val <= 32767)
|
||||
break;
|
||||
|
||||
i++;
|
||||
}
|
||||
|
||||
unsigned run_length = i - start;
|
||||
auto it = encoded_bytes.iter ();
|
||||
unsigned encoded_len = 0;
|
||||
while (run_length >= 64)
|
||||
{
|
||||
*it++ = (VALUES_ARE_LONGS | 63);
|
||||
encoded_len++;
|
||||
|
||||
for (unsigned j = 0; j < 64; j++)
|
||||
{
|
||||
int32_t value_val = values.arrayZ[start + j];
|
||||
*it++ = static_cast<char> (value_val >> 24);
|
||||
*it++ = static_cast<char> (value_val >> 16);
|
||||
*it++ = static_cast<char> (value_val >> 8);
|
||||
*it++ = static_cast<char> (value_val & 0xFF);
|
||||
|
||||
encoded_len += 4;
|
||||
}
|
||||
|
||||
start += 64;
|
||||
run_length -= 64;
|
||||
}
|
||||
|
||||
if (run_length)
|
||||
{
|
||||
*it++ = (VALUES_ARE_LONGS | (run_length - 1));
|
||||
encoded_len++;
|
||||
while (start < i)
|
||||
{
|
||||
int32_t value_val = values.arrayZ[start++];
|
||||
*it++ = static_cast<char> (value_val >> 24);
|
||||
*it++ = static_cast<char> (value_val >> 16);
|
||||
*it++ = static_cast<char> (value_val >> 8);
|
||||
*it++ = static_cast<char> (value_val & 0xFF);
|
||||
|
||||
encoded_len += 4;
|
||||
}
|
||||
}
|
||||
return encoded_len;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static bool decompile (const HBUINT8 *&p /* IN/OUT */,
|
||||
hb_vector_t<T> &values /* IN/OUT */,
|
||||
const HBUINT8 *end,
|
||||
bool consume_all = false)
|
||||
{
|
||||
unsigned i = 0;
|
||||
unsigned count = consume_all ? UINT_MAX : values.length;
|
||||
if (consume_all)
|
||||
values.alloc ((end - p) / 2);
|
||||
while (i < count)
|
||||
{
|
||||
if (unlikely (p + 1 > end)) return consume_all;
|
||||
unsigned control = *p++;
|
||||
unsigned run_count = (control & VALUE_RUN_COUNT_MASK) + 1;
|
||||
if (consume_all)
|
||||
{
|
||||
if (unlikely (!values.resize (values.length + run_count, false)))
|
||||
return false;
|
||||
}
|
||||
unsigned stop = i + run_count;
|
||||
if (unlikely (stop > count)) return false;
|
||||
if ((control & VALUES_SIZE_MASK) == VALUES_ARE_ZEROS)
|
||||
{
|
||||
for (; i < stop; i++)
|
||||
values.arrayZ[i] = 0;
|
||||
}
|
||||
else if ((control & VALUES_SIZE_MASK) == VALUES_ARE_WORDS)
|
||||
{
|
||||
if (unlikely (p + run_count * HBINT16::static_size > end)) return false;
|
||||
for (; i < stop; i++)
|
||||
{
|
||||
values.arrayZ[i] = * (const HBINT16 *) p;
|
||||
p += HBINT16::static_size;
|
||||
}
|
||||
}
|
||||
else if ((control & VALUES_SIZE_MASK) == VALUES_ARE_LONGS)
|
||||
{
|
||||
if (unlikely (p + run_count * HBINT32::static_size > end)) return false;
|
||||
for (; i < stop; i++)
|
||||
{
|
||||
values.arrayZ[i] = * (const HBINT32 *) p;
|
||||
p += HBINT32::static_size;
|
||||
}
|
||||
}
|
||||
else if ((control & VALUES_SIZE_MASK) == VALUES_ARE_BYTES)
|
||||
{
|
||||
if (unlikely (p + run_count > end)) return false;
|
||||
for (; i < stop; i++)
|
||||
{
|
||||
values.arrayZ[i] = * (const HBINT8 *) p++;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
struct iter_t : hb_iter_with_fallback_t<iter_t, int>
|
||||
{
|
||||
iter_t (const unsigned char *p_, unsigned len_)
|
||||
: p (p_), end (p_ + len_)
|
||||
{ if (ensure_run ()) read_value (); }
|
||||
|
||||
private:
|
||||
const unsigned char *p;
|
||||
const unsigned char * const end;
|
||||
int current_value = 0;
|
||||
signed run_count = 0;
|
||||
unsigned width = 0;
|
||||
|
||||
bool ensure_run ()
|
||||
{
|
||||
if (likely (run_count > 0)) return true;
|
||||
|
||||
if (unlikely (p >= end))
|
||||
{
|
||||
run_count = 0;
|
||||
current_value = 0;
|
||||
return false;
|
||||
}
|
||||
|
||||
unsigned control = *p++;
|
||||
run_count = (control & VALUE_RUN_COUNT_MASK) + 1;
|
||||
width = control & VALUES_SIZE_MASK;
|
||||
switch (width)
|
||||
{
|
||||
case VALUES_ARE_ZEROS: width = 0; break;
|
||||
case VALUES_ARE_BYTES: width = HBINT8::static_size; break;
|
||||
case VALUES_ARE_WORDS: width = HBINT16::static_size; break;
|
||||
case VALUES_ARE_LONGS: width = HBINT32::static_size; break;
|
||||
default: assert (false);
|
||||
}
|
||||
|
||||
if (unlikely (p + run_count * width > end))
|
||||
{
|
||||
run_count = 0;
|
||||
current_value = 0;
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
void read_value ()
|
||||
{
|
||||
switch (width)
|
||||
{
|
||||
case 0: current_value = 0; break;
|
||||
case 1: current_value = * (const HBINT8 *) p; break;
|
||||
case 2: current_value = * (const HBINT16 *) p; break;
|
||||
case 4: current_value = * (const HBINT32 *) p; break;
|
||||
}
|
||||
p += width;
|
||||
}
|
||||
|
||||
public:
|
||||
|
||||
typedef int __item_t__;
|
||||
__item_t__ __item__ () const
|
||||
{ return current_value; }
|
||||
|
||||
bool __more__ () const { return run_count || p < end; }
|
||||
void __next__ ()
|
||||
{
|
||||
run_count--;
|
||||
if (unlikely (!ensure_run ()))
|
||||
return;
|
||||
read_value ();
|
||||
}
|
||||
void __forward__ (unsigned n)
|
||||
{
|
||||
if (unlikely (!ensure_run ()))
|
||||
return;
|
||||
while (n)
|
||||
{
|
||||
unsigned i = hb_min (n, (unsigned) run_count);
|
||||
run_count -= i;
|
||||
n -= i;
|
||||
p += (i - 1) * width;
|
||||
if (unlikely (!ensure_run ()))
|
||||
return;
|
||||
read_value ();
|
||||
}
|
||||
}
|
||||
bool operator != (const iter_t& o) const
|
||||
{ return p != o.p || run_count != o.run_count; }
|
||||
iter_t __end__ () const
|
||||
{
|
||||
iter_t it (end, 0);
|
||||
return it;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
struct TupleList : CFF2Index
|
||||
{
|
||||
TupleValues::iter_t operator [] (unsigned i) const
|
||||
{
|
||||
auto bytes = CFF2Index::operator [] (i);
|
||||
return TupleValues::iter_t (bytes.arrayZ, bytes.length);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
} /* namespace OT */
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -41,10 +41,21 @@ using namespace OT;
|
|||
using objidx_t = hb_serialize_context_t::objidx_t;
|
||||
using whence_t = hb_serialize_context_t::whence_t;
|
||||
|
||||
/* utility macro */
|
||||
template<typename Type>
|
||||
static inline const Type& StructAtOffsetOrNull (const void *P, unsigned int offset)
|
||||
{ return offset ? StructAtOffset<Type> (P, offset) : Null (Type); }
|
||||
/* CFF offsets can technically be negative */
|
||||
template<typename Type, typename ...Ts>
|
||||
static inline const Type& StructAtOffsetOrNull (const void *P, int offset, hb_sanitize_context_t &sc, Ts&&... ds)
|
||||
{
|
||||
if (!offset) return Null (Type);
|
||||
|
||||
const char *p = (const char *) P + offset;
|
||||
if (!sc.check_point (p)) return Null (Type);
|
||||
|
||||
const Type &obj = *reinterpret_cast<const Type *> (p);
|
||||
if (!obj.sanitize (&sc, std::forward<Ts> (ds)...)) return Null (Type);
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
|
||||
struct code_pair_t
|
||||
{
|
||||
|
|
@ -57,247 +68,6 @@ using str_buff_t = hb_vector_t<unsigned char>;
|
|||
using str_buff_vec_t = hb_vector_t<str_buff_t>;
|
||||
using glyph_to_sid_map_t = hb_vector_t<code_pair_t>;
|
||||
|
||||
struct length_f_t
|
||||
{
|
||||
template <typename Iterable,
|
||||
hb_requires (hb_is_iterable (Iterable))>
|
||||
unsigned operator () (const Iterable &_) const { return hb_len (hb_iter (_)); }
|
||||
|
||||
unsigned operator () (unsigned _) const { return _; }
|
||||
}
|
||||
HB_FUNCOBJ (length_f);
|
||||
|
||||
/* CFF INDEX */
|
||||
template <typename COUNT>
|
||||
struct CFFIndex
|
||||
{
|
||||
unsigned int offset_array_size () const
|
||||
{ return offSize * (count + 1); }
|
||||
|
||||
template <typename Iterable,
|
||||
hb_requires (hb_is_iterable (Iterable))>
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
const Iterable &iterable,
|
||||
const unsigned *p_data_size = nullptr,
|
||||
unsigned min_off_size = 0)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
unsigned data_size;
|
||||
if (p_data_size)
|
||||
data_size = *p_data_size;
|
||||
else
|
||||
total_size (iterable, &data_size);
|
||||
|
||||
auto it = hb_iter (iterable);
|
||||
if (unlikely (!serialize_header (c, +it, data_size, min_off_size))) return_trace (false);
|
||||
unsigned char *ret = c->allocate_size<unsigned char> (data_size, false);
|
||||
if (unlikely (!ret)) return_trace (false);
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
unsigned len = _.length;
|
||||
if (!len)
|
||||
continue;
|
||||
if (len <= 1)
|
||||
{
|
||||
*ret++ = *_.arrayZ;
|
||||
continue;
|
||||
}
|
||||
hb_memcpy (ret, _.arrayZ, len);
|
||||
ret += len;
|
||||
}
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
template <typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize_header (hb_serialize_context_t *c,
|
||||
Iterator it,
|
||||
unsigned data_size,
|
||||
unsigned min_off_size = 0)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
|
||||
unsigned off_size = (hb_bit_storage (data_size + 1) + 7) / 8;
|
||||
off_size = hb_max(min_off_size, off_size);
|
||||
|
||||
/* serialize CFFIndex header */
|
||||
if (unlikely (!c->extend_min (this))) return_trace (false);
|
||||
this->count = hb_len (it);
|
||||
if (!this->count) return_trace (true);
|
||||
if (unlikely (!c->extend (this->offSize))) return_trace (false);
|
||||
this->offSize = off_size;
|
||||
if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1), false)))
|
||||
return_trace (false);
|
||||
|
||||
/* serialize indices */
|
||||
unsigned int offset = 1;
|
||||
if (HB_OPTIMIZE_SIZE_VAL)
|
||||
{
|
||||
unsigned int i = 0;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
set_offset_at (i++, offset);
|
||||
offset += length_f (_);
|
||||
}
|
||||
set_offset_at (i, offset);
|
||||
}
|
||||
else
|
||||
switch (off_size)
|
||||
{
|
||||
case 1:
|
||||
{
|
||||
HBUINT8 *p = (HBUINT8 *) offsets;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
*p++ = offset;
|
||||
offset += length_f (_);
|
||||
}
|
||||
*p = offset;
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
{
|
||||
HBUINT16 *p = (HBUINT16 *) offsets;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
*p++ = offset;
|
||||
offset += length_f (_);
|
||||
}
|
||||
*p = offset;
|
||||
}
|
||||
break;
|
||||
case 3:
|
||||
{
|
||||
HBUINT24 *p = (HBUINT24 *) offsets;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
*p++ = offset;
|
||||
offset += length_f (_);
|
||||
}
|
||||
*p = offset;
|
||||
}
|
||||
break;
|
||||
case 4:
|
||||
{
|
||||
HBUINT32 *p = (HBUINT32 *) offsets;
|
||||
for (const auto &_ : +it)
|
||||
{
|
||||
*p++ = offset;
|
||||
offset += length_f (_);
|
||||
}
|
||||
*p = offset;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
assert (offset == data_size + 1);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
template <typename Iterable,
|
||||
hb_requires (hb_is_iterable (Iterable))>
|
||||
static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr, unsigned min_off_size = 0)
|
||||
{
|
||||
auto it = + hb_iter (iterable);
|
||||
if (!it)
|
||||
{
|
||||
if (data_size) *data_size = 0;
|
||||
return min_size;
|
||||
}
|
||||
|
||||
unsigned total = 0;
|
||||
for (const auto &_ : +it)
|
||||
total += length_f (_);
|
||||
|
||||
if (data_size) *data_size = total;
|
||||
|
||||
unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
|
||||
off_size = hb_max(min_off_size, off_size);
|
||||
|
||||
return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total;
|
||||
}
|
||||
|
||||
void set_offset_at (unsigned int index, unsigned int offset)
|
||||
{
|
||||
assert (index <= count);
|
||||
|
||||
unsigned int size = offSize;
|
||||
const HBUINT8 *p = offsets;
|
||||
switch (size)
|
||||
{
|
||||
case 1: ((HBUINT8 *) p)[index] = offset; break;
|
||||
case 2: ((HBUINT16 *) p)[index] = offset; break;
|
||||
case 3: ((HBUINT24 *) p)[index] = offset; break;
|
||||
case 4: ((HBUINT32 *) p)[index] = offset; break;
|
||||
default: return;
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
unsigned int offset_at (unsigned int index) const
|
||||
{
|
||||
assert (index <= count);
|
||||
|
||||
unsigned int size = offSize;
|
||||
const HBUINT8 *p = offsets;
|
||||
switch (size)
|
||||
{
|
||||
case 1: return ((HBUINT8 *) p)[index];
|
||||
case 2: return ((HBUINT16 *) p)[index];
|
||||
case 3: return ((HBUINT24 *) p)[index];
|
||||
case 4: return ((HBUINT32 *) p)[index];
|
||||
default: return 0;
|
||||
}
|
||||
}
|
||||
|
||||
const unsigned char *data_base () const
|
||||
{ return (const unsigned char *) this + min_size + offSize.static_size - 1 + offset_array_size (); }
|
||||
public:
|
||||
|
||||
hb_ubytes_t operator [] (unsigned int index) const
|
||||
{
|
||||
if (unlikely (index >= count)) return hb_ubytes_t ();
|
||||
_hb_compiler_memory_r_barrier ();
|
||||
unsigned offset0 = offset_at (index);
|
||||
unsigned offset1 = offset_at (index + 1);
|
||||
if (unlikely (offset1 < offset0 || offset1 > offset_at (count)))
|
||||
return hb_ubytes_t ();
|
||||
return hb_ubytes_t (data_base () + offset0, offset1 - offset0);
|
||||
}
|
||||
|
||||
unsigned int get_size () const
|
||||
{
|
||||
if (count)
|
||||
return min_size + offSize.static_size + offset_array_size () + (offset_at (count) - 1);
|
||||
return min_size; /* empty CFFIndex contains count only */
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (likely (c->check_struct (this) &&
|
||||
hb_barrier () &&
|
||||
(count == 0 || /* empty INDEX */
|
||||
(count < count + 1u &&
|
||||
hb_barrier () &&
|
||||
c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 &&
|
||||
c->check_array (offsets, offSize, count + 1u) &&
|
||||
c->check_array ((const HBUINT8*) data_base (), 1, offset_at (count))))));
|
||||
}
|
||||
|
||||
public:
|
||||
COUNT count; /* Number of object data. Note there are (count+1) offsets */
|
||||
private:
|
||||
HBUINT8 offSize; /* The byte size of each offset in the offsets array. */
|
||||
HBUINT8 offsets[HB_VAR_ARRAY];
|
||||
/* The array of (count + 1) offsets into objects array (1-base). */
|
||||
/* HBUINT8 data[HB_VAR_ARRAY]; Object data */
|
||||
public:
|
||||
DEFINE_SIZE_MIN (COUNT::static_size);
|
||||
};
|
||||
|
||||
/* Top Dict, Font Dict, Private Dict */
|
||||
struct Dict : UnsizedByteStr
|
||||
{
|
||||
|
|
|
|||
|
|
@ -51,9 +51,6 @@ namespace CFF {
|
|||
enum EncodingID { StandardEncoding = 0, ExpertEncoding = 1 };
|
||||
enum CharsetID { ISOAdobeCharset = 0, ExpertCharset = 1, ExpertSubsetCharset = 2 };
|
||||
|
||||
typedef CFFIndex<HBUINT16> CFF1Index;
|
||||
|
||||
typedef CFFIndex<HBUINT16> CFF1Index;
|
||||
typedef CFF1Index CFF1CharStrings;
|
||||
typedef Subrs<HBUINT16> CFF1Subrs;
|
||||
|
||||
|
|
@ -763,9 +760,9 @@ struct cff1_top_dict_values_t : top_dict_values_t<cff1_top_dict_val_t>
|
|||
unsigned int ros_supplement;
|
||||
unsigned int cidCount;
|
||||
|
||||
unsigned int EncodingOffset;
|
||||
unsigned int CharsetOffset;
|
||||
unsigned int FDSelectOffset;
|
||||
int EncodingOffset;
|
||||
int CharsetOffset;
|
||||
int FDSelectOffset;
|
||||
table_info_t privateDictInfo;
|
||||
};
|
||||
|
||||
|
|
@ -821,24 +818,24 @@ struct cff1_top_dict_opset_t : top_dict_opset_t<cff1_top_dict_val_t>
|
|||
break;
|
||||
|
||||
case OpCode_Encoding:
|
||||
dictval.EncodingOffset = env.argStack.pop_uint ();
|
||||
dictval.EncodingOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
if (unlikely (dictval.EncodingOffset == 0)) return;
|
||||
break;
|
||||
|
||||
case OpCode_charset:
|
||||
dictval.CharsetOffset = env.argStack.pop_uint ();
|
||||
dictval.CharsetOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
if (unlikely (dictval.CharsetOffset == 0)) return;
|
||||
break;
|
||||
|
||||
case OpCode_FDSelect:
|
||||
dictval.FDSelectOffset = env.argStack.pop_uint ();
|
||||
dictval.FDSelectOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
case OpCode_Private:
|
||||
dictval.privateDictInfo.offset = env.argStack.pop_uint ();
|
||||
dictval.privateDictInfo.offset = env.argStack.pop_int ();
|
||||
dictval.privateDictInfo.size = env.argStack.pop_uint ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
|
@ -913,7 +910,7 @@ struct cff1_private_dict_values_base_t : dict_values_t<VAL>
|
|||
}
|
||||
void fini () { dict_values_t<VAL>::fini (); }
|
||||
|
||||
unsigned int subrsOffset;
|
||||
int subrsOffset;
|
||||
const CFF1Subrs *localSubrs;
|
||||
};
|
||||
|
||||
|
|
@ -948,7 +945,7 @@ struct cff1_private_dict_opset_t : dict_opset_t
|
|||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_Subrs:
|
||||
dictval.subrsOffset = env.argStack.pop_uint ();
|
||||
dictval.subrsOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
|
|
@ -990,7 +987,7 @@ struct cff1_private_dict_opset_subset_t : dict_opset_t
|
|||
break;
|
||||
|
||||
case OpCode_Subrs:
|
||||
dictval.subrsOffset = env.argStack.pop_uint ();
|
||||
dictval.subrsOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
|
|
@ -1090,8 +1087,8 @@ struct cff1
|
|||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
topDictIndex = &StructAtOffset<CFF1TopDictIndex> (nameIndex, nameIndex->get_size ());
|
||||
if ((topDictIndex == &Null (CFF1TopDictIndex)) || !topDictIndex->sanitize (&sc) || (topDictIndex->count == 0))
|
||||
topDictIndex = &StructAtOffsetOrNull<CFF1TopDictIndex> (nameIndex, nameIndex->get_size (), sc);
|
||||
if (topDictIndex == &Null (CFF1TopDictIndex) || (topDictIndex->count == 0))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
|
|
@ -1108,20 +1105,18 @@ struct cff1
|
|||
charset = &Null (Charset);
|
||||
else
|
||||
{
|
||||
charset = &StructAtOffsetOrNull<Charset> (cff, topDict.CharsetOffset);
|
||||
if (unlikely ((charset == &Null (Charset)) || !charset->sanitize (&sc, &num_charset_entries))) goto fail;
|
||||
hb_barrier ();
|
||||
charset = &StructAtOffsetOrNull<Charset> (cff, topDict.CharsetOffset, sc, &num_charset_entries);
|
||||
if (unlikely (charset == &Null (Charset))) goto fail;
|
||||
}
|
||||
|
||||
fdCount = 1;
|
||||
if (is_CID ())
|
||||
{
|
||||
fdArray = &StructAtOffsetOrNull<CFF1FDArray> (cff, topDict.FDArrayOffset);
|
||||
fdSelect = &StructAtOffsetOrNull<CFF1FDSelect> (cff, topDict.FDSelectOffset);
|
||||
if (unlikely ((fdArray == &Null (CFF1FDArray)) || !fdArray->sanitize (&sc) ||
|
||||
(fdSelect == &Null (CFF1FDSelect)) || !fdSelect->sanitize (&sc, fdArray->count)))
|
||||
fdArray = &StructAtOffsetOrNull<CFF1FDArray> (cff, topDict.FDArrayOffset, sc);
|
||||
fdSelect = &StructAtOffsetOrNull<CFF1FDSelect> (cff, topDict.FDSelectOffset, sc, fdArray->count);
|
||||
if (unlikely (fdArray == &Null (CFF1FDArray) ||
|
||||
fdSelect == &Null (CFF1FDSelect)))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
fdCount = fdArray->count;
|
||||
}
|
||||
|
|
@ -1140,27 +1135,19 @@ struct cff1
|
|||
{
|
||||
if (!is_predef_encoding ())
|
||||
{
|
||||
encoding = &StructAtOffsetOrNull<Encoding> (cff, topDict.EncodingOffset);
|
||||
if (unlikely ((encoding == &Null (Encoding)) || !encoding->sanitize (&sc))) goto fail;
|
||||
hb_barrier ();
|
||||
encoding = &StructAtOffsetOrNull<Encoding> (cff, topDict.EncodingOffset, sc);
|
||||
if (unlikely (encoding == &Null (Encoding))) goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
stringIndex = &StructAtOffset<CFF1StringIndex> (topDictIndex, topDictIndex->get_size ());
|
||||
if ((stringIndex == &Null (CFF1StringIndex)) || !stringIndex->sanitize (&sc))
|
||||
stringIndex = &StructAtOffsetOrNull<CFF1StringIndex> (topDictIndex, topDictIndex->get_size (), sc);
|
||||
if (stringIndex == &Null (CFF1StringIndex))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
globalSubrs = &StructAtOffset<CFF1Subrs> (stringIndex, stringIndex->get_size ());
|
||||
if ((globalSubrs != &Null (CFF1Subrs)) && !globalSubrs->sanitize (&sc))
|
||||
globalSubrs = &StructAtOffsetOrNull<CFF1Subrs> (stringIndex, stringIndex->get_size (), sc);
|
||||
charStrings = &StructAtOffsetOrNull<CFF1CharStrings> (cff, topDict.charStringsOffset, sc);
|
||||
if (charStrings == &Null (CFF1CharStrings))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
charStrings = &StructAtOffsetOrNull<CFF1CharStrings> (cff, topDict.charStringsOffset);
|
||||
|
||||
if ((charStrings == &Null (CFF1CharStrings)) || unlikely (!charStrings->sanitize (&sc)))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
num_glyphs = charStrings->count;
|
||||
if (num_glyphs != sc.get_num_glyphs ())
|
||||
|
|
@ -1188,19 +1175,14 @@ struct cff1
|
|||
font->init ();
|
||||
if (unlikely (!font_interp.interpret (*font))) goto fail;
|
||||
PRIVDICTVAL *priv = &privateDicts[i];
|
||||
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
|
||||
hb_barrier ();
|
||||
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff, font->privateDictInfo.offset, sc, font->privateDictInfo.size).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (privDictStr == (const unsigned char *) &Null (UnsizedByteStr))) goto fail;
|
||||
num_interp_env_t env2 (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env2);
|
||||
priv->init ();
|
||||
if (unlikely (!priv_interp.interpret (*priv))) goto fail;
|
||||
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset);
|
||||
if (priv->localSubrs != &Null (CFF1Subrs) &&
|
||||
unlikely (!priv->localSubrs->sanitize (&sc)))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset, sc);
|
||||
}
|
||||
}
|
||||
else /* non-CID */
|
||||
|
|
@ -1208,18 +1190,14 @@ struct cff1
|
|||
cff1_top_dict_values_t *font = &topDict;
|
||||
PRIVDICTVAL *priv = &privateDicts[0];
|
||||
|
||||
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
|
||||
hb_barrier ();
|
||||
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff, font->privateDictInfo.offset, sc, font->privateDictInfo.size).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (privDictStr == (const unsigned char *) &Null (UnsizedByteStr))) goto fail;
|
||||
num_interp_env_t env (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env);
|
||||
priv->init ();
|
||||
if (unlikely (!priv_interp.interpret (*priv))) goto fail;
|
||||
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset);
|
||||
if (priv->localSubrs != &Null (CFF1Subrs) &&
|
||||
unlikely (!priv->localSubrs->sanitize (&sc)))
|
||||
goto fail;
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset, sc);
|
||||
hb_barrier ();
|
||||
}
|
||||
|
||||
|
|
@ -1437,7 +1415,7 @@ struct cff1
|
|||
hb_sorted_vector_t<gname_t> *names = glyph_names.get_acquire ();
|
||||
if (unlikely (!names))
|
||||
{
|
||||
names = (hb_sorted_vector_t<gname_t> *) hb_calloc (sizeof (hb_sorted_vector_t<gname_t>), 1);
|
||||
names = (hb_sorted_vector_t<gname_t> *) hb_calloc (1, sizeof (hb_sorted_vector_t<gname_t>));
|
||||
if (likely (names))
|
||||
{
|
||||
names->init ();
|
||||
|
|
|
|||
|
|
@ -202,6 +202,11 @@ struct cff2_path_procs_path_t : path_procs_t<cff2_path_procs_path_t, cff2_cs_int
|
|||
struct cff2_cs_opset_path_t : cff2_cs_opset_t<cff2_cs_opset_path_t, cff2_path_param_t, number_t, cff2_path_procs_path_t> {};
|
||||
|
||||
bool OT::cff2::accelerator_t::get_path (hb_font_t *font, hb_codepoint_t glyph, hb_draw_session_t &draw_session) const
|
||||
{
|
||||
return get_path_at (font, glyph, draw_session, hb_array (font->coords, font->num_coords));
|
||||
}
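The get_path wrapper above simply forwards the font's own coordinates; get_path_at, defined next, is what lets a caller (notably the VARC path referenced later in this diff) draw the same CFF2 outlines at explicit coordinates. From outside the library this is still reached through the public draw API. A minimal C sketch, where the font path, the wght axis value, and the single move_to callback are placeholder assumptions, not part of this change:

#include <hb.h>
#include <stdio.h>

/* placeholder move_to callback; a real client would also register
 * line_to / cubic_to / close_path */
static void
move_to (hb_draw_funcs_t *dfuncs, void *draw_data, hb_draw_state_t *st,
         float to_x, float to_y, void *user_data)
{
  printf ("M %g %g\n", to_x, to_y);
}

static void
draw_cff2_glyph (const char *font_path /* placeholder */, hb_codepoint_t gid)
{
  hb_blob_t *blob = hb_blob_create_from_file (font_path);
  hb_face_t *face = hb_face_create (blob, 0);
  hb_font_t *font = hb_font_create (face);

  hb_variation_t wght = { HB_TAG ('w','g','h','t'), 700.f };
  hb_font_set_variations (font, &wght, 1);   /* coordinates the outline is drawn at */

  hb_draw_funcs_t *dfuncs = hb_draw_funcs_create ();
  hb_draw_funcs_set_move_to_func (dfuncs, move_to, NULL, NULL);

  hb_font_draw_glyph (font, gid, dfuncs, NULL);

  hb_draw_funcs_destroy (dfuncs);
  hb_font_destroy (font);
  hb_face_destroy (face);
  hb_blob_destroy (blob);
}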
|
||||
|
||||
bool OT::cff2::accelerator_t::get_path_at (hb_font_t *font, hb_codepoint_t glyph, hb_draw_session_t &draw_session, hb_array_t<const int> coords) const
|
||||
{
|
||||
#ifdef HB_NO_OT_FONT_CFF
|
||||
/* XXX Remove check when this code moves to .hh file. */
|
||||
|
|
@ -212,7 +217,7 @@ bool OT::cff2::accelerator_t::get_path (hb_font_t *font, hb_codepoint_t glyph, h
|
|||
|
||||
unsigned int fd = fdSelect->get_fd (glyph);
|
||||
const hb_ubytes_t str = (*charStrings)[glyph];
|
||||
cff2_cs_interp_env_t<number_t> env (str, *this, fd, font->coords, font->num_coords);
|
||||
cff2_cs_interp_env_t<number_t> env (str, *this, fd, coords.arrayZ, coords.length);
|
||||
cff2_cs_interpreter_t<cff2_cs_opset_path_t, cff2_path_param_t, number_t> interp (env);
|
||||
cff2_path_param_t param (font, draw_session);
|
||||
if (unlikely (!interp.interpret (param))) return false;
|
||||
|
|
|
|||
|
|
@ -40,8 +40,6 @@ namespace CFF {
|
|||
*/
|
||||
#define HB_OT_TAG_CFF2 HB_TAG('C','F','F','2')
|
||||
|
||||
typedef CFFIndex<HBUINT32> CFF2Index;
|
||||
|
||||
typedef CFF2Index CFF2CharStrings;
|
||||
typedef Subrs<HBUINT32> CFF2Subrs;
|
||||
|
||||
|
|
@ -111,7 +109,7 @@ struct CFF2FDSelect
|
|||
DEFINE_SIZE_MIN (2);
|
||||
};
|
||||
|
||||
struct CFF2VariationStore
|
||||
struct CFF2ItemVariationStore
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
|
|
@ -122,11 +120,11 @@ struct CFF2VariationStore
|
|||
varStore.sanitize (c));
|
||||
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c, const CFF2VariationStore *varStore)
|
||||
bool serialize (hb_serialize_context_t *c, const CFF2ItemVariationStore *varStore)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
unsigned int size_ = varStore->get_size ();
|
||||
CFF2VariationStore *dest = c->allocate_size<CFF2VariationStore> (size_);
|
||||
CFF2ItemVariationStore *dest = c->allocate_size<CFF2ItemVariationStore> (size_);
|
||||
if (unlikely (!dest)) return_trace (false);
|
||||
hb_memcpy (dest, varStore, size_);
|
||||
return_trace (true);
|
||||
|
|
@ -135,9 +133,9 @@ struct CFF2VariationStore
|
|||
unsigned int get_size () const { return HBUINT16::static_size + size; }
|
||||
|
||||
HBUINT16 size;
|
||||
VariationStore varStore;
|
||||
ItemVariationStore varStore;
|
||||
|
||||
DEFINE_SIZE_MIN (2 + VariationStore::min_size);
|
||||
DEFINE_SIZE_MIN (2 + ItemVariationStore::min_size);
|
||||
};
|
||||
|
||||
struct cff2_top_dict_values_t : top_dict_values_t<>
|
||||
|
|
@ -150,8 +148,8 @@ struct cff2_top_dict_values_t : top_dict_values_t<>
|
|||
}
|
||||
void fini () { top_dict_values_t<>::fini (); }
|
||||
|
||||
unsigned int vstoreOffset;
|
||||
unsigned int FDSelectOffset;
|
||||
int vstoreOffset;
|
||||
int FDSelectOffset;
|
||||
};
|
||||
|
||||
struct cff2_top_dict_opset_t : top_dict_opset_t<>
|
||||
|
|
@ -169,11 +167,11 @@ struct cff2_top_dict_opset_t : top_dict_opset_t<>
|
|||
break;
|
||||
|
||||
case OpCode_vstore:
|
||||
dictval.vstoreOffset = env.argStack.pop_uint ();
|
||||
dictval.vstoreOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_FDSelect:
|
||||
dictval.FDSelectOffset = env.argStack.pop_uint ();
|
||||
dictval.FDSelectOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
|
|
@ -241,7 +239,7 @@ struct cff2_private_dict_values_base_t : dict_values_t<VAL>
|
|||
}
|
||||
void fini () { dict_values_t<VAL>::fini (); }
|
||||
|
||||
unsigned int subrsOffset;
|
||||
int subrsOffset;
|
||||
const CFF2Subrs *localSubrs;
|
||||
unsigned int ivs;
|
||||
};
|
||||
|
|
@ -295,7 +293,7 @@ struct cff2_private_dict_opset_t : dict_opset_t
|
|||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_Subrs:
|
||||
dictval.subrsOffset = env.argStack.pop_uint ();
|
||||
dictval.subrsOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_vsindexdict:
|
||||
|
|
@ -344,7 +342,7 @@ struct cff2_private_dict_opset_subset_t : dict_opset_t
|
|||
return;
|
||||
|
||||
case OpCode_Subrs:
|
||||
dictval.subrsOffset = env.argStack.pop_uint ();
|
||||
dictval.subrsOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
|
|
@ -426,18 +424,15 @@ struct cff2
|
|||
if (unlikely (!top_interp.interpret (topDict))) goto fail;
|
||||
}
|
||||
|
||||
globalSubrs = &StructAtOffset<CFF2Subrs> (cff2, cff2->topDict + cff2->topDictSize);
|
||||
varStore = &StructAtOffsetOrNull<CFF2VariationStore> (cff2, topDict.vstoreOffset);
|
||||
charStrings = &StructAtOffsetOrNull<CFF2CharStrings> (cff2, topDict.charStringsOffset);
|
||||
fdArray = &StructAtOffsetOrNull<CFF2FDArray> (cff2, topDict.FDArrayOffset);
|
||||
fdSelect = &StructAtOffsetOrNull<CFF2FDSelect> (cff2, topDict.FDSelectOffset);
|
||||
globalSubrs = &StructAtOffsetOrNull<CFF2Subrs> (cff2, cff2->topDict + cff2->topDictSize, sc);
|
||||
varStore = &StructAtOffsetOrNull<CFF2ItemVariationStore> (cff2, topDict.vstoreOffset, sc);
|
||||
charStrings = &StructAtOffsetOrNull<CFF2CharStrings> (cff2, topDict.charStringsOffset, sc);
|
||||
fdArray = &StructAtOffsetOrNull<CFF2FDArray> (cff2, topDict.FDArrayOffset, sc);
|
||||
fdSelect = &StructAtOffsetOrNull<CFF2FDSelect> (cff2, topDict.FDSelectOffset, sc, fdArray->count);
|
||||
|
||||
if (((varStore != &Null (CFF2VariationStore)) && unlikely (!varStore->sanitize (&sc))) ||
|
||||
(charStrings == &Null (CFF2CharStrings)) || unlikely (!charStrings->sanitize (&sc)) ||
|
||||
(globalSubrs == &Null (CFF2Subrs)) || unlikely (!globalSubrs->sanitize (&sc)) ||
|
||||
(fdArray == &Null (CFF2FDArray)) || unlikely (!fdArray->sanitize (&sc)) ||
|
||||
!hb_barrier () ||
|
||||
(((fdSelect != &Null (CFF2FDSelect)) && unlikely (!fdSelect->sanitize (&sc, fdArray->count)))))
|
||||
if (charStrings == &Null (CFF2CharStrings) ||
|
||||
globalSubrs == &Null (CFF2Subrs) ||
|
||||
fdArray == &Null (CFF2FDArray))
|
||||
goto fail;
|
||||
|
||||
num_glyphs = charStrings->count;
|
||||
|
|
@ -462,19 +457,14 @@ struct cff2
|
|||
font->init ();
|
||||
if (unlikely (!font_interp.interpret (*font))) goto fail;
|
||||
|
||||
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff2, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
|
||||
hb_barrier ();
|
||||
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff2, font->privateDictInfo.offset, sc, font->privateDictInfo.size).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (privDictStr == (const unsigned char *) &Null (UnsizedByteStr))) goto fail;
|
||||
cff2_priv_dict_interp_env_t env2 (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL, cff2_priv_dict_interp_env_t> priv_interp (env2);
|
||||
privateDicts[i].init ();
|
||||
if (unlikely (!priv_interp.interpret (privateDicts[i]))) goto fail;
|
||||
|
||||
privateDicts[i].localSubrs = &StructAtOffsetOrNull<CFF2Subrs> (&privDictStr[0], privateDicts[i].subrsOffset);
|
||||
if (privateDicts[i].localSubrs != &Null (CFF2Subrs) &&
|
||||
unlikely (!privateDicts[i].localSubrs->sanitize (&sc)))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
privateDicts[i].localSubrs = &StructAtOffsetOrNull<CFF2Subrs> (&privDictStr[0], privateDicts[i].subrsOffset, sc);
|
||||
}
|
||||
|
||||
return;
|
||||
|
|
@ -509,7 +499,7 @@ struct cff2
|
|||
hb_blob_t *blob = nullptr;
|
||||
cff2_top_dict_values_t topDict;
|
||||
const CFF2Subrs *globalSubrs = nullptr;
|
||||
const CFF2VariationStore *varStore = nullptr;
|
||||
const CFF2ItemVariationStore *varStore = nullptr;
|
||||
const CFF2CharStrings *charStrings = nullptr;
|
||||
const CFF2FDArray *fdArray = nullptr;
|
||||
const CFF2FDSelect *fdSelect = nullptr;
|
||||
|
|
@ -530,6 +520,7 @@ struct cff2
|
|||
hb_glyph_extents_t *extents) const;
|
||||
HB_INTERNAL bool paint_glyph (hb_font_t *font, hb_codepoint_t glyph, hb_paint_funcs_t *funcs, void *data, hb_color_t foreground) const;
|
||||
HB_INTERNAL bool get_path (hb_font_t *font, hb_codepoint_t glyph, hb_draw_session_t &draw_session) const;
|
||||
HB_INTERNAL bool get_path_at (hb_font_t *font, hb_codepoint_t glyph, hb_draw_session_t &draw_session, hb_array_t<const int> coords) const;
|
||||
};
|
||||
|
||||
struct accelerator_subset_t : accelerator_templ_t<cff2_private_dict_opset_subset_t, cff2_private_dict_values_subset_t>
|
||||
|
|
|
|||
|
|
@ -41,6 +41,30 @@
|
|||
|
||||
namespace OT {
|
||||
|
||||
static inline uint8_t unicode_to_macroman (hb_codepoint_t u)
|
||||
{
|
||||
uint16_t mapping[] = {
|
||||
0x00C4, 0x00C5, 0x00C7, 0x00C9, 0x00D1, 0x00D6, 0x00DC, 0x00E1,
|
||||
0x00E0, 0x00E2, 0x00E4, 0x00E3, 0x00E5, 0x00E7, 0x00E9, 0x00E8,
|
||||
0x00EA, 0x00EB, 0x00ED, 0x00EC, 0x00EE, 0x00EF, 0x00F1, 0x00F3,
|
||||
0x00F2, 0x00F4, 0x00F6, 0x00F5, 0x00FA, 0x00F9, 0x00FB, 0x00FC,
|
||||
0x2020, 0x00B0, 0x00A2, 0x00A3, 0x00A7, 0x2022, 0x00B6, 0x00DF,
|
||||
0x00AE, 0x00A9, 0x2122, 0x00B4, 0x00A8, 0x2260, 0x00C6, 0x00D8,
|
||||
0x221E, 0x00B1, 0x2264, 0x2265, 0x00A5, 0x00B5, 0x2202, 0x2211,
|
||||
0x220F, 0x03C0, 0x222B, 0x00AA, 0x00BA, 0x03A9, 0x00E6, 0x00F8,
|
||||
0x00BF, 0x00A1, 0x00AC, 0x221A, 0x0192, 0x2248, 0x2206, 0x00AB,
|
||||
0x00BB, 0x2026, 0x00A0, 0x00C0, 0x00C3, 0x00D5, 0x0152, 0x0153,
|
||||
0x2013, 0x2014, 0x201C, 0x201D, 0x2018, 0x2019, 0x00F7, 0x25CA,
|
||||
0x00FF, 0x0178, 0x2044, 0x20AC, 0x2039, 0x203A, 0xFB01, 0xFB02,
|
||||
0x2021, 0x00B7, 0x201A, 0x201E, 0x2030, 0x00C2, 0x00CA, 0x00C1,
|
||||
0x00CB, 0x00C8, 0x00CD, 0x00CE, 0x00CF, 0x00CC, 0x00D3, 0x00D4,
|
||||
0xF8FF, 0x00D2, 0x00DA, 0x00DB, 0x00D9, 0x0131, 0x02C6, 0x02DC,
|
||||
0x00AF, 0x02D8, 0x02D9, 0x02DA, 0x00B8, 0x02DD, 0x02DB, 0x02C7
|
||||
};
|
||||
uint16_t *c = hb_bsearch (u, mapping, ARRAY_LENGTH (mapping), sizeof (mapping[0]),
|
||||
_hb_cmp_operator<uint16_t, uint16_t>);
|
||||
return c ? (c - mapping) + 0x7F : 0;
|
||||
}
|
||||
|
||||
struct CmapSubtableFormat0
|
||||
{
|
||||
|
|
@ -1465,8 +1489,11 @@ struct EncodingRecord
|
|||
int ret;
|
||||
ret = platformID.cmp (other.platformID);
|
||||
if (ret) return ret;
|
||||
ret = encodingID.cmp (other.encodingID);
|
||||
if (ret) return ret;
|
||||
if (other.encodingID != 0xFFFF)
|
||||
{
|
||||
ret = encodingID.cmp (other.encodingID);
|
||||
if (ret) return ret;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
|
@ -1814,9 +1841,13 @@ struct cmap
|
|||
c->plan));
|
||||
}
|
||||
|
||||
const CmapSubtable *find_best_subtable (bool *symbol = nullptr) const
|
||||
const CmapSubtable *find_best_subtable (bool *symbol = nullptr,
|
||||
bool *mac = nullptr,
|
||||
bool *macroman = nullptr) const
|
||||
{
|
||||
if (symbol) *symbol = false;
|
||||
if (mac) *mac = false;
|
||||
if (macroman) *macroman = false;
|
||||
|
||||
const CmapSubtable *subtable;
|
||||
|
||||
|
|
@ -1841,6 +1872,20 @@ struct cmap
|
|||
if ((subtable = this->find_subtable (0, 1))) return subtable;
|
||||
if ((subtable = this->find_subtable (0, 0))) return subtable;
|
||||
|
||||
/* MacRoman subtable. */
|
||||
if ((subtable = this->find_subtable (1, 0)))
|
||||
{
|
||||
if (mac) *mac = true;
|
||||
if (macroman) *macroman = true;
|
||||
return subtable;
|
||||
}
|
||||
/* Any other Mac subtable; we just map ASCII for these. */
|
||||
if ((subtable = this->find_subtable (1, 0xFFFF)))
|
||||
{
|
||||
if (mac) *mac = true;
|
||||
return subtable;
|
||||
}
|
||||
|
||||
/* Meh. */
|
||||
return &Null (CmapSubtable);
|
||||
}
|
||||
|
|
@ -1852,8 +1897,8 @@ struct cmap
|
|||
accelerator_t (hb_face_t *face)
|
||||
{
|
||||
this->table = hb_sanitize_context_t ().reference_table<cmap> (face);
|
||||
bool symbol;
|
||||
this->subtable = table->find_best_subtable (&symbol);
|
||||
bool symbol, mac, macroman;
|
||||
this->subtable = table->find_best_subtable (&symbol, &mac, &macroman);
|
||||
this->subtable_uvs = &Null (CmapSubtableFormat14);
|
||||
{
|
||||
const CmapSubtable *st = table->find_subtable (0, 5);
|
||||
|
|
@ -1862,6 +1907,7 @@ struct cmap
|
|||
}
|
||||
|
||||
this->get_glyph_data = subtable;
|
||||
#ifndef HB_NO_CMAP_LEGACY_SUBTABLES
|
||||
if (unlikely (symbol))
|
||||
{
|
||||
switch ((unsigned) face->table.OS2->get_font_page ()) {
|
||||
|
|
@ -1881,7 +1927,16 @@ struct cmap
|
|||
break;
|
||||
}
|
||||
}
|
||||
else if (unlikely (macroman))
|
||||
{
|
||||
this->get_glyph_funcZ = get_glyph_from_macroman<CmapSubtable>;
|
||||
}
|
||||
else if (unlikely (mac))
|
||||
{
|
||||
this->get_glyph_funcZ = get_glyph_from_ascii<CmapSubtable>;
|
||||
}
|
||||
else
|
||||
#endif
|
||||
{
|
||||
switch (subtable->u.format) {
|
||||
/* Accelerate format 4 and format 12. */
|
||||
|
|
@ -1924,7 +1979,7 @@ struct cmap
|
|||
hb_codepoint_t *glyph,
|
||||
cache_t *cache = nullptr) const
|
||||
{
|
||||
if (unlikely (!this->get_glyph_funcZ)) return 0;
|
||||
if (unlikely (!this->get_glyph_funcZ)) return false;
|
||||
return _cached_get (unicode, glyph, cache);
|
||||
}
|
||||
|
||||
|
|
@ -2006,6 +2061,28 @@ struct cmap
|
|||
return false;
|
||||
}
|
||||
|
||||
template <typename Type>
|
||||
HB_INTERNAL static bool get_glyph_from_ascii (const void *obj,
|
||||
hb_codepoint_t codepoint,
|
||||
hb_codepoint_t *glyph)
|
||||
{
|
||||
const Type *typed_obj = (const Type *) obj;
|
||||
return codepoint < 0x80 && typed_obj->get_glyph (codepoint, glyph);
|
||||
}
|
||||
|
||||
template <typename Type>
|
||||
HB_INTERNAL static bool get_glyph_from_macroman (const void *obj,
|
||||
hb_codepoint_t codepoint,
|
||||
hb_codepoint_t *glyph)
|
||||
{
|
||||
if (get_glyph_from_ascii<Type> (obj, codepoint, glyph))
|
||||
return true;
|
||||
|
||||
const Type *typed_obj = (const Type *) obj;
|
||||
unsigned c = unicode_to_macroman (codepoint);
|
||||
return c && typed_obj->get_glyph (c, glyph);
|
||||
}
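With the MacRoman fallback above wired into the accelerator, a font whose only cmap is platform 1, encoding 0 can now resolve codepoints beyond ASCII through the ordinary nominal-glyph API. A minimal sketch, where "legacy-macroman.ttf" is a placeholder standing in for such a font:

#include <hb.h>
#include <stdio.h>

int main (void)
{
  hb_blob_t *blob = hb_blob_create_from_file ("legacy-macroman.ttf"); /* placeholder */
  hb_face_t *face = hb_face_create (blob, 0);
  hb_font_t *font = hb_font_create (face);

  hb_codepoint_t gid = 0;
  /* U+00E9 is part of the Mac Roman repertoire, so the MacRoman
   * subtable path above can map it even without a Unicode cmap */
  if (hb_font_get_nominal_glyph (font, 0x00E9u, &gid))
    printf ("U+00E9 -> glyph %u\n", gid);

  hb_font_destroy (font);
  hb_face_destroy (face);
  hb_blob_destroy (blob);
  return 0;
}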
|
||||
|
||||
private:
|
||||
hb_nonnull_ptr_t<const CmapSubtable> subtable;
|
||||
hb_nonnull_ptr_t<const CmapSubtableFormat14> subtable_uvs;
|
||||
|
|
@ -2035,28 +2112,6 @@ struct cmap
|
|||
return &(this+result.subtable);
|
||||
}
|
||||
|
||||
const EncodingRecord *find_encodingrec (unsigned int platform_id,
|
||||
unsigned int encoding_id) const
|
||||
{
|
||||
EncodingRecord key;
|
||||
key.platformID = platform_id;
|
||||
key.encodingID = encoding_id;
|
||||
|
||||
return encodingRecord.as_array ().bsearch (key);
|
||||
}
|
||||
|
||||
bool find_subtable (unsigned format) const
|
||||
{
|
||||
auto it =
|
||||
+ hb_iter (encodingRecord)
|
||||
| hb_map (&EncodingRecord::subtable)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_filter ([&] (const CmapSubtable& _) { return _.u.format == format; })
|
||||
;
|
||||
|
||||
return it.len ();
|
||||
}
|
||||
|
||||
public:
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
|
|
|||
|
|
@ -96,11 +96,14 @@ HB_OT_CORE_TABLE (OT, avar)
|
|||
HB_OT_CORE_TABLE (OT, cvar)
|
||||
HB_OT_ACCELERATOR (OT, gvar)
|
||||
HB_OT_CORE_TABLE (OT, MVAR)
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
HB_OT_CORE_TABLE (OT, VARC)
|
||||
#endif
|
||||
#endif
|
||||
|
||||
/* Legacy kern. */
|
||||
#ifndef HB_NO_OT_KERN
|
||||
HB_OT_CORE_TABLE (OT, kern)
|
||||
HB_OT_ACCELERATOR (OT, kern)
|
||||
#endif
|
||||
|
||||
/* OpenType shaping. */
|
||||
|
|
@ -118,9 +121,9 @@ HB_OT_CORE_TABLE (OT, BASE)
|
|||
|
||||
/* AAT shaping. */
|
||||
#ifndef HB_NO_AAT
|
||||
HB_OT_TABLE (AAT, morx)
|
||||
HB_OT_TABLE (AAT, mort)
|
||||
HB_OT_TABLE (AAT, kerx)
|
||||
HB_OT_ACCELERATOR (AAT, morx)
|
||||
HB_OT_ACCELERATOR (AAT, mort)
|
||||
HB_OT_ACCELERATOR (AAT, kerx)
|
||||
HB_OT_TABLE (AAT, ankr)
|
||||
HB_OT_TABLE (AAT, trak)
|
||||
HB_OT_TABLE (AAT, ltag)
|
||||
|
|
|
|||
|
|
@ -41,6 +41,8 @@
|
|||
#include "hb-ot-layout-gdef-table.hh"
|
||||
#include "hb-ot-layout-gsub-table.hh"
|
||||
#include "hb-ot-layout-gpos-table.hh"
|
||||
#include "hb-aat-layout-kerx-table.hh"
|
||||
#include "hb-aat-layout-morx-table.hh"
|
||||
|
||||
|
||||
void hb_ot_face_t::init0 (hb_face_t *face)
|
||||
|
|
|
|||
|
|
@ -43,6 +43,7 @@
|
|||
#include "hb-ot-hmtx-table.hh"
|
||||
#include "hb-ot-post-table.hh"
|
||||
#include "hb-ot-stat-table.hh" // Just so we compile it; unused otherwise.
|
||||
#include "hb-ot-var-varc-table.hh"
|
||||
#include "hb-ot-vorg-table.hh"
|
||||
#include "OT/Color/CBDT/CBDT.hh"
|
||||
#include "OT/Color/COLR/COLR.hh"
|
||||
|
|
@ -208,12 +209,12 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
|
|||
|
||||
#if !defined(HB_NO_VAR) && !defined(HB_NO_OT_FONT_ADVANCE_CACHE)
|
||||
const OT::HVAR &HVAR = *hmtx.var_table;
|
||||
const OT::VariationStore &varStore = &HVAR + HVAR.varStore;
|
||||
OT::VariationStore::cache_t *varStore_cache = font->num_coords * count >= 128 ? varStore.create_cache () : nullptr;
|
||||
const OT::ItemVariationStore &varStore = &HVAR + HVAR.varStore;
|
||||
OT::ItemVariationStore::cache_t *varStore_cache = font->num_coords * count >= 128 ? varStore.create_cache () : nullptr;
|
||||
|
||||
bool use_cache = font->num_coords;
|
||||
#else
|
||||
OT::VariationStore::cache_t *varStore_cache = nullptr;
|
||||
OT::ItemVariationStore::cache_t *varStore_cache = nullptr;
|
||||
bool use_cache = false;
|
||||
#endif
|
||||
|
||||
|
|
@ -277,7 +278,7 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
|
|||
}
|
||||
|
||||
#if !defined(HB_NO_VAR) && !defined(HB_NO_OT_FONT_ADVANCE_CACHE)
|
||||
OT::VariationStore::destroy_cache (varStore_cache);
|
||||
OT::ItemVariationStore::destroy_cache (varStore_cache);
|
||||
#endif
|
||||
|
||||
if (font->x_strength && !font->embolden_in_place)
|
||||
|
|
@ -313,10 +314,10 @@ hb_ot_get_glyph_v_advances (hb_font_t* font, void* font_data,
|
|||
{
|
||||
#if !defined(HB_NO_VAR) && !defined(HB_NO_OT_FONT_ADVANCE_CACHE)
|
||||
const OT::VVAR &VVAR = *vmtx.var_table;
|
||||
const OT::VariationStore &varStore = &VVAR + VVAR.varStore;
|
||||
OT::VariationStore::cache_t *varStore_cache = font->num_coords ? varStore.create_cache () : nullptr;
|
||||
const OT::ItemVariationStore &varStore = &VVAR + VVAR.varStore;
|
||||
OT::ItemVariationStore::cache_t *varStore_cache = font->num_coords ? varStore.create_cache () : nullptr;
|
||||
#else
|
||||
OT::VariationStore::cache_t *varStore_cache = nullptr;
|
||||
OT::ItemVariationStore::cache_t *varStore_cache = nullptr;
|
||||
#endif
|
||||
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
|
|
@ -327,7 +328,7 @@ hb_ot_get_glyph_v_advances (hb_font_t* font, void* font_data,
|
|||
}
|
||||
|
||||
#if !defined(HB_NO_VAR) && !defined(HB_NO_OT_FONT_ADVANCE_CACHE)
|
||||
OT::VariationStore::destroy_cache (varStore_cache);
|
||||
OT::ItemVariationStore::destroy_cache (varStore_cache);
|
||||
#endif
|
||||
}
|
||||
else
|
||||
|
|
@ -523,6 +524,10 @@ hb_ot_draw_glyph (hb_font_t *font,
|
|||
{ // Need draw_session to be destructed before emboldening.
|
||||
hb_draw_session_t draw_session (embolden ? hb_outline_recording_pen_get_funcs () : draw_funcs,
|
||||
embolden ? &outline : draw_data, font->slant_xy);
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
if (!font->face->table.VARC->get_path (font, glyph, draw_session))
|
||||
#endif
|
||||
// Keep the following in synch with VARC::get_path_at()
|
||||
if (!font->face->table.glyf->get_path (font, glyph, draw_session))
|
||||
#ifndef HB_NO_CFF
|
||||
if (!font->face->table.cff2->get_path (font, glyph, draw_session))
|
||||
|
|
@ -562,6 +567,9 @@ hb_ot_paint_glyph (hb_font_t *font,
|
|||
if (font->face->table.CBDT->paint_glyph (font, glyph, paint_funcs, paint_data)) return;
|
||||
if (font->face->table.sbix->paint_glyph (font, glyph, paint_funcs, paint_data)) return;
|
||||
#endif
|
||||
#endif
|
||||
#ifndef HB_NO_VAR_COMPOSITES
|
||||
if (font->face->table.VARC->paint_glyph (font, glyph, paint_funcs, paint_data, foreground)) return;
|
||||
#endif
|
||||
if (font->face->table.glyf->paint_glyph (font, glyph, paint_funcs, paint_data, foreground)) return;
|
||||
#ifndef HB_NO_CFF
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@
|
|||
#include "hb-open-type.hh"
|
||||
#include "hb-ot-maxp-table.hh"
|
||||
#include "hb-ot-hhea-table.hh"
|
||||
#include "hb-ot-os2-table.hh"
|
||||
#include "hb-ot-var-hvar-table.hh"
|
||||
#include "hb-ot-var-mvar-table.hh"
|
||||
#include "hb-ot-metrics.hh"
|
||||
|
|
@ -145,6 +146,29 @@ struct hmtxvmtx
|
|||
table->minTrailingBearing = min_rsb;
|
||||
table->maxExtent = max_extent;
|
||||
}
|
||||
|
||||
if (T::is_horizontal)
|
||||
{
|
||||
const auto &OS2 = *c->plan->source->table.OS2;
|
||||
if (OS2.has_data () &&
|
||||
table->ascender == OS2.sTypoAscender &&
|
||||
table->descender == OS2.sTypoDescender &&
|
||||
table->lineGap == OS2.sTypoLineGap)
|
||||
{
|
||||
table->ascender = static_cast<int> (roundf (OS2.sTypoAscender +
|
||||
MVAR.get_var (HB_OT_METRICS_TAG_HORIZONTAL_ASCENDER,
|
||||
c->plan->normalized_coords.arrayZ,
|
||||
c->plan->normalized_coords.length)));
|
||||
table->descender = static_cast<int> (roundf (OS2.sTypoDescender +
|
||||
MVAR.get_var (HB_OT_METRICS_TAG_HORIZONTAL_DESCENDER,
|
||||
c->plan->normalized_coords.arrayZ,
|
||||
c->plan->normalized_coords.length)));
|
||||
table->lineGap = static_cast<int> (roundf (OS2.sTypoLineGap +
|
||||
MVAR.get_var (HB_OT_METRICS_TAG_HORIZONTAL_LINE_GAP,
|
||||
c->plan->normalized_coords.arrayZ,
|
||||
c->plan->normalized_coords.length)));
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
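The block above re-bases hhea's ascender, descender, and lineGap through OS/2 and MVAR only when an instance is being cut at non-default coordinates. A sketch of the subset call that reaches this path, with the wght tag and value as placeholder assumptions:

#include <hb-subset.h>

/* Pin one axis so the plan carries normalized coordinates; the
 * instanced face then gets MVAR-adjusted hhea metrics as above. */
static hb_face_t *
instance_at_wght (hb_face_t *face, float wght)
{
  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
  if (!input) return NULL;

  hb_subset_input_pin_axis_location (input, face, HB_TAG ('w','g','h','t'), wght);

  hb_face_t *out = hb_subset_or_fail (face, input);
  hb_subset_input_destroy (input);
  return out;
}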
|
||||
|
||||
|
|
@ -374,7 +398,7 @@ struct hmtxvmtx
|
|||
|
||||
unsigned get_advance_with_var_unscaled (hb_codepoint_t glyph,
|
||||
hb_font_t *font,
|
||||
VariationStore::cache_t *store_cache = nullptr) const
|
||||
ItemVariationStore::cache_t *store_cache = nullptr) const
|
||||
{
|
||||
unsigned int advance = get_advance_without_var_unscaled (glyph);
|
||||
|
||||
|
|
@ -387,7 +411,8 @@ struct hmtxvmtx
|
|||
font->coords, font->num_coords,
|
||||
store_cache));
|
||||
|
||||
return _glyf_get_advance_with_var_unscaled (font, glyph, T::tableTag == HB_OT_TAG_vmtx);
|
||||
unsigned glyf_advance = _glyf_get_advance_with_var_unscaled (font, glyph, T::tableTag == HB_OT_TAG_vmtx);
|
||||
return glyf_advance ? glyf_advance : advance;
|
||||
#else
|
||||
return advance;
|
||||
#endif
|
||||
|
|
|
|||
|
|
@ -86,6 +86,16 @@ struct KernSubTableFormat3
|
|||
leftClassCount * rightClassCount));
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &left_set, set_t &right_set, unsigned num_glyphs) const
|
||||
{
|
||||
set_t set;
|
||||
if (likely (glyphCount))
|
||||
set.add_range (0, glyphCount - 1);
|
||||
left_set.union_ (set);
|
||||
right_set.union_ (set);
|
||||
}
|
||||
|
||||
protected:
|
||||
KernSubTableHeader
|
||||
header;
|
||||
|
|
@ -135,16 +145,29 @@ struct KernSubTable
|
|||
switch (subtable_type) {
|
||||
case 0: return_trace (c->dispatch (u.format0));
|
||||
#ifndef HB_NO_AAT_SHAPE
|
||||
case 1: return_trace (u.header.apple ? c->dispatch (u.format1, std::forward<Ts> (ds)...) : c->default_return_value ());
|
||||
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
|
||||
#endif
|
||||
case 2: return_trace (c->dispatch (u.format2));
|
||||
#ifndef HB_NO_AAT_SHAPE
|
||||
case 3: return_trace (u.header.apple ? c->dispatch (u.format3, std::forward<Ts> (ds)...) : c->default_return_value ());
|
||||
case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
|
||||
#endif
|
||||
default: return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
void collect_glyphs (set_t &left_set, set_t &right_set, unsigned num_glyphs) const
|
||||
{
|
||||
unsigned int subtable_type = get_type ();
|
||||
switch (subtable_type) {
|
||||
case 0: u.format0.collect_glyphs (left_set, right_set, num_glyphs); return;
|
||||
case 1: u.format1.collect_glyphs (left_set, right_set, num_glyphs); return;
|
||||
case 2: u.format2.collect_glyphs (left_set, right_set, num_glyphs); return;
|
||||
case 3: u.format3.collect_glyphs (left_set, right_set, num_glyphs); return;
|
||||
default: return;
|
||||
}
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -318,8 +341,9 @@ struct kern
|
|||
}
|
||||
}
|
||||
|
||||
bool apply (AAT::hb_aat_apply_context_t *c) const
|
||||
{ return dispatch (c); }
|
||||
bool apply (AAT::hb_aat_apply_context_t *c,
|
||||
const AAT::kern_accelerator_data_t *accel_data = nullptr) const
|
||||
{ return dispatch (c, accel_data); }
|
||||
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
|
|
@ -343,6 +367,41 @@ struct kern
|
|||
return_trace (dispatch (c));
|
||||
}
|
||||
|
||||
AAT::kern_accelerator_data_t create_accelerator_data (unsigned num_glyphs) const
|
||||
{
|
||||
switch (get_type ()) {
|
||||
case 0: return u.ot.create_accelerator_data (num_glyphs);
|
||||
#ifndef HB_NO_AAT_SHAPE
|
||||
case 1: return u.aat.create_accelerator_data (num_glyphs);
|
||||
#endif
|
||||
default:return AAT::kern_accelerator_data_t ();
|
||||
}
|
||||
}
|
||||
|
||||
struct accelerator_t
|
||||
{
|
||||
accelerator_t (hb_face_t *face)
|
||||
{
|
||||
hb_sanitize_context_t sc;
|
||||
this->table = sc.reference_table<kern> (face);
|
||||
this->accel_data = this->table->create_accelerator_data (face->get_num_glyphs ());
|
||||
}
|
||||
~accelerator_t ()
|
||||
{
|
||||
this->table.destroy ();
|
||||
}
|
||||
|
||||
hb_blob_t *get_blob () const { return table.get_blob (); }
|
||||
|
||||
bool apply (AAT::hb_aat_apply_context_t *c) const
|
||||
{
|
||||
return table->apply (c, &accel_data);
|
||||
}
|
||||
|
||||
hb_blob_ptr_t<kern> table;
|
||||
AAT::kern_accelerator_data_t accel_data;
|
||||
};
|
||||
|
||||
protected:
|
||||
union {
|
||||
HBUINT32 version32;
|
||||
|
|
@ -356,6 +415,10 @@ struct kern
|
|||
DEFINE_SIZE_UNION (4, version32);
|
||||
};
|
||||
|
||||
struct kern_accelerator_t : kern::accelerator_t {
|
||||
kern_accelerator_t (hb_face_t *face) : kern::accelerator_t (face) {}
|
||||
};
|
||||
|
||||
} /* namespace OT */
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -46,6 +46,12 @@ struct BaseCoordFormat1
|
|||
return HB_DIRECTION_IS_HORIZONTAL (direction) ? font->em_scale_y (coordinate) : font->em_scale_x (coordinate);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
return_trace ((bool) c->serializer->embed (*this));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -67,6 +73,17 @@ struct BaseCoordFormat2
|
|||
return HB_DIRECTION_IS_HORIZONTAL (direction) ? font->em_scale_y (coordinate) : font->em_scale_x (coordinate);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
return_trace (c->serializer->check_assign (out->referenceGlyph,
|
||||
c->plan->glyph_map->get (referenceGlyph),
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -86,7 +103,7 @@ struct BaseCoordFormat2
|
|||
struct BaseCoordFormat3
|
||||
{
|
||||
hb_position_t get_coord (hb_font_t *font,
|
||||
const VariationStore &var_store,
|
||||
const ItemVariationStore &var_store,
|
||||
hb_direction_t direction) const
|
||||
{
|
||||
const Device &device = this+deviceTable;
|
||||
|
|
@ -96,6 +113,37 @@ struct BaseCoordFormat3
|
|||
: font->em_scale_x (coordinate) + device.get_x_delta (font, var_store);
|
||||
}
|
||||
|
||||
void collect_variation_indices (hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
unsigned varidx = (this+deviceTable).get_variation_index ();
|
||||
varidx_set.add (varidx);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
if (!c->plan->pinned_at_default)
|
||||
{
|
||||
unsigned var_idx = (this+deviceTable).get_variation_index ();
|
||||
if (var_idx != VarIdx::NO_VARIATION)
|
||||
{
|
||||
hb_pair_t<unsigned, int> *v;
|
||||
if (!c->plan->base_variation_idx_map.has (var_idx, &v))
|
||||
return_trace (false);
|
||||
|
||||
if (unlikely (!c->serializer->check_assign (out->coordinate, coordinate + hb_second (*v),
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW)))
|
||||
return_trace (false);
|
||||
}
|
||||
}
|
||||
return_trace (out->deviceTable.serialize_copy (c->serializer, deviceTable,
|
||||
this, 0,
|
||||
hb_serialize_context_t::Head,
|
||||
&c->plan->base_variation_idx_map));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
|
|
@ -120,7 +168,7 @@ struct BaseCoord
|
|||
bool has_data () const { return u.format; }
|
||||
|
||||
hb_position_t get_coord (hb_font_t *font,
|
||||
const VariationStore &var_store,
|
||||
const ItemVariationStore &var_store,
|
||||
hb_direction_t direction) const
|
||||
{
|
||||
switch (u.format) {
|
||||
|
|
@ -131,6 +179,27 @@ struct BaseCoord
|
|||
}
|
||||
}
|
||||
|
||||
void collect_variation_indices (hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
switch (u.format) {
|
||||
case 3: u.format3.collect_variation_indices (varidx_set);
|
||||
default:return;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
|
||||
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
|
||||
case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -161,12 +230,37 @@ struct FeatMinMaxRecord
|
|||
|
||||
bool has_data () const { return tag; }
|
||||
|
||||
hb_tag_t get_feature_tag () const { return tag; }
|
||||
|
||||
void get_min_max (const BaseCoord **min, const BaseCoord **max) const
|
||||
{
|
||||
if (likely (min)) *min = &(this+minCoord);
|
||||
if (likely (max)) *max = &(this+maxCoord);
|
||||
}
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
const void *base,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
if (!plan->layout_features.has (tag))
|
||||
return;
|
||||
|
||||
(base+minCoord).collect_variation_indices (varidx_set);
|
||||
(base+maxCoord).collect_variation_indices (varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const void *base) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
if (!(out->minCoord.serialize_subset (c, minCoord, base)))
|
||||
return_trace (false);
|
||||
|
||||
return_trace (out->maxCoord.serialize_subset (c, maxCoord, base));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -206,6 +300,39 @@ struct MinMax
|
|||
}
|
||||
}
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
(this+minCoord).collect_variation_indices (varidx_set);
|
||||
(this+maxCoord).collect_variation_indices (varidx_set);
|
||||
for (const FeatMinMaxRecord& record : featMinMaxRecords)
|
||||
record.collect_variation_indices (plan, this, varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
if (!(out->minCoord.serialize_subset (c, minCoord, this)) ||
|
||||
!(out->maxCoord.serialize_subset (c, maxCoord, this)))
|
||||
return_trace (false);
|
||||
|
||||
unsigned len = 0;
|
||||
for (const FeatMinMaxRecord& _ : featMinMaxRecords)
|
||||
{
|
||||
hb_tag_t feature_tag = _.get_feature_tag ();
|
||||
if (!c->plan->layout_features.has (feature_tag))
|
||||
continue;
|
||||
|
||||
if (!_.subset (c, this)) return false;
|
||||
len++;
|
||||
}
|
||||
return_trace (c->serializer->check_assign (out->featMinMaxRecords.len, len,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -240,6 +367,26 @@ struct BaseValues
|
|||
return this+baseCoords[baseline_tag_index];
|
||||
}
|
||||
|
||||
void collect_variation_indices (hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
for (const auto& _ : baseCoords)
|
||||
(this+_).collect_variation_indices (varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
out->defaultIndex = defaultIndex;
|
||||
|
||||
for (const auto& _ : baseCoords)
|
||||
if (!subset_offset_array (c, out->baseCoords, this) (_))
|
||||
return_trace (false);
|
||||
|
||||
return_trace (bool (out->baseCoords));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -268,7 +415,22 @@ struct BaseLangSysRecord
|
|||
|
||||
bool has_data () const { return baseLangSysTag; }
|
||||
|
||||
const MinMax &get_min_max () const { return this+minMax; }
|
||||
const MinMax &get_min_max (const void* base) const { return base+minMax; }
|
||||
|
||||
void collect_variation_indices (const void* base,
|
||||
const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{ (base+minMax).collect_variation_indices (plan, varidx_set); }
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const void *base) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
return_trace (out->minMax.serialize_subset (c, minMax, base));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
{
|
||||
|
|
@ -291,7 +453,7 @@ struct BaseScript
|
|||
const MinMax &get_min_max (hb_tag_t language_tag) const
|
||||
{
|
||||
const BaseLangSysRecord& record = baseLangSysRecords.bsearch (language_tag);
|
||||
return record.has_data () ? record.get_min_max () : this+defaultMinMax;
|
||||
return record.has_data () ? record.get_min_max (this) : this+defaultMinMax;
|
||||
}
|
||||
|
||||
const BaseCoord &get_base_coord (int baseline_tag_index) const
|
||||
|
|
@ -300,6 +462,35 @@ struct BaseScript
|
|||
bool has_values () const { return baseValues; }
|
||||
bool has_min_max () const { return defaultMinMax; /* TODO What if only per-language is present? */ }
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
(this+baseValues).collect_variation_indices (varidx_set);
|
||||
(this+defaultMinMax).collect_variation_indices (plan, varidx_set);
|
||||
|
||||
for (const BaseLangSysRecord& _ : baseLangSysRecords)
|
||||
_.collect_variation_indices (this, plan, varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
if (baseValues && !out->baseValues.serialize_subset (c, baseValues, this))
|
||||
return_trace (false);
|
||||
|
||||
if (defaultMinMax && !out->defaultMinMax.serialize_subset (c, defaultMinMax, this))
|
||||
return_trace (false);
|
||||
|
||||
for (const auto& _ : baseLangSysRecords)
|
||||
if (!_.subset (c, this)) return_trace (false);
|
||||
|
||||
return_trace (c->serializer->check_assign (out->baseLangSysRecords.len, baseLangSysRecords.len,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -332,9 +523,31 @@ struct BaseScriptRecord
|
|||
|
||||
bool has_data () const { return baseScriptTag; }
|
||||
|
||||
hb_tag_t get_script_tag () const { return baseScriptTag; }
|
||||
|
||||
const BaseScript &get_base_script (const BaseScriptList *list) const
|
||||
{ return list+baseScript; }
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
const void* list,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
if (!plan->layout_scripts.has (baseScriptTag))
|
||||
return;
|
||||
|
||||
(list+baseScript).collect_variation_indices (plan, varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const void *base) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
return_trace (out->baseScript.serialize_subset (c, baseScript, base));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -361,6 +574,33 @@ struct BaseScriptList
|
|||
return record->has_data () ? record->get_base_script (this) : Null (BaseScript);
|
||||
}
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
for (const BaseScriptRecord& _ : baseScriptRecords)
|
||||
_.collect_variation_indices (plan, this, varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
unsigned len = 0;
|
||||
for (const BaseScriptRecord& _ : baseScriptRecords)
|
||||
{
|
||||
hb_tag_t script_tag = _.get_script_tag ();
|
||||
if (!c->plan->layout_scripts.has (script_tag))
|
||||
continue;
|
||||
|
||||
if (!_.subset (c, this)) return false;
|
||||
len++;
|
||||
}
|
||||
return_trace (c->serializer->check_assign (out->baseScriptRecords.len, len,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -422,6 +662,20 @@ struct Axis
|
|||
return true;
|
||||
}
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{ (this+baseScriptList).collect_variation_indices (plan, varidx_set); }
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
out->baseTagList.serialize_copy (c->serializer, baseTagList, this);
|
||||
return_trace (out->baseScriptList.serialize_subset (c, baseScriptList, this));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
|
@ -453,8 +707,77 @@ struct BASE
|
|||
const Axis &get_axis (hb_direction_t direction) const
|
||||
{ return HB_DIRECTION_IS_VERTICAL (direction) ? this+vAxis : this+hAxis; }
|
||||
|
||||
const VariationStore &get_var_store () const
|
||||
{ return version.to_int () < 0x00010001u ? Null (VariationStore) : this+varStore; }
|
||||
bool has_var_store () const
|
||||
{ return version.to_int () >= 0x00010001u && varStore != 0; }
|
||||
|
||||
const ItemVariationStore &get_var_store () const
|
||||
{ return version.to_int () < 0x00010001u ? Null (ItemVariationStore) : this+varStore; }
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
(this+hAxis).collect_variation_indices (plan, varidx_set);
|
||||
(this+vAxis).collect_variation_indices (plan, varidx_set);
|
||||
}
|
||||
|
||||
bool subset_varstore (hb_subset_context_t *c,
|
||||
BASE *out /* OUT */) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
if (!c->serializer->allocate_size<Offset32To<ItemVariationStore>> (Offset32To<ItemVariationStore>::static_size))
|
||||
return_trace (false);
|
||||
if (!c->plan->normalized_coords)
|
||||
return_trace (out->varStore.serialize_subset (c, varStore, this, c->plan->base_varstore_inner_maps.as_array ()));
|
||||
|
||||
if (c->plan->all_axes_pinned)
|
||||
return_trace (true);
|
||||
|
||||
item_variations_t item_vars;
|
||||
if (!item_vars.instantiate (this+varStore, c->plan, true, true,
|
||||
c->plan->base_varstore_inner_maps.as_array ()))
|
||||
return_trace (false);
|
||||
|
||||
if (!out->varStore.serialize_serialize (c->serializer,
|
||||
item_vars.has_long_word (),
|
||||
c->plan->axis_tags,
|
||||
item_vars.get_region_list (),
|
||||
item_vars.get_vardata_encodings ()))
|
||||
return_trace (false);
|
||||
|
||||
const hb_map_t &varidx_map = item_vars.get_varidx_map ();
|
||||
/* base_variation_idx_map in the plan is old_varidx->(varidx, delta)
|
||||
* mapping, new varidx is generated for subsetting, we need to remap this
|
||||
* after instancing */
|
||||
for (auto _ : c->plan->base_variation_idx_map.iter_ref ())
|
||||
{
|
||||
uint32_t varidx = _.second.first;
|
||||
uint32_t *new_varidx;
|
||||
if (varidx_map.has (varidx, &new_varidx))
|
||||
_.second.first = *new_varidx;
|
||||
else
|
||||
_.second.first = HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
|
||||
}
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
out->version = version;
|
||||
if (has_var_store () && !subset_varstore (c, out))
|
||||
return_trace (false);
|
||||
|
||||
if (hAxis && !out->hAxis.serialize_subset (c, hAxis, this))
|
||||
return_trace (false);
|
||||
|
||||
if (vAxis && !out->vAxis.serialize_subset (c, vAxis, this))
|
||||
return_trace (false);
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool get_baseline (hb_font_t *font,
|
||||
hb_tag_t baseline_tag,
|
||||
|
|
@ -487,7 +810,7 @@ struct BASE
|
|||
&min_coord, &max_coord))
|
||||
return false;
|
||||
|
||||
const VariationStore &var_store = get_var_store ();
|
||||
const ItemVariationStore &var_store = get_var_store ();
|
||||
if (likely (min && min_coord)) *min = min_coord->get_coord (font, var_store, direction);
|
||||
if (likely (max && max_coord)) *max = max_coord->get_coord (font, var_store, direction);
|
||||
return true;
|
||||
|
|
@ -510,7 +833,7 @@ struct BASE
|
|||
* of BASE table (may be NULL) */
|
||||
Offset16To<Axis>vAxis; /* Offset to vertical Axis table, from beginning
|
||||
* of BASE table (may be NULL) */
|
||||
Offset32To<VariationStore>
|
||||
Offset32To<ItemVariationStore>
|
||||
varStore; /* Offset to the table of Item Variation
|
||||
* Store--from beginning of BASE
|
||||
* header (may be NULL). Introduced
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -708,8 +708,8 @@ struct hb_ot_apply_context_t :
|
|||
recurse_func_t recurse_func = nullptr;
|
||||
const GDEF &gdef;
|
||||
const GDEF::accelerator_t &gdef_accel;
|
||||
const VariationStore &var_store;
|
||||
VariationStore::cache_t *var_store_cache;
|
||||
const ItemVariationStore &var_store;
|
||||
ItemVariationStore::cache_t *var_store_cache;
|
||||
hb_set_digest_t digest;
|
||||
|
||||
hb_direction_t direction;
|
||||
|
|
@ -723,7 +723,6 @@ struct hb_ot_apply_context_t :
|
|||
bool auto_zwj = true;
|
||||
bool per_syllable = false;
|
||||
bool random = false;
|
||||
uint32_t random_state = 1;
|
||||
unsigned new_syllables = (unsigned) -1;
|
||||
|
||||
signed last_base = -1; // GPOS uses
|
||||
|
|
@ -766,7 +765,7 @@ struct hb_ot_apply_context_t :
|
|||
~hb_ot_apply_context_t ()
|
||||
{
|
||||
#ifndef HB_NO_VAR
|
||||
VariationStore::destroy_cache (var_store_cache);
|
||||
ItemVariationStore::destroy_cache (var_store_cache);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
|
@ -788,8 +787,8 @@ struct hb_ot_apply_context_t :
|
|||
uint32_t random_number ()
|
||||
{
|
||||
/* http://www.cplusplus.com/reference/random/minstd_rand/ */
|
||||
random_state = random_state * 48271 % 2147483647;
|
||||
return random_state;
|
||||
buffer->random_state = buffer->random_state * 48271 % 2147483647;
|
||||
return buffer->random_state;
|
||||
}
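With the 'rand' feature's RNG state kept on the buffer rather than the apply context, the sequence of chosen alternates follows the buffer's seed, which the public API can set. A short sketch of seeding it for reproducible output; the text and feature string are placeholder inputs:

#include <hb.h>

/* Shape with the 'rand' feature using a fixed per-buffer seed, so the
 * chosen alternates are the same on every run. */
static void
shape_with_seeded_rand (hb_font_t *font, const char *text /* placeholder */)
{
  hb_buffer_t *buf = hb_buffer_create ();
  hb_buffer_add_utf8 (buf, text, -1, 0, -1);
  hb_buffer_guess_segment_properties (buf);
  hb_buffer_set_random_state (buf, 1);   /* same seed, same output */

  hb_feature_t rand_feature;
  hb_feature_from_string ("rand", -1, &rand_feature);
  hb_shape (font, buf, &rand_feature, 1);

  hb_buffer_destroy (buf);
}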
|
||||
|
||||
bool match_properties_mark (hb_codepoint_t glyph,
|
||||
|
|
@ -1255,7 +1254,7 @@ static bool match_input (hb_ot_apply_context_t *c,
|
|||
match_func_t match_func,
|
||||
const void *match_data,
|
||||
unsigned int *end_position,
|
||||
unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
|
||||
unsigned int *match_positions,
|
||||
unsigned int *p_total_component_count = nullptr)
|
||||
{
|
||||
TRACE_APPLY (nullptr);
|
||||
|
|
@ -1379,7 +1378,7 @@ static bool match_input (hb_ot_apply_context_t *c,
|
|||
}
|
||||
static inline bool ligate_input (hb_ot_apply_context_t *c,
|
||||
unsigned int count, /* Including the first glyph */
|
||||
const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
|
||||
const unsigned int *match_positions, /* Including the first glyph */
|
||||
unsigned int match_end,
|
||||
hb_codepoint_t lig_glyph,
|
||||
unsigned int total_component_count)
|
||||
|
|
@ -1687,7 +1686,7 @@ static inline void recurse_lookups (context_t *c,
|
|||
|
||||
static inline void apply_lookup (hb_ot_apply_context_t *c,
|
||||
unsigned int count, /* Including the first glyph */
|
||||
unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
|
||||
unsigned int *match_positions, /* Including the first glyph */
|
||||
unsigned int lookupCount,
|
||||
const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
|
||||
unsigned int match_end)
|
||||
|
|
@ -1695,6 +1694,9 @@ static inline void apply_lookup (hb_ot_apply_context_t *c,
|
|||
hb_buffer_t *buffer = c->buffer;
|
||||
int end;
|
||||
|
||||
unsigned int *match_positions_input = match_positions;
|
||||
unsigned int match_positions_count = count;
|
||||
|
||||
/* All positions are distance from beginning of *output* buffer.
|
||||
* Adjust. */
|
||||
{
|
||||
|
|
@ -1798,6 +1800,27 @@ static inline void apply_lookup (hb_ot_apply_context_t *c,
|
|||
{
|
||||
if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
|
||||
break;
|
||||
if (unlikely (delta + count > match_positions_count))
|
||||
{
|
||||
unsigned new_match_positions_count = hb_max (delta + count, hb_max(match_positions_count, 4u) * 1.5);
|
||||
if (match_positions == match_positions_input)
|
||||
{
|
||||
match_positions = (unsigned int *) hb_malloc (new_match_positions_count * sizeof (match_positions[0]));
|
||||
if (unlikely (!match_positions))
|
||||
break;
|
||||
memcpy (match_positions, match_positions_input, count * sizeof (match_positions[0]));
|
||||
match_positions_count = new_match_positions_count;
|
||||
}
|
||||
else
|
||||
{
|
||||
unsigned int *new_match_positions = (unsigned int *) hb_realloc (match_positions, new_match_positions_count * sizeof (match_positions[0]));
|
||||
if (unlikely (!new_match_positions))
|
||||
break;
|
||||
match_positions = new_match_positions;
|
||||
match_positions_count = new_match_positions_count;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
|
|
@ -1821,6 +1844,9 @@ static inline void apply_lookup (hb_ot_apply_context_t *c,
|
|||
match_positions[next] += delta;
|
||||
}
|
||||
|
||||
if (match_positions != match_positions_input)
|
||||
hb_free (match_positions);
|
||||
|
||||
(void) buffer->move_to (end);
|
||||
}
|
||||
|
||||
|
|
@ -1921,8 +1947,18 @@ static bool context_apply_lookup (hb_ot_apply_context_t *c,
|
|||
const LookupRecord lookupRecord[],
|
||||
const ContextApplyLookupContext &lookup_context)
|
||||
{
|
||||
if (unlikely (inputCount > HB_MAX_CONTEXT_LENGTH)) return false;
|
||||
unsigned match_positions_stack[4];
|
||||
unsigned *match_positions = match_positions_stack;
|
||||
if (unlikely (inputCount > ARRAY_LENGTH (match_positions_stack)))
|
||||
{
|
||||
match_positions = (unsigned *) hb_malloc (hb_max (inputCount, 1u) * sizeof (match_positions[0]));
|
||||
if (unlikely (!match_positions))
|
||||
return false;
|
||||
}
|
||||
|
||||
unsigned match_end = 0;
|
||||
unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
|
||||
bool ret = false;
|
||||
if (match_input (c,
|
||||
inputCount, input,
|
||||
lookup_context.funcs.match, lookup_context.match_data,
|
||||
|
|
@ -1933,13 +1969,18 @@ static bool context_apply_lookup (hb_ot_apply_context_t *c,
|
|||
inputCount, match_positions,
|
||||
lookupCount, lookupRecord,
|
||||
match_end);
|
||||
return true;
|
||||
ret = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
|
||||
return false;
|
||||
ret = false;
|
||||
}
|
||||
|
||||
if (unlikely (match_positions != match_positions_stack))
|
||||
hb_free (match_positions);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
template <typename Types>
|
||||
|
|
@ -3019,9 +3060,20 @@ static bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
|
|||
const LookupRecord lookupRecord[],
|
||||
const ChainContextApplyLookupContext &lookup_context)
|
||||
{
|
||||
if (unlikely (inputCount > HB_MAX_CONTEXT_LENGTH)) return false;
|
||||
unsigned match_positions_stack[4];
|
||||
unsigned *match_positions = match_positions_stack;
|
||||
if (unlikely (inputCount > ARRAY_LENGTH (match_positions_stack)))
|
||||
{
|
||||
match_positions = (unsigned *) hb_malloc (hb_max (inputCount, 1u) * sizeof (match_positions[0]));
|
||||
if (unlikely (!match_positions))
|
||||
return false;
|
||||
}
|
||||
|
||||
unsigned start_index = c->buffer->out_len;
|
||||
unsigned end_index = c->buffer->idx;
|
||||
unsigned match_end = 0;
|
||||
unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
|
||||
bool ret = true;
|
||||
if (!(match_input (c,
|
||||
inputCount, input,
|
||||
lookup_context.funcs.match[1], lookup_context.match_data[1],
|
||||
|
|
@ -3032,17 +3084,18 @@ static bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
|
|||
match_end, &end_index)))
|
||||
{
|
||||
c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
|
||||
return false;
|
||||
ret = false;
|
||||
goto done;
|
||||
}
|
||||
|
||||
unsigned start_index = c->buffer->out_len;
|
||||
if (!match_backtrack (c,
|
||||
backtrackCount, backtrack,
|
||||
lookup_context.funcs.match[0], lookup_context.match_data[0],
|
||||
&start_index))
|
||||
{
|
||||
c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
|
||||
return false;
|
||||
ret = false;
|
||||
goto done;
|
||||
}
|
||||
|
||||
c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
|
||||
|
|
@ -3050,7 +3103,12 @@ static bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
|
|||
inputCount, match_positions,
|
||||
lookupCount, lookupRecord,
|
||||
match_end);
|
||||
return true;
|
||||
done:
|
||||
|
||||
if (unlikely (match_positions != match_positions_stack))
|
||||
hb_free (match_positions);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
template <typename Types>
|
||||
|
|
@ -4329,7 +4387,7 @@ struct hb_ot_layout_lookup_accelerator_t
|
|||
|
||||
thiz->digest.init ();
|
||||
for (auto& subtable : hb_iter (thiz->subtables, count))
|
||||
thiz->digest.add (subtable.digest);
|
||||
thiz->digest.union_ (subtable.digest);
|
||||
|
||||
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
|
||||
thiz->cache_user_idx = c_accelerate_subtables.cache_user_idx;
|
||||
|
|
@ -4616,13 +4674,14 @@ struct GSUBGPOS
|
|||
{ return get_feature_list ().find_index (tag, index); }
|
||||
|
||||
bool find_variations_index (const int *coords, unsigned int num_coords,
|
||||
unsigned int *index) const
|
||||
unsigned int *index,
|
||||
ItemVarStoreInstancer *instancer) const
|
||||
{
|
||||
#ifdef HB_NO_VAR
|
||||
*index = FeatureVariations::NOT_FOUND_INDEX;
|
||||
return false;
|
||||
#endif
|
||||
return get_feature_variations ().find_index (coords, num_coords, index);
|
||||
return get_feature_variations ().find_index (coords, num_coords, index, instancer);
|
||||
}
|
||||
const Feature& get_feature_variation (unsigned int feature_index,
|
||||
unsigned int variations_index) const
|
||||
|
|
|
|||
|
|
@ -87,7 +87,7 @@ using OT::Layout::GPOS;
bool
hb_ot_layout_has_kerning (hb_face_t *face)
{
return face->table.kern->has_data ();
return face->table.kern->table->has_data ();
}

/**

@ -103,7 +103,7 @@ hb_ot_layout_has_kerning (hb_face_t *face)
bool
hb_ot_layout_has_machine_kerning (hb_face_t *face)
{
return face->table.kern->has_state_machine ();
return face->table.kern->table->has_state_machine ();
}

/**

@ -123,7 +123,7 @@ hb_ot_layout_has_machine_kerning (hb_face_t *face)
bool
hb_ot_layout_has_cross_kerning (hb_face_t *face)
{
return face->table.kern->has_cross_stream ();
return face->table.kern->table->has_cross_stream ();
}

void

@ -132,7 +132,7 @@ hb_ot_layout_kern (const hb_ot_shape_plan_t *plan,
hb_buffer_t *buffer)
{
hb_blob_t *blob = font->face->table.kern.get_blob ();
const AAT::kern& kern = *blob->as<AAT::kern> ();
const auto& kern = *font->face->table.kern;

AAT::hb_aat_apply_context_t c (plan, font, buffer, blob);

@ -1443,8 +1443,12 @@ hb_ot_layout_table_find_feature_variations (hb_face_t *face,
unsigned int *variations_index /* out */)
{
const OT::GSUBGPOS &g = get_gsubgpos_table (face, table_tag);
const OT::GDEF &gdef = *face->table.GDEF->table;

return g.find_variations_index (coords, num_coords, variations_index);
auto instancer = OT::ItemVarStoreInstancer(&gdef.get_var_store(), nullptr,
hb_array (coords, num_coords));

return g.find_variations_index (coords, num_coords, variations_index, &instancer);
}

@ -2127,7 +2131,7 @@ hb_ot_layout_get_font_extents (hb_font_t *font,
hb_tag_t language_tag,
hb_font_extents_t *extents)
{
hb_position_t min, max;
hb_position_t min = 0, max = 0;
if (font->face->table.BASE->get_min_max (font, direction, script_tag, language_tag, HB_TAG_NONE,
&min, &max))
{
@ -344,27 +344,20 @@ struct MathKern
const MathValueRecord* kernValue = mathValueRecordsZ.arrayZ + heightCount;
int sign = font->y_scale < 0 ? -1 : +1;

/* The description of the MathKern table is a ambiguous, but interpreting
* "between the two heights found at those indexes" for 0 < i < len as
*
* correctionHeight[i-1] < correction_height <= correctionHeight[i]
*
* makes the result consistent with the limit cases and we can just use the
* binary search algorithm of std::upper_bound:
/* According to OpenType spec (v1.9), except for the boundary cases, the index
* chosen for kern value should be i such that
* correctionHeight[i-1] <= correction_height < correctionHeight[i]
* We can use the binary search algorithm of std::upper_bound(). Or, we can
* use the internal hb_bsearch_impl.
*/
unsigned int i = 0;
unsigned int count = heightCount;
while (count > 0)
{
unsigned int half = count / 2;
hb_position_t height = correctionHeight[i + half].get_y_value (font, this);
if (sign * height < sign * correction_height)
{
i += half + 1;
count -= half + 1;
} else
count = half;
}
unsigned int pos;
auto cmp = +[](const void* key, const void* p,
int sign, hb_font_t* font, const MathKern* mathKern) -> int {
return sign * *(hb_position_t*)key - sign * ((MathValueRecord*)p)->get_y_value(font, mathKern);
};
unsigned int i = hb_bsearch_impl(&pos, correction_height, correctionHeight,
heightCount, MathValueRecord::static_size,
cmp, sign, font, this) ? pos + 1 : pos;
return kernValue[i].get_x_value (font, this);
}
@ -223,7 +223,7 @@ struct OS2
}
}

return num ? (unsigned) roundf (total_width / num) : 0;
return num ? (unsigned) roundf ((double) total_width / (double) num) : 0;
}

bool subset (hb_subset_context_t *c) const

@ -272,7 +272,7 @@ struct OS2
Triple *axis_range;
if (c->plan->user_axes_location.has (HB_TAG ('w','g','h','t'), &axis_range))
{
unsigned weight_class = static_cast<unsigned> (roundf (hb_clamp (axis_range->middle, 1.0f, 1000.0f)));
unsigned weight_class = static_cast<unsigned> (roundf (hb_clamp (axis_range->middle, 1.0, 1000.0)));
if (os2_prime->usWeightClass != weight_class)
os2_prime->usWeightClass = weight_class;
}

@ -284,12 +284,12 @@ struct OS2
os2_prime->usWidthClass = width_class;
}

if (c->plan->flags & HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES)
return_trace (true);

os2_prime->usFirstCharIndex = hb_min (0xFFFFu, c->plan->unicodes.get_min ());
os2_prime->usLastCharIndex = hb_min (0xFFFFu, c->plan->unicodes.get_max ());

if (c->plan->flags & HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES)
return_trace (true);

_update_unicode_ranges (&c->plan->unicodes, os2_prime->ulUnicodeRange);

return_trace (true);
@ -116,7 +116,7 @@ struct post
Triple *axis_range;
if (c->plan->user_axes_location.has (HB_TAG ('s','l','n','t'), &axis_range))
{
float italic_angle = hb_max (-90.f, hb_min (axis_range->middle, 90.f));
float italic_angle = hb_max (-90.0, hb_min (axis_range->middle, 90.0));
if (post_prime->italicAngle.to_float () != italic_angle)
post_prime->italicAngle.set_float (italic_angle);
}
@ -74,23 +74,6 @@
* Indic shaper may want to disallow recomposing of two matras.
*/

static bool
decompose_unicode (const hb_ot_shape_normalize_context_t *c,
hb_codepoint_t ab,
hb_codepoint_t *a,
hb_codepoint_t *b)
{
return (bool) c->unicode->decompose (ab, a, b);
}

static bool
compose_unicode (const hb_ot_shape_normalize_context_t *c,
hb_codepoint_t a,
hb_codepoint_t b,
hb_codepoint_t *ab)
{
return (bool) c->unicode->compose (a, b, ab);
}

static inline void
set_glyph (hb_glyph_info_t &info, hb_font_t *font)

@ -307,15 +290,14 @@ _hb_ot_shape_normalize (const hb_ot_shape_plan_t *plan,
mode = HB_OT_SHAPE_NORMALIZATION_MODE_COMPOSED_DIACRITICS;
}

const hb_ot_shape_normalize_context_t c = {
hb_ot_shape_normalize_context_t c = {
plan,
buffer,
font,
buffer->unicode,
buffer->not_found,
plan->shaper->decompose ? plan->shaper->decompose : decompose_unicode,
plan->shaper->compose ? plan->shaper->compose : compose_unicode
};
c.override_decompose_and_compose (plan->shaper->decompose, plan->shaper->compose);

bool always_short_circuit = mode == HB_OT_SHAPE_NORMALIZATION_MODE_NONE;
bool might_short_circuit = always_short_circuit ||
@ -28,6 +28,7 @@
#define HB_OT_SHAPE_NORMALIZE_HH

#include "hb.hh"
#include "hb-unicode.hh"

/* buffer var allocations, used during the normalization process */

@ -52,6 +53,38 @@ HB_INTERNAL void _hb_ot_shape_normalize (const hb_ot_shape_plan_t *shaper,

struct hb_ot_shape_normalize_context_t
{
static bool
decompose_unicode (const hb_ot_shape_normalize_context_t *c,
hb_codepoint_t ab,
hb_codepoint_t *a,
hb_codepoint_t *b)
{
return (bool) c->unicode->decompose (ab, a, b);
}

static bool
compose_unicode (const hb_ot_shape_normalize_context_t *c,
hb_codepoint_t a,
hb_codepoint_t b,
hb_codepoint_t *ab)
{
return (bool) c->unicode->compose (a, b, ab);
}

void
override_decompose_and_compose (bool (*decompose) (const hb_ot_shape_normalize_context_t *c,
hb_codepoint_t ab,
hb_codepoint_t *a,
hb_codepoint_t *b),
bool (*compose) (const hb_ot_shape_normalize_context_t *c,
hb_codepoint_t a,
hb_codepoint_t b,
hb_codepoint_t *ab))
{
this->decompose = decompose ? decompose : decompose_unicode;
this->compose = compose ? compose : compose_unicode;
}

const hb_ot_shape_plan_t *plan;
hb_buffer_t *buffer;
hb_font_t *font;
@ -85,7 +85,7 @@ hb_ot_shape_planner_t::hb_ot_shape_planner_t (hb_face_t *fac
, apply_morx (_hb_apply_morx (face, props))
#endif
{
shaper = hb_ot_shaper_categorize (this);
shaper = hb_ot_shaper_categorize (props.script, props.direction, map.chosen_script[0]);

script_zero_marks = shaper->zero_width_marks != HB_OT_SHAPE_ZERO_WIDTH_MARKS_NONE;
script_fallback_mark_positioning = shaper->fallback_position;

@ -155,7 +155,7 @@ hb_ot_shape_planner_t::compile (hb_ot_shape_plan_t &plan,
#endif
bool has_gpos = !disable_gpos && hb_ot_layout_has_positioning (face);
if (false)
;
{}
#ifndef HB_NO_AAT_SHAPE
/* Prefer GPOS over kerx if GSUB is present;
* https://github.com/harfbuzz/harfbuzz/issues/3008 */

@ -167,15 +167,16 @@ hb_ot_shape_planner_t::compile (hb_ot_shape_plan_t &plan,

if (!plan.apply_kerx && (!has_gpos_kern || !plan.apply_gpos))
{
if (false) {}
#ifndef HB_NO_AAT_SHAPE
if (has_kerx)
else if (has_kerx)
plan.apply_kerx = true;
else
#endif
#ifndef HB_NO_OT_KERN
if (hb_ot_layout_has_kerning (face))
else if (hb_ot_layout_has_kerning (face))
plan.apply_kern = true;
#endif
else {}
}

plan.apply_fallback_kern = !(plan.apply_gpos || plan.apply_kerx || plan.apply_kern);
@ -560,9 +560,9 @@ apply_stch (const hb_ot_shape_plan_t *plan HB_UNUSED,

DEBUG_MSG (ARABIC, nullptr, "%s stretch at (%u,%u,%u)",
step == MEASURE ? "measuring" : "cutting", context, start, end);
DEBUG_MSG (ARABIC, nullptr, "rest of word: count=%u width %d", start - context, w_total);
DEBUG_MSG (ARABIC, nullptr, "fixed tiles: count=%d width=%d", n_fixed, w_fixed);
DEBUG_MSG (ARABIC, nullptr, "repeating tiles: count=%d width=%d", n_repeating, w_repeating);
DEBUG_MSG (ARABIC, nullptr, "rest of word: count=%u width %" PRId32, start - context, w_total);
DEBUG_MSG (ARABIC, nullptr, "fixed tiles: count=%d width=%" PRId32, n_fixed, w_fixed);
DEBUG_MSG (ARABIC, nullptr, "repeating tiles: count=%d width=%" PRId32, n_repeating, w_repeating);

/* Number of additional times to repeat each repeating tile. */
int n_copies = 0;

@ -602,7 +602,7 @@ apply_stch (const hb_ot_shape_plan_t *plan HB_UNUSED,
if (info[k - 1].arabic_shaping_action() == STCH_REPEATING)
repeat += n_copies;

DEBUG_MSG (ARABIC, nullptr, "appending %u copies of glyph %u; j=%u",
DEBUG_MSG (ARABIC, nullptr, "appending %u copies of glyph %" PRIu32 "; j=%u",
repeat, info[k - 1].codepoint, j);
pos[k - 1].x_advance = 0;
for (unsigned int n = 0; n < repeat; n++)
@ -78,7 +78,7 @@ compose_hebrew (const hb_ot_shape_normalize_context_t *c,
return found;
#endif

if (!found && !c->plan->has_gpos_mark)
if (!found && (c->plan && !c->plan->has_gpos_mark))
{
/* Special-case Hebrew presentation forms that are excluded from
* standard normalization, but wanted for old fonts. */
@ -174,9 +174,11 @@ HB_OT_SHAPERS_IMPLEMENT_SHAPERS

static inline const hb_ot_shaper_t *
hb_ot_shaper_categorize (const hb_ot_shape_planner_t *planner)
hb_ot_shaper_categorize (hb_script_t script,
hb_direction_t direction,
hb_tag_t gsub_script)
{
switch ((hb_tag_t) planner->props.script)
switch ((hb_tag_t) script)
{
default:
return &_hb_ot_shaper_default;

@ -192,9 +194,8 @@ hb_ot_shaper_categorize (const hb_ot_shape_planner_t *planner)
* This is because we do fallback shaping for Arabic script (and not others).
* But note that Arabic shaping is applicable only to horizontal layout; for
* vertical text, just use the generic shaper instead. */
if ((planner->map.chosen_script[0] != HB_OT_TAG_DEFAULT_SCRIPT ||
planner->props.script == HB_SCRIPT_ARABIC) &&
HB_DIRECTION_IS_HORIZONTAL(planner->props.direction))
if ((gsub_script != HB_OT_TAG_DEFAULT_SCRIPT || script == HB_SCRIPT_ARABIC) &&
HB_DIRECTION_IS_HORIZONTAL (direction))
return &_hb_ot_shaper_arabic;
else
return &_hb_ot_shaper_default;

@ -235,10 +236,10 @@ hb_ot_shaper_categorize (const hb_ot_shape_planner_t *planner)
* Otherwise, use the specific shaper.
*
* If it's indy3 tag, send to USE. */
if (planner->map.chosen_script[0] == HB_TAG ('D','F','L','T') ||
planner->map.chosen_script[0] == HB_TAG ('l','a','t','n'))
if (gsub_script == HB_TAG ('D','F','L','T') ||
gsub_script == HB_TAG ('l','a','t','n'))
return &_hb_ot_shaper_default;
else if ((planner->map.chosen_script[0] & 0x000000FF) == '3')
else if ((gsub_script & 0x000000FF) == '3')
return &_hb_ot_shaper_use;
else
return &_hb_ot_shaper_indic;

@ -254,9 +255,9 @@ hb_ot_shaper_categorize (const hb_ot_shape_planner_t *planner)
* If designer designed for 'mymr' tag, also send to default
* shaper. That's tag used from before Myanmar shaping spec
* was developed. The shaping spec uses 'mym2' tag. */
if (planner->map.chosen_script[0] == HB_TAG ('D','F','L','T') ||
planner->map.chosen_script[0] == HB_TAG ('l','a','t','n') ||
planner->map.chosen_script[0] == HB_TAG ('m','y','m','r'))
if (gsub_script == HB_TAG ('D','F','L','T') ||
gsub_script == HB_TAG ('l','a','t','n') ||
gsub_script == HB_TAG ('m','y','m','r'))
return &_hb_ot_shaper_default;
else
return &_hb_ot_shaper_myanmar;

@ -391,8 +392,8 @@ hb_ot_shaper_categorize (const hb_ot_shape_planner_t *planner)
* Otherwise, use the specific shaper.
* Note that for some simple scripts, there may not be *any*
* GSUB/GPOS needed, so there may be no scripts found! */
if (planner->map.chosen_script[0] == HB_TAG ('D','F','L','T') ||
planner->map.chosen_script[0] == HB_TAG ('l','a','t','n'))
if (gsub_script == HB_TAG ('D','F','L','T') ||
gsub_script == HB_TAG ('l','a','t','n'))
return &_hb_ot_shaper_default;
else
return &_hb_ot_shaper_use;
@ -63,8 +63,9 @@ static bool axis_value_is_outside_axis_range (hb_tag_t axis_tag, float axis_valu
if (!user_axes_location->has (axis_tag))
return false;

double axis_value_double = static_cast<double>(axis_value);
Triple axis_range = user_axes_location->get (axis_tag);
return (axis_value < axis_range.minimum || axis_value > axis_range.maximum);
return (axis_value_double < axis_range.minimum || axis_value_double > axis_range.maximum);
}

struct StatAxisRecord

@ -349,7 +350,7 @@ struct AxisValueFormat4

struct AxisValue
{
bool get_value (unsigned int axis_index) const
float get_value (unsigned int axis_index) const
{
switch (u.format)
{

@ -357,7 +358,7 @@ struct AxisValue
case 2: return u.format2.get_value ();
case 3: return u.format3.get_value ();
case 4: return u.format4.get_axis_record (axis_index).get_value ();
default:return 0;
default:return 0.f;
}
}

@ -485,7 +486,7 @@ struct STAT
hb_array_t<const Offset16To<AxisValue>> axis_values = get_axis_value_offsets ();
for (unsigned int i = 0; i < axis_values.length; i++)
{
const AxisValue& axis_value = this+axis_values[i];
const AxisValue& axis_value = this+offsetToAxisValueOffsets+axis_values[i];
if (axis_value.get_axis_index () == axis_index)
{
if (value)
@ -6,8 +6,8 @@
*
* on files with these headers:
*
* <meta name="updated_at" content="2022-09-30 11:47 PM" />
* File-Date: 2023-08-02
* <meta name="updated_at" content="2024-05-31 05:41 PM" />
* File-Date: 2024-05-16
*/

#ifndef HB_OT_TAG_TABLE_HH
@ -26,12 +26,12 @@ static const LangTag ot_languages2[] = {
|
|||
{HB_TAG('a','y',' ',' '), HB_TAG('A','Y','M',' ')}, /* Aymara [macrolanguage] */
|
||||
{HB_TAG('a','z',' ',' '), HB_TAG('A','Z','E',' ')}, /* Azerbaijani [macrolanguage] */
|
||||
{HB_TAG('b','a',' ',' '), HB_TAG('B','S','H',' ')}, /* Bashkir */
|
||||
{HB_TAG('b','e',' ',' '), HB_TAG('B','E','L',' ')}, /* Belarusian -> Belarussian */
|
||||
{HB_TAG('b','e',' ',' '), HB_TAG('B','E','L',' ')}, /* Belarusian */
|
||||
{HB_TAG('b','g',' ',' '), HB_TAG('B','G','R',' ')}, /* Bulgarian */
|
||||
{HB_TAG('b','i',' ',' '), HB_TAG('B','I','S',' ')}, /* Bislama */
|
||||
{HB_TAG('b','i',' ',' '), HB_TAG('C','P','P',' ')}, /* Bislama -> Creoles */
|
||||
{HB_TAG('b','m',' ',' '), HB_TAG('B','M','B',' ')}, /* Bambara (Bamanankan) */
|
||||
{HB_TAG('b','n',' ',' '), HB_TAG('B','E','N',' ')}, /* Bengali */
|
||||
{HB_TAG('b','n',' ',' '), HB_TAG('B','E','N',' ')}, /* Bangla */
|
||||
{HB_TAG('b','o',' ',' '), HB_TAG('T','I','B',' ')}, /* Tibetan */
|
||||
{HB_TAG('b','r',' ',' '), HB_TAG('B','R','E',' ')}, /* Breton */
|
||||
{HB_TAG('b','s',' ',' '), HB_TAG('B','O','S',' ')}, /* Bosnian */
|
||||
|
|
@ -64,7 +64,8 @@ static const LangTag ot_languages2[] = {
|
|||
{HB_TAG('f','r',' ',' '), HB_TAG('F','R','A',' ')}, /* French */
|
||||
{HB_TAG('f','y',' ',' '), HB_TAG('F','R','I',' ')}, /* Western Frisian -> Frisian */
|
||||
{HB_TAG('g','a',' ',' '), HB_TAG('I','R','I',' ')}, /* Irish */
|
||||
{HB_TAG('g','d',' ',' '), HB_TAG('G','A','E',' ')}, /* Scottish Gaelic (Gaelic) */
|
||||
{HB_TAG('g','a',' ',' '), HB_TAG('I','R','T',' ')}, /* Irish -> Irish Traditional */
|
||||
{HB_TAG('g','d',' ',' '), HB_TAG('G','A','E',' ')}, /* Scottish Gaelic */
|
||||
{HB_TAG('g','l',' ',' '), HB_TAG('G','A','L',' ')}, /* Galician */
|
||||
{HB_TAG('g','n',' ',' '), HB_TAG('G','U','A',' ')}, /* Guarani [macrolanguage] */
|
||||
{HB_TAG('g','u',' ',' '), HB_TAG('G','U','J',' ')}, /* Gujarati */
|
||||
|
|
@ -153,7 +154,7 @@ static const LangTag ot_languages2[] = {
|
|||
{HB_TAG('o','c',' ',' '), HB_TAG('O','C','I',' ')}, /* Occitan (post 1500) */
|
||||
{HB_TAG('o','j',' ',' '), HB_TAG('O','J','B',' ')}, /* Ojibwa [macrolanguage] -> Ojibway */
|
||||
{HB_TAG('o','m',' ',' '), HB_TAG('O','R','O',' ')}, /* Oromo [macrolanguage] */
|
||||
{HB_TAG('o','r',' ',' '), HB_TAG('O','R','I',' ')}, /* Odia (formerly Oriya) [macrolanguage] */
|
||||
{HB_TAG('o','r',' ',' '), HB_TAG('O','R','I',' ')}, /* Odia [macrolanguage] */
|
||||
{HB_TAG('o','s',' ',' '), HB_TAG('O','S','S',' ')}, /* Ossetian */
|
||||
{HB_TAG('p','a',' ',' '), HB_TAG('P','A','N',' ')}, /* Punjabi */
|
||||
{HB_TAG('p','i',' ',' '), HB_TAG('P','A','L',' ')}, /* Pali */
|
||||
|
|
@ -166,7 +167,7 @@ static const LangTag ot_languages2[] = {
|
|||
{HB_TAG('r','o',' ',' '), HB_TAG('R','O','M',' ')}, /* Romanian */
|
||||
{HB_TAG('r','u',' ',' '), HB_TAG('R','U','S',' ')}, /* Russian */
|
||||
{HB_TAG('r','w',' ',' '), HB_TAG('R','U','A',' ')}, /* Kinyarwanda */
|
||||
{HB_TAG('s','a',' ',' '), HB_TAG('S','A','N',' ')}, /* Sanskrit */
|
||||
{HB_TAG('s','a',' ',' '), HB_TAG('S','A','N',' ')}, /* Sanskrit [macrolanguage] */
|
||||
{HB_TAG('s','c',' ',' '), HB_TAG('S','R','D',' ')}, /* Sardinian [macrolanguage] */
|
||||
{HB_TAG('s','d',' ',' '), HB_TAG('S','N','D',' ')}, /* Sindhi */
|
||||
{HB_TAG('s','e',' ',' '), HB_TAG('N','S','M',' ')}, /* Northern Sami */
|
||||
|
|
@ -223,6 +224,7 @@ static const LangTag ot_languages2[] = {
|
|||
static const LangTag ot_languages3[] = {
|
||||
{HB_TAG('a','a','e',' '), HB_TAG('S','Q','I',' ')}, /* Arbëreshë Albanian -> Albanian */
|
||||
{HB_TAG('a','a','o',' '), HB_TAG('A','R','A',' ')}, /* Algerian Saharan Arabic -> Arabic */
|
||||
/*{HB_TAG('a','a','q',' '), HB_TAG('A','A','Q',' ')},*/ /* Eastern Abnaki -> Eastern Abenaki */
|
||||
{HB_TAG('a','a','t',' '), HB_TAG('S','Q','I',' ')}, /* Arvanitika Albanian -> Albanian */
|
||||
{HB_TAG('a','b','a',' '), HB_TAG_NONE }, /* Abé != Abaza */
|
||||
{HB_TAG('a','b','h',' '), HB_TAG('A','R','A',' ')}, /* Tajiki Arabic -> Arabic */
|
||||
|
|
@ -238,6 +240,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('a','c','r',' '), HB_TAG('M','Y','N',' ')}, /* Achi -> Mayan */
|
||||
{HB_TAG('a','c','w',' '), HB_TAG('A','R','A',' ')}, /* Hijazi Arabic -> Arabic */
|
||||
{HB_TAG('a','c','x',' '), HB_TAG('A','R','A',' ')}, /* Omani Arabic -> Arabic */
|
||||
{HB_TAG('a','c','y',' '), HB_TAG('A','C','Y',' ')}, /* Cypriot Arabic */
|
||||
{HB_TAG('a','c','y',' '), HB_TAG('A','R','A',' ')}, /* Cypriot Arabic -> Arabic */
|
||||
{HB_TAG('a','d','a',' '), HB_TAG('D','N','G',' ')}, /* Adangme -> Dangme */
|
||||
{HB_TAG('a','d','f',' '), HB_TAG('A','R','A',' ')}, /* Dhofari Arabic -> Arabic */
|
||||
|
|
@ -288,6 +291,7 @@ static const LangTag ot_languages3[] = {
|
|||
/*{HB_TAG('a','s','t',' '), HB_TAG('A','S','T',' ')},*/ /* Asturian */
|
||||
/*{HB_TAG('a','t','h',' '), HB_TAG('A','T','H',' ')},*/ /* Athapascan [collection] -> Athapaskan */
|
||||
{HB_TAG('a','t','j',' '), HB_TAG('R','C','R',' ')}, /* Atikamekw -> R-Cree */
|
||||
/*{HB_TAG('a','t','s',' '), HB_TAG('A','T','S',' ')},*/ /* Gros Ventre (Atsina) */
|
||||
{HB_TAG('a','t','v',' '), HB_TAG('A','L','T',' ')}, /* Northern Altai -> Altai */
|
||||
{HB_TAG('a','u','j',' '), HB_TAG('B','B','R',' ')}, /* Awjilah -> Berber */
|
||||
{HB_TAG('a','u','z',' '), HB_TAG('A','R','A',' ')}, /* Uzbeki Arabic -> Arabic */
|
||||
|
|
@ -326,6 +330,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('b','c','l',' '), HB_TAG('B','I','K',' ')}, /* Central Bikol -> Bikol */
|
||||
{HB_TAG('b','c','q',' '), HB_TAG('B','C','H',' ')}, /* Bench */
|
||||
{HB_TAG('b','c','r',' '), HB_TAG('A','T','H',' ')}, /* Babine -> Athapaskan */
|
||||
/*{HB_TAG('b','d','c',' '), HB_TAG('B','D','C',' ')},*/ /* Emberá-Baudó */
|
||||
/*{HB_TAG('b','d','y',' '), HB_TAG('B','D','Y',' ')},*/ /* Bandjalang */
|
||||
{HB_TAG('b','e','a',' '), HB_TAG('A','T','H',' ')}, /* Beaver -> Athapaskan */
|
||||
{HB_TAG('b','e','b',' '), HB_TAG('B','T','I',' ')}, /* Bebele -> Beti */
|
||||
|
|
@ -421,6 +426,8 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('c','a','f',' '), HB_TAG('A','T','H',' ')}, /* Southern Carrier -> Athapaskan */
|
||||
{HB_TAG('c','a','k',' '), HB_TAG('C','A','K',' ')}, /* Kaqchikel */
|
||||
{HB_TAG('c','a','k',' '), HB_TAG('M','Y','N',' ')}, /* Kaqchikel -> Mayan */
|
||||
/*{HB_TAG('c','a','y',' '), HB_TAG('C','A','Y',' ')},*/ /* Cayuga */
|
||||
/*{HB_TAG('c','b','g',' '), HB_TAG('C','B','G',' ')},*/ /* Chimila */
|
||||
{HB_TAG('c','b','k',' '), HB_TAG('C','B','K',' ')}, /* Chavacano -> Zamboanga Chavacano */
|
||||
{HB_TAG('c','b','k',' '), HB_TAG('C','P','P',' ')}, /* Chavacano -> Creoles */
|
||||
{HB_TAG('c','b','l',' '), HB_TAG('Q','I','N',' ')}, /* Bualkhaw Chin -> Chin */
|
||||
|
|
@ -465,7 +472,9 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('c','l','d',' '), HB_TAG('S','Y','R',' ')}, /* Chaldean Neo-Aramaic -> Syriac */
|
||||
{HB_TAG('c','l','e',' '), HB_TAG('C','C','H','N')}, /* Lealao Chinantec -> Chinantec */
|
||||
{HB_TAG('c','l','j',' '), HB_TAG('Q','I','N',' ')}, /* Laitu Chin -> Chin */
|
||||
{HB_TAG('c','l','s',' '), HB_TAG('S','A','N',' ')}, /* Classical Sanskrit -> Sanskrit */
|
||||
{HB_TAG('c','l','t',' '), HB_TAG('Q','I','N',' ')}, /* Lautu Chin -> Chin */
|
||||
/*{HB_TAG('c','m','i',' '), HB_TAG('C','M','I',' ')},*/ /* Emberá-Chamí */
|
||||
{HB_TAG('c','m','n',' '), HB_TAG('Z','H','S',' ')}, /* Mandarin Chinese -> Chinese, Simplified */
|
||||
{HB_TAG('c','m','r',' '), HB_TAG('Q','I','N',' ')}, /* Mro-Khimi Chin -> Chin */
|
||||
{HB_TAG('c','n','b',' '), HB_TAG('Q','I','N',' ')}, /* Chinbon Chin -> Chin */
|
||||
|
|
@ -479,6 +488,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('c','n','w',' '), HB_TAG('Q','I','N',' ')}, /* Ngawn Chin -> Chin */
|
||||
{HB_TAG('c','o','a',' '), HB_TAG('M','L','Y',' ')}, /* Cocos Islands Malay -> Malay */
|
||||
{HB_TAG('c','o','b',' '), HB_TAG('M','Y','N',' ')}, /* Chicomuceltec -> Mayan */
|
||||
/*{HB_TAG('c','o','o',' '), HB_TAG('C','O','O',' ')},*/ /* Comox */
|
||||
/*{HB_TAG('c','o','p',' '), HB_TAG('C','O','P',' ')},*/ /* Coptic */
|
||||
{HB_TAG('c','o','q',' '), HB_TAG('A','T','H',' ')}, /* Coquille -> Athapaskan */
|
||||
{HB_TAG('c','p','a',' '), HB_TAG('C','C','H','N')}, /* Palantla Chinantec -> Chinantec */
|
||||
|
|
@ -528,6 +538,7 @@ static const LangTag ot_languages3[] = {
|
|||
/*{HB_TAG('c','t','g',' '), HB_TAG('C','T','G',' ')},*/ /* Chittagonian */
|
||||
{HB_TAG('c','t','h',' '), HB_TAG('Q','I','N',' ')}, /* Thaiphum Chin -> Chin */
|
||||
{HB_TAG('c','t','l',' '), HB_TAG('C','C','H','N')}, /* Tlacoatzintepec Chinantec -> Chinantec */
|
||||
/*{HB_TAG('c','t','o',' '), HB_TAG('C','T','O',' ')},*/ /* Emberá-Catío */
|
||||
{HB_TAG('c','t','s',' '), HB_TAG('B','I','K',' ')}, /* Northern Catanduanes Bikol -> Bikol */
|
||||
/*{HB_TAG('c','t','t',' '), HB_TAG('C','T','T',' ')},*/ /* Wayanad Chetti */
|
||||
{HB_TAG('c','t','u',' '), HB_TAG('M','Y','N',' ')}, /* Chol -> Mayan */
|
||||
|
|
@ -551,7 +562,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('d','e','p',' '), HB_TAG('C','P','P',' ')}, /* Pidgin Delaware -> Creoles */
|
||||
{HB_TAG('d','g','o',' '), HB_TAG('D','G','O',' ')}, /* Dogri (individual language) */
|
||||
{HB_TAG('d','g','o',' '), HB_TAG('D','G','R',' ')}, /* Dogri (macrolanguage) */
|
||||
{HB_TAG('d','g','r',' '), HB_TAG('A','T','H',' ')}, /* Dogrib -> Athapaskan */
|
||||
{HB_TAG('d','g','r',' '), HB_TAG('A','T','H',' ')}, /* Tlicho -> Athapaskan */
|
||||
{HB_TAG('d','h','d',' '), HB_TAG('M','A','W',' ')}, /* Dhundari -> Marwari */
|
||||
/*{HB_TAG('d','h','g',' '), HB_TAG('D','H','G',' ')},*/ /* Dhangu */
|
||||
{HB_TAG('d','h','v',' '), HB_TAG_NONE }, /* Dehu != Divehi (Dhivehi, Maldivian) (deprecated) */
|
||||
|
|
@ -590,6 +601,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('e','k','y',' '), HB_TAG('K','R','N',' ')}, /* Eastern Kayah -> Karen */
|
||||
{HB_TAG('e','m','k',' '), HB_TAG('E','M','K',' ')}, /* Eastern Maninkakan */
|
||||
{HB_TAG('e','m','k',' '), HB_TAG('M','N','K',' ')}, /* Eastern Maninkakan -> Maninka */
|
||||
/*{HB_TAG('e','m','p',' '), HB_TAG('E','M','P',' ')},*/ /* Northern Emberá */
|
||||
{HB_TAG('e','m','y',' '), HB_TAG('M','Y','N',' ')}, /* Epigraphic Mayan -> Mayan */
|
||||
{HB_TAG('e','n','b',' '), HB_TAG('K','A','L',' ')}, /* Markweeta -> Kalenjin */
|
||||
{HB_TAG('e','n','f',' '), HB_TAG('F','N','E',' ')}, /* Forest Enets */
|
||||
|
|
@ -637,7 +649,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('g','a','a',' '), HB_TAG('G','A','D',' ')}, /* Ga */
|
||||
{HB_TAG('g','a','c',' '), HB_TAG('C','P','P',' ')}, /* Mixed Great Andamanese -> Creoles */
|
||||
{HB_TAG('g','a','d',' '), HB_TAG_NONE }, /* Gaddang != Ga */
|
||||
{HB_TAG('g','a','e',' '), HB_TAG_NONE }, /* Guarequena != Scottish Gaelic (Gaelic) */
|
||||
{HB_TAG('g','a','e',' '), HB_TAG_NONE }, /* Guarequena != Scottish Gaelic */
|
||||
/*{HB_TAG('g','a','g',' '), HB_TAG('G','A','G',' ')},*/ /* Gagauz */
|
||||
{HB_TAG('g','a','l',' '), HB_TAG_NONE }, /* Galolen != Galician */
|
||||
{HB_TAG('g','a','n',' '), HB_TAG('Z','H','S',' ')}, /* Gan Chinese -> Chinese, Simplified */
|
||||
|
|
@ -654,6 +666,7 @@ static const LangTag ot_languages3[] = {
|
|||
/*{HB_TAG('g','e','z',' '), HB_TAG('G','E','Z',' ')},*/ /* Geez */
|
||||
{HB_TAG('g','g','o',' '), HB_TAG('G','O','N',' ')}, /* Southern Gondi (retired code) -> Gondi */
|
||||
{HB_TAG('g','h','a',' '), HB_TAG('B','B','R',' ')}, /* Ghadamès -> Berber */
|
||||
{HB_TAG('g','h','c',' '), HB_TAG('I','R','T',' ')}, /* Hiberno-Scottish Gaelic -> Irish Traditional */
|
||||
{HB_TAG('g','h','k',' '), HB_TAG('K','R','N',' ')}, /* Geko Karen -> Karen */
|
||||
{HB_TAG('g','h','o',' '), HB_TAG('B','B','R',' ')}, /* Ghomara -> Berber */
|
||||
{HB_TAG('g','i','b',' '), HB_TAG('C','P','P',' ')}, /* Gibanawa -> Creoles */
|
||||
|
|
@ -743,6 +756,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('h','s','n',' '), HB_TAG('Z','H','S',' ')}, /* Xiang Chinese -> Chinese, Simplified */
|
||||
{HB_TAG('h','u','j',' '), HB_TAG('H','M','N',' ')}, /* Northern Guiyang Hmong -> Hmong */
|
||||
{HB_TAG('h','u','p',' '), HB_TAG('A','T','H',' ')}, /* Hupa -> Athapaskan */
|
||||
/*{HB_TAG('h','u','r',' '), HB_TAG('H','U','R',' ')},*/ /* Halkomelem */
|
||||
{HB_TAG('h','u','s',' '), HB_TAG('M','Y','N',' ')}, /* Huastec -> Mayan */
|
||||
{HB_TAG('h','w','c',' '), HB_TAG('C','P','P',' ')}, /* Hawai'i Creole English -> Creoles */
|
||||
{HB_TAG('h','y','w',' '), HB_TAG('H','Y','E',' ')}, /* Western Armenian -> Armenian */
|
||||
|
|
@ -780,6 +794,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('j','b','n',' '), HB_TAG('B','B','R',' ')}, /* Nafusi -> Berber */
|
||||
/*{HB_TAG('j','b','o',' '), HB_TAG('J','B','O',' ')},*/ /* Lojban */
|
||||
/*{HB_TAG('j','c','t',' '), HB_TAG('J','C','T',' ')},*/ /* Krymchak */
|
||||
/*{HB_TAG('j','d','t',' '), HB_TAG('J','D','T',' ')},*/ /* Judeo-Tat */
|
||||
{HB_TAG('j','g','o',' '), HB_TAG('B','M','L',' ')}, /* Ngomba -> Bamileke */
|
||||
{HB_TAG('j','i','i',' '), HB_TAG_NONE }, /* Jiiddu != Yiddish */
|
||||
{HB_TAG('j','k','m',' '), HB_TAG('K','R','N',' ')}, /* Mobwa Karen -> Karen */
|
||||
|
|
@ -794,6 +809,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('k','a','m',' '), HB_TAG('K','M','B',' ')}, /* Kamba (Kenya) */
|
||||
{HB_TAG('k','a','r',' '), HB_TAG('K','R','N',' ')}, /* Karen [collection] */
|
||||
/*{HB_TAG('k','a','w',' '), HB_TAG('K','A','W',' ')},*/ /* Kawi (Old Javanese) */
|
||||
/*{HB_TAG('k','b','c',' '), HB_TAG('K','B','C',' ')},*/ /* Kadiwéu */
|
||||
{HB_TAG('k','b','d',' '), HB_TAG('K','A','B',' ')}, /* Kabardian */
|
||||
{HB_TAG('k','b','y',' '), HB_TAG('K','N','R',' ')}, /* Manga Kanuri -> Kanuri */
|
||||
{HB_TAG('k','c','a',' '), HB_TAG('K','H','K',' ')}, /* Khanty -> Khanty-Kazim */
|
||||
|
|
@ -829,6 +845,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('k','j','b',' '), HB_TAG('M','Y','N',' ')}, /* Q'anjob'al -> Mayan */
|
||||
/*{HB_TAG('k','j','d',' '), HB_TAG('K','J','D',' ')},*/ /* Southern Kiwai */
|
||||
{HB_TAG('k','j','h',' '), HB_TAG('K','H','A',' ')}, /* Khakas -> Khakass */
|
||||
/*{HB_TAG('k','j','j',' '), HB_TAG('K','J','J',' ')},*/ /* Khinalugh -> Khinalug */
|
||||
{HB_TAG('k','j','p',' '), HB_TAG('K','J','P',' ')}, /* Pwo Eastern Karen -> Eastern Pwo Karen */
|
||||
{HB_TAG('k','j','p',' '), HB_TAG('K','R','N',' ')}, /* Pwo Eastern Karen -> Karen */
|
||||
{HB_TAG('k','j','t',' '), HB_TAG('K','R','N',' ')}, /* Phrae Pwo Karen -> Karen */
|
||||
|
|
@ -930,6 +947,7 @@ static const LangTag ot_languages3[] = {
|
|||
/*{HB_TAG('l','i','j',' '), HB_TAG('L','I','J',' ')},*/ /* Ligurian */
|
||||
{HB_TAG('l','i','r',' '), HB_TAG('C','P','P',' ')}, /* Liberian English -> Creoles */
|
||||
/*{HB_TAG('l','i','s',' '), HB_TAG('L','I','S',' ')},*/ /* Lisu */
|
||||
/*{HB_TAG('l','i','v',' '), HB_TAG('L','I','V',' ')},*/ /* Liv */
|
||||
{HB_TAG('l','i','w',' '), HB_TAG('M','L','Y',' ')}, /* Col -> Malay */
|
||||
{HB_TAG('l','i','y',' '), HB_TAG('B','A','D','0')}, /* Banda-Bambari -> Banda */
|
||||
/*{HB_TAG('l','j','p',' '), HB_TAG('L','J','P',' ')},*/ /* Lampung Api -> Lampung */
|
||||
|
|
@ -995,12 +1013,14 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('m','e','n',' '), HB_TAG('M','D','E',' ')}, /* Mende (Sierra Leone) */
|
||||
{HB_TAG('m','e','o',' '), HB_TAG('M','L','Y',' ')}, /* Kedah Malay -> Malay */
|
||||
/*{HB_TAG('m','e','r',' '), HB_TAG('M','E','R',' ')},*/ /* Meru */
|
||||
/*{HB_TAG('m','e','v',' '), HB_TAG('M','E','V',' ')},*/ /* Mano */
|
||||
{HB_TAG('m','f','a',' '), HB_TAG('M','F','A',' ')}, /* Pattani Malay */
|
||||
{HB_TAG('m','f','a',' '), HB_TAG('M','L','Y',' ')}, /* Pattani Malay -> Malay */
|
||||
{HB_TAG('m','f','b',' '), HB_TAG('M','L','Y',' ')}, /* Bangka -> Malay */
|
||||
{HB_TAG('m','f','e',' '), HB_TAG('M','F','E',' ')}, /* Morisyen */
|
||||
{HB_TAG('m','f','e',' '), HB_TAG('C','P','P',' ')}, /* Morisyen -> Creoles */
|
||||
{HB_TAG('m','f','p',' '), HB_TAG('C','P','P',' ')}, /* Makassar Malay -> Creoles */
|
||||
{HB_TAG('m','g','a',' '), HB_TAG('S','G','A',' ')}, /* Middle Irish (900-1200) -> Old Irish */
|
||||
{HB_TAG('m','h','c',' '), HB_TAG('M','Y','N',' ')}, /* Mocho -> Mayan */
|
||||
{HB_TAG('m','h','r',' '), HB_TAG('L','M','A',' ')}, /* Eastern Mari -> Low Mari */
|
||||
{HB_TAG('m','h','v',' '), HB_TAG('A','R','K',' ')}, /* Arakanese (retired code) -> Rakhine */
|
||||
|
|
@ -1153,6 +1173,8 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('o','k','i',' '), HB_TAG('K','A','L',' ')}, /* Okiek -> Kalenjin */
|
||||
{HB_TAG('o','k','m',' '), HB_TAG('K','O','H',' ')}, /* Middle Korean (10th-16th cent.) -> Korean Old Hangul */
|
||||
{HB_TAG('o','k','r',' '), HB_TAG('I','J','O',' ')}, /* Kirike -> Ijo */
|
||||
/*{HB_TAG('o','n','e',' '), HB_TAG('O','N','E',' ')},*/ /* Oneida */
|
||||
/*{HB_TAG('o','n','o',' '), HB_TAG('O','N','O',' ')},*/ /* Onondaga */
|
||||
{HB_TAG('o','n','x',' '), HB_TAG('C','P','P',' ')}, /* Onin Based Pidgin -> Creoles */
|
||||
{HB_TAG('o','o','r',' '), HB_TAG('C','P','P',' ')}, /* Oorlams -> Creoles */
|
||||
{HB_TAG('o','r','c',' '), HB_TAG('O','R','O',' ')}, /* Orma -> Oromo */
|
||||
|
|
@ -1160,7 +1182,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('o','r','o',' '), HB_TAG_NONE }, /* Orokolo != Oromo */
|
||||
{HB_TAG('o','r','r',' '), HB_TAG('I','J','O',' ')}, /* Oruma -> Ijo */
|
||||
{HB_TAG('o','r','s',' '), HB_TAG('M','L','Y',' ')}, /* Orang Seletar -> Malay */
|
||||
{HB_TAG('o','r','y',' '), HB_TAG('O','R','I',' ')}, /* Odia (formerly Oriya) */
|
||||
{HB_TAG('o','r','y',' '), HB_TAG('O','R','I',' ')}, /* Odia */
|
||||
{HB_TAG('o','t','w',' '), HB_TAG('O','J','B',' ')}, /* Ottawa -> Ojibway */
|
||||
{HB_TAG('o','u','a',' '), HB_TAG('B','B','R',' ')}, /* Tagargrent -> Berber */
|
||||
{HB_TAG('p','a','a',' '), HB_TAG_NONE }, /* Papuan [collection] != Palestinian Aramaic */
|
||||
|
|
@ -1193,7 +1215,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('p','i','s',' '), HB_TAG('C','P','P',' ')}, /* Pijin -> Creoles */
|
||||
{HB_TAG('p','k','h',' '), HB_TAG('Q','I','N',' ')}, /* Pankhu -> Chin */
|
||||
{HB_TAG('p','k','o',' '), HB_TAG('K','A','L',' ')}, /* Pökoot -> Kalenjin */
|
||||
{HB_TAG('p','l','g',' '), HB_TAG_NONE }, /* Pilagá != Palaung */
|
||||
{HB_TAG('p','l','g',' '), HB_TAG('P','L','G','0')}, /* Pilagá */
|
||||
{HB_TAG('p','l','k',' '), HB_TAG_NONE }, /* Kohistani Shina != Polish */
|
||||
{HB_TAG('p','l','l',' '), HB_TAG('P','L','G',' ')}, /* Shwe Palaung -> Palaung */
|
||||
{HB_TAG('p','l','n',' '), HB_TAG('C','P','P',' ')}, /* Palenquero -> Creoles */
|
||||
|
|
@ -1353,6 +1375,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('s','d','h',' '), HB_TAG('K','U','R',' ')}, /* Southern Kurdish -> Kurdish */
|
||||
{HB_TAG('s','d','n',' '), HB_TAG('S','R','D',' ')}, /* Gallurese Sardinian -> Sardinian */
|
||||
{HB_TAG('s','d','s',' '), HB_TAG('B','B','R',' ')}, /* Sened -> Berber */
|
||||
/*{HB_TAG('s','e','e',' '), HB_TAG('S','E','E',' ')},*/ /* Seneca */
|
||||
{HB_TAG('s','e','h',' '), HB_TAG('S','N','A',' ')}, /* Sena */
|
||||
{HB_TAG('s','e','k',' '), HB_TAG('A','T','H',' ')}, /* Sekani -> Athapaskan */
|
||||
/*{HB_TAG('s','e','l',' '), HB_TAG('S','E','L',' ')},*/ /* Selkup */
|
||||
|
|
@ -1374,6 +1397,7 @@ static const LangTag ot_languages3[] = {
|
|||
/*{HB_TAG('s','i','d',' '), HB_TAG('S','I','D',' ')},*/ /* Sidamo */
|
||||
{HB_TAG('s','i','g',' '), HB_TAG_NONE }, /* Paasaal != Silte Gurage */
|
||||
{HB_TAG('s','i','z',' '), HB_TAG('B','B','R',' ')}, /* Siwi -> Berber */
|
||||
/*{HB_TAG('s','j','a',' '), HB_TAG('S','J','A',' ')},*/ /* Epena */
|
||||
{HB_TAG('s','j','d',' '), HB_TAG('K','S','M',' ')}, /* Kildin Sami */
|
||||
{HB_TAG('s','j','o',' '), HB_TAG('S','I','B',' ')}, /* Xibe -> Sibe */
|
||||
{HB_TAG('s','j','s',' '), HB_TAG('B','B','R',' ')}, /* Senhaja De Srair -> Berber */
|
||||
|
|
@ -1395,7 +1419,7 @@ static const LangTag ot_languages3[] = {
|
|||
/*{HB_TAG('s','n','k',' '), HB_TAG('S','N','K',' ')},*/ /* Soninke */
|
||||
{HB_TAG('s','o','g',' '), HB_TAG_NONE }, /* Sogdian != Sodo Gurage */
|
||||
/*{HB_TAG('s','o','p',' '), HB_TAG('S','O','P',' ')},*/ /* Songe */
|
||||
{HB_TAG('s','p','v',' '), HB_TAG('O','R','I',' ')}, /* Sambalpuri -> Odia (formerly Oriya) */
|
||||
{HB_TAG('s','p','v',' '), HB_TAG('O','R','I',' ')}, /* Sambalpuri -> Odia */
|
||||
{HB_TAG('s','p','y',' '), HB_TAG('K','A','L',' ')}, /* Sabaot -> Kalenjin */
|
||||
{HB_TAG('s','r','b',' '), HB_TAG_NONE }, /* Sora != Serbian */
|
||||
{HB_TAG('s','r','c',' '), HB_TAG('S','R','D',' ')}, /* Logudorese Sardinian -> Sardinian */
|
||||
|
|
@ -1410,6 +1434,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('s','s','m',' '), HB_TAG_NONE }, /* Semnam != Southern Sami */
|
||||
{HB_TAG('s','t','a',' '), HB_TAG('C','P','P',' ')}, /* Settla -> Creoles */
|
||||
/*{HB_TAG('s','t','q',' '), HB_TAG('S','T','Q',' ')},*/ /* Saterfriesisch -> Saterland Frisian */
|
||||
/*{HB_TAG('s','t','r',' '), HB_TAG('S','T','R',' ')},*/ /* Straits Salish */
|
||||
{HB_TAG('s','t','v',' '), HB_TAG('S','I','G',' ')}, /* Silt'e -> Silte Gurage */
|
||||
/*{HB_TAG('s','u','k',' '), HB_TAG('S','U','K',' ')},*/ /* Sukuma */
|
||||
{HB_TAG('s','u','q',' '), HB_TAG('S','U','R',' ')}, /* Suri */
|
||||
|
|
@ -1431,6 +1456,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('t','a','a',' '), HB_TAG('A','T','H',' ')}, /* Lower Tanana -> Athapaskan */
|
||||
/*{HB_TAG('t','a','b',' '), HB_TAG('T','A','B',' ')},*/ /* Tabassaran -> Tabasaran */
|
||||
{HB_TAG('t','a','j',' '), HB_TAG_NONE }, /* Eastern Tamang != Tajiki */
|
||||
{HB_TAG('t','a','q',' '), HB_TAG('T','A','Q',' ')}, /* Tamasheq */
|
||||
{HB_TAG('t','a','q',' '), HB_TAG('T','M','H',' ')}, /* Tamasheq -> Tamashek */
|
||||
{HB_TAG('t','a','q',' '), HB_TAG('B','B','R',' ')}, /* Tamasheq -> Berber */
|
||||
{HB_TAG('t','a','s',' '), HB_TAG('C','P','P',' ')}, /* Tay Boi -> Creoles */
|
||||
|
|
@ -1442,6 +1468,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('t','c','s',' '), HB_TAG('C','P','P',' ')}, /* Torres Strait Creole -> Creoles */
|
||||
{HB_TAG('t','c','y',' '), HB_TAG('T','U','L',' ')}, /* Tulu */
|
||||
{HB_TAG('t','c','z',' '), HB_TAG('Q','I','N',' ')}, /* Thado Chin -> Chin */
|
||||
/*{HB_TAG('t','d','c',' '), HB_TAG('T','D','C',' ')},*/ /* Emberá-Tadó */
|
||||
/*{HB_TAG('t','d','d',' '), HB_TAG('T','D','D',' ')},*/ /* Tai Nüa -> Dehong Dai */
|
||||
{HB_TAG('t','d','x',' '), HB_TAG('M','L','G',' ')}, /* Tandroy-Mahafaly Malagasy -> Malagasy */
|
||||
{HB_TAG('t','e','c',' '), HB_TAG('K','A','L',' ')}, /* Terik -> Kalenjin */
|
||||
|
|
@ -1455,9 +1482,12 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('t','g','r',' '), HB_TAG_NONE }, /* Tareng != Tigre */
|
||||
{HB_TAG('t','g','x',' '), HB_TAG('A','T','H',' ')}, /* Tagish -> Athapaskan */
|
||||
{HB_TAG('t','g','y',' '), HB_TAG_NONE }, /* Togoyo != Tigrinya */
|
||||
/*{HB_TAG('t','h','p',' '), HB_TAG('T','H','P',' ')},*/ /* Thompson */
|
||||
{HB_TAG('t','h','t',' '), HB_TAG('A','T','H',' ')}, /* Tahltan -> Athapaskan */
|
||||
{HB_TAG('t','h','v',' '), HB_TAG('T','H','V',' ')}, /* Tahaggart Tamahaq */
|
||||
{HB_TAG('t','h','v',' '), HB_TAG('T','M','H',' ')}, /* Tahaggart Tamahaq -> Tamashek */
|
||||
{HB_TAG('t','h','v',' '), HB_TAG('B','B','R',' ')}, /* Tahaggart Tamahaq -> Berber */
|
||||
{HB_TAG('t','h','z',' '), HB_TAG('T','H','Z',' ')}, /* Tayart Tamajeq */
|
||||
{HB_TAG('t','h','z',' '), HB_TAG('T','M','H',' ')}, /* Tayart Tamajeq -> Tamashek */
|
||||
{HB_TAG('t','h','z',' '), HB_TAG('B','B','R',' ')}, /* Tayart Tamajeq -> Berber */
|
||||
{HB_TAG('t','i','a',' '), HB_TAG('B','B','R',' ')}, /* Tidikelt Tamazight -> Berber */
|
||||
|
|
@ -1468,6 +1498,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('t','k','g',' '), HB_TAG('M','L','G',' ')}, /* Tesaka Malagasy -> Malagasy */
|
||||
{HB_TAG('t','k','m',' '), HB_TAG_NONE }, /* Takelma != Turkmen */
|
||||
/*{HB_TAG('t','l','i',' '), HB_TAG('T','L','I',' ')},*/ /* Tlingit */
|
||||
/*{HB_TAG('t','l','y',' '), HB_TAG('T','L','Y',' ')},*/ /* Talysh */
|
||||
{HB_TAG('t','m','g',' '), HB_TAG('C','P','P',' ')}, /* Ternateño -> Creoles */
|
||||
{HB_TAG('t','m','h',' '), HB_TAG('T','M','H',' ')}, /* Tamashek [macrolanguage] */
|
||||
{HB_TAG('t','m','h',' '), HB_TAG('B','B','R',' ')}, /* Tamashek [macrolanguage] -> Berber */
|
||||
|
|
@ -1493,11 +1524,13 @@ static const LangTag ot_languages3[] = {
|
|||
/*{HB_TAG('t','s','j',' '), HB_TAG('T','S','J',' ')},*/ /* Tshangla */
|
||||
{HB_TAG('t','t','c',' '), HB_TAG('M','Y','N',' ')}, /* Tektiteko -> Mayan */
|
||||
{HB_TAG('t','t','m',' '), HB_TAG('A','T','H',' ')}, /* Northern Tutchone -> Athapaskan */
|
||||
{HB_TAG('t','t','q',' '), HB_TAG('T','T','Q',' ')}, /* Tawallammat Tamajaq */
|
||||
{HB_TAG('t','t','q',' '), HB_TAG('T','M','H',' ')}, /* Tawallammat Tamajaq -> Tamashek */
|
||||
{HB_TAG('t','t','q',' '), HB_TAG('B','B','R',' ')}, /* Tawallammat Tamajaq -> Berber */
|
||||
{HB_TAG('t','u','a',' '), HB_TAG_NONE }, /* Wiarumus != Turoyo Aramaic */
|
||||
{HB_TAG('t','u','l',' '), HB_TAG_NONE }, /* Tula != Tulu */
|
||||
/*{HB_TAG('t','u','m',' '), HB_TAG('T','U','M',' ')},*/ /* Tumbuka */
|
||||
/*{HB_TAG('t','u','s',' '), HB_TAG('T','U','S',' ')},*/ /* Tuscarora */
|
||||
{HB_TAG('t','u','u',' '), HB_TAG('A','T','H',' ')}, /* Tututni -> Athapaskan */
|
||||
{HB_TAG('t','u','v',' '), HB_TAG_NONE }, /* Turkana != Tuvin */
|
||||
{HB_TAG('t','u','y',' '), HB_TAG('K','A','L',' ')}, /* Tugen -> Kalenjin */
|
||||
|
|
@ -1514,6 +1547,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('t','z','o',' '), HB_TAG('T','Z','O',' ')}, /* Tzotzil */
|
||||
{HB_TAG('t','z','o',' '), HB_TAG('M','Y','N',' ')}, /* Tzotzil -> Mayan */
|
||||
{HB_TAG('u','b','l',' '), HB_TAG('B','I','K',' ')}, /* Buhi'non Bikol -> Bikol */
|
||||
/*{HB_TAG('u','d','i',' '), HB_TAG('U','D','I',' ')},*/ /* Udi */
|
||||
/*{HB_TAG('u','d','m',' '), HB_TAG('U','D','M',' ')},*/ /* Udmurt */
|
||||
{HB_TAG('u','k','i',' '), HB_TAG('K','U','I',' ')}, /* Kui (India) */
|
||||
{HB_TAG('u','l','n',' '), HB_TAG('C','P','P',' ')}, /* Unserdeutsch -> Creoles */
|
||||
|
|
@ -1532,13 +1566,17 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('v','k','t',' '), HB_TAG('M','L','Y',' ')}, /* Tenggarong Kutai Malay -> Malay */
|
||||
{HB_TAG('v','l','s',' '), HB_TAG('F','L','E',' ')}, /* Vlaams -> Dutch (Flemish) */
|
||||
{HB_TAG('v','m','w',' '), HB_TAG('M','A','K',' ')}, /* Makhuwa */
|
||||
/*{HB_TAG('v','r','o',' '), HB_TAG('V','R','O',' ')},*/ /* Võro */
|
||||
{HB_TAG('v','r','o',' '), HB_TAG('V','R','O',' ')}, /* Võro */
|
||||
{HB_TAG('v','r','o',' '), HB_TAG('E','T','I',' ')}, /* Võro -> Estonian */
|
||||
{HB_TAG('v','s','n',' '), HB_TAG('S','A','N',' ')}, /* Vedic Sanskrit -> Sanskrit */
|
||||
{HB_TAG('w','a','g',' '), HB_TAG_NONE }, /* Wa'ema != Wagdi */
|
||||
/*{HB_TAG('w','a','r',' '), HB_TAG('W','A','R',' ')},*/ /* Waray (Philippines) -> Waray-Waray */
|
||||
/*{HB_TAG('w','b','l',' '), HB_TAG('W','B','L',' ')},*/ /* Wakhi */
|
||||
{HB_TAG('w','b','m',' '), HB_TAG('W','A',' ',' ')}, /* Wa */
|
||||
{HB_TAG('w','b','r',' '), HB_TAG('W','A','G',' ')}, /* Wagdi */
|
||||
{HB_TAG('w','b','r',' '), HB_TAG('R','A','J',' ')}, /* Wagdi -> Rajasthani */
|
||||
/*{HB_TAG('w','c','i',' '), HB_TAG('W','C','I',' ')},*/ /* Waci Gbe */
|
||||
/*{HB_TAG('w','d','t',' '), HB_TAG('W','D','T',' ')},*/ /* Wendat */
|
||||
{HB_TAG('w','e','a',' '), HB_TAG('K','R','N',' ')}, /* Wewaw -> Karen */
|
||||
{HB_TAG('w','e','s',' '), HB_TAG('C','P','P',' ')}, /* Cameroon Pidgin -> Creoles */
|
||||
{HB_TAG('w','e','u',' '), HB_TAG('Q','I','N',' ')}, /* Rawngtu Chin -> Chin */
|
||||
|
|
@ -1550,6 +1588,9 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('w','s','g',' '), HB_TAG('G','O','N',' ')}, /* Adilabad Gondi -> Gondi */
|
||||
/*{HB_TAG('w','t','m',' '), HB_TAG('W','T','M',' ')},*/ /* Mewati */
|
||||
{HB_TAG('w','u','u',' '), HB_TAG('Z','H','S',' ')}, /* Wu Chinese -> Chinese, Simplified */
|
||||
{HB_TAG('w','y','a',' '), HB_TAG('W','D','T',' ')}, /* Wyandot (retired code) -> Wendat */
|
||||
{HB_TAG('w','y','a',' '), HB_TAG('W','Y','N',' ')}, /* Wyandot (retired code) */
|
||||
/*{HB_TAG('w','y','n',' '), HB_TAG('W','Y','N',' ')},*/ /* Wyandot */
|
||||
{HB_TAG('x','a','l',' '), HB_TAG('K','L','M',' ')}, /* Kalmyk */
|
||||
{HB_TAG('x','a','l',' '), HB_TAG('T','O','D',' ')}, /* Kalmyk -> Todo */
|
||||
{HB_TAG('x','a','n',' '), HB_TAG('S','E','K',' ')}, /* Xamtanga -> Sekota */
|
||||
|
|
@ -1591,6 +1632,7 @@ static const LangTag ot_languages3[] = {
|
|||
{HB_TAG('y','o','s',' '), HB_TAG('Q','I','N',' ')}, /* Yos (retired code) -> Chin */
|
||||
{HB_TAG('y','u','a',' '), HB_TAG('M','Y','N',' ')}, /* Yucateco -> Mayan */
|
||||
{HB_TAG('y','u','e',' '), HB_TAG('Z','H','H',' ')}, /* Yue Chinese -> Chinese, Traditional, Hong Kong SAR */
|
||||
/*{HB_TAG('y','u','f',' '), HB_TAG('Y','U','F',' ')},*/ /* Havasupai-Walapai-Yavapai */
|
||||
/*{HB_TAG('y','w','q',' '), HB_TAG('Y','W','Q',' ')},*/ /* Wuding-Luquan Yi */
|
||||
{HB_TAG('z','c','h',' '), HB_TAG('Z','H','A',' ')}, /* Central Hongshuihe Zhuang -> Zhuang */
|
||||
{HB_TAG('z','d','j',' '), HB_TAG('C','M','R',' ')}, /* Ngazidja Comorian -> Comorian */
|
||||
|
|
@ -2816,9 +2858,10 @@ out:
|
|||
* @tag: A language tag.
|
||||
*
|
||||
* Converts @tag to a BCP 47 language tag if it is ambiguous (it corresponds to
|
||||
* many language tags) and the best tag is not the alphabetically first, or if
|
||||
* the best tag consists of multiple subtags, or if the best tag does not appear
|
||||
* in #ot_languages.
|
||||
* many language tags) and the best tag is not the first (sorted alphabetically,
|
||||
* with two-letter tags having priority over all three-letter tags), or if the
|
||||
* best tag consists of multiple subtags, or if the best tag does not appear in
|
||||
* #ot_languages2 or #ot_languages3.
|
||||
*
|
||||
* Return value: The #hb_language_t corresponding to the BCP 47 language tag,
|
||||
* or #HB_LANGUAGE_INVALID if @tag is not ambiguous.
|
||||
|
|
@ -2832,8 +2875,6 @@ hb_ot_ambiguous_tag_to_language (hb_tag_t tag)
|
|||
return hb_language_from_string ("alt", -1); /* Southern Altai */
|
||||
case HB_TAG('A','P','P','H'): /* Phonetic transcription—Americanist conventions */
|
||||
return hb_language_from_string ("und-fonnapa", -1); /* Undetermined; North American Phonetic Alphabet */
|
||||
case HB_TAG('A','R','A',' '): /* Arabic */
|
||||
return hb_language_from_string ("ar", -1); /* Arabic [macrolanguage] */
|
||||
case HB_TAG('A','R','K',' '): /* Rakhine */
|
||||
return hb_language_from_string ("rki", -1); /* Rakhine */
|
||||
case HB_TAG('A','T','H',' '): /* Athapaskan */
|
||||
|
|
@ -2854,12 +2895,6 @@ hb_ot_ambiguous_tag_to_language (hb_tag_t tag)
|
|||
return hb_language_from_string ("din", -1); /* Dinka [macrolanguage] */
|
||||
case HB_TAG('D','R','I',' '): /* Dari */
|
||||
return hb_language_from_string ("prs", -1); /* Dari */
|
||||
case HB_TAG('D','Z','N',' '): /* Dzongkha */
|
||||
return hb_language_from_string ("dz", -1); /* Dzongkha */
|
||||
case HB_TAG('E','T','I',' '): /* Estonian */
|
||||
return hb_language_from_string ("et", -1); /* Estonian [macrolanguage] */
|
||||
case HB_TAG('F','A','R',' '): /* Persian */
|
||||
return hb_language_from_string ("fa", -1); /* Persian [macrolanguage] */
|
||||
case HB_TAG('G','O','N',' '): /* Gondi */
|
||||
return hb_language_from_string ("gon", -1); /* Gondi [macrolanguage] */
|
||||
case HB_TAG('H','M','A',' '): /* High Mari */
|
||||
|
|
@ -2874,50 +2909,34 @@ hb_ot_ambiguous_tag_to_language (hb_tag_t tag)
|
|||
return hb_language_from_string ("iba", -1); /* Iban */
|
||||
case HB_TAG('I','J','O',' '): /* Ijo */
|
||||
return hb_language_from_string ("ijo", -1); /* Ijo [collection] */
|
||||
case HB_TAG('I','N','U',' '): /* Inuktitut */
|
||||
return hb_language_from_string ("iu", -1); /* Inuktitut [macrolanguage] */
|
||||
case HB_TAG('I','P','K',' '): /* Inupiat */
|
||||
return hb_language_from_string ("ik", -1); /* Inupiaq [macrolanguage] */
|
||||
case HB_TAG('I','P','P','H'): /* Phonetic transcription—IPA conventions */
|
||||
return hb_language_from_string ("und-fonipa", -1); /* Undetermined; International Phonetic Alphabet */
|
||||
case HB_TAG('I','R','T',' '): /* Irish Traditional */
|
||||
return hb_language_from_string ("ga-Latg", -1); /* Irish; Latin (Gaelic variant) */
|
||||
return hb_language_from_string ("ghc", -1); /* Hiberno-Scottish Gaelic */
|
||||
case HB_TAG('J','I','I',' '): /* Yiddish */
|
||||
return hb_language_from_string ("yi", -1); /* Yiddish [macrolanguage] */
|
||||
case HB_TAG('K','A','L',' '): /* Kalenjin */
|
||||
return hb_language_from_string ("kln", -1); /* Kalenjin [macrolanguage] */
|
||||
case HB_TAG('K','G','E',' '): /* Khutsuri Georgian */
|
||||
return hb_language_from_string ("und-Geok", -1); /* Undetermined; Khutsuri (Asomtavruli and Nuskhuri) */
|
||||
case HB_TAG('K','N','R',' '): /* Kanuri */
|
||||
return hb_language_from_string ("kr", -1); /* Kanuri [macrolanguage] */
|
||||
case HB_TAG('K','O','H',' '): /* Korean Old Hangul */
|
||||
return hb_language_from_string ("okm", -1); /* Middle Korean (10th-16th cent.) */
|
||||
case HB_TAG('K','O','K',' '): /* Konkani */
|
||||
return hb_language_from_string ("kok", -1); /* Konkani [macrolanguage] */
|
||||
case HB_TAG('K','O','M',' '): /* Komi */
|
||||
return hb_language_from_string ("kv", -1); /* Komi [macrolanguage] */
|
||||
case HB_TAG('K','P','L',' '): /* Kpelle */
|
||||
return hb_language_from_string ("kpe", -1); /* Kpelle [macrolanguage] */
|
||||
case HB_TAG('K','R','N',' '): /* Karen */
|
||||
return hb_language_from_string ("kar", -1); /* Karen [collection] */
|
||||
case HB_TAG('K','U','I',' '): /* Kui */
|
||||
return hb_language_from_string ("uki", -1); /* Kui (India) */
|
||||
case HB_TAG('K','U','R',' '): /* Kurdish */
|
||||
return hb_language_from_string ("ku", -1); /* Kurdish [macrolanguage] */
|
||||
case HB_TAG('L','M','A',' '): /* Low Mari */
|
||||
return hb_language_from_string ("mhr", -1); /* Eastern Mari */
|
||||
case HB_TAG('L','U','H',' '): /* Luyia */
|
||||
return hb_language_from_string ("luy", -1); /* Luyia [macrolanguage] */
|
||||
case HB_TAG('L','V','I',' '): /* Latvian */
|
||||
return hb_language_from_string ("lv", -1); /* Latvian [macrolanguage] */
|
||||
case HB_TAG('M','A','W',' '): /* Marwari */
|
||||
return hb_language_from_string ("mwr", -1); /* Marwari [macrolanguage] */
|
||||
case HB_TAG('M','L','G',' '): /* Malagasy */
|
||||
return hb_language_from_string ("mg", -1); /* Malagasy [macrolanguage] */
|
||||
case HB_TAG('M','L','Y',' '): /* Malay */
|
||||
return hb_language_from_string ("ms", -1); /* Malay [macrolanguage] */
|
||||
case HB_TAG('M','N','G',' '): /* Mongolian */
|
||||
return hb_language_from_string ("mn", -1); /* Mongolian [macrolanguage] */
|
||||
case HB_TAG('M','N','K',' '): /* Maninka */
|
||||
return hb_language_from_string ("man", -1); /* Mandingo [macrolanguage] */
|
||||
case HB_TAG('M','O','L',' '): /* Moldavian */
|
||||
|
|
@ -2928,26 +2947,16 @@ hb_ot_ambiguous_tag_to_language (hb_tag_t tag)
|
|||
return hb_language_from_string ("myn", -1); /* Mayan [collection] */
|
||||
case HB_TAG('N','A','H',' '): /* Nahuatl */
|
||||
return hb_language_from_string ("nah", -1); /* Nahuatl [collection] */
|
||||
case HB_TAG('N','E','P',' '): /* Nepali */
|
||||
return hb_language_from_string ("ne", -1); /* Nepali [macrolanguage] */
|
||||
case HB_TAG('N','I','S',' '): /* Nisi */
|
||||
return hb_language_from_string ("njz", -1); /* Nyishi */
|
||||
case HB_TAG('N','O','R',' '): /* Norwegian */
|
||||
return hb_language_from_string ("no", -1); /* Norwegian [macrolanguage] */
|
||||
case HB_TAG('O','J','B',' '): /* Ojibway */
|
||||
return hb_language_from_string ("oj", -1); /* Ojibwa [macrolanguage] */
|
||||
case HB_TAG('O','R','O',' '): /* Oromo */
|
||||
return hb_language_from_string ("om", -1); /* Oromo [macrolanguage] */
|
||||
case HB_TAG('P','A','S',' '): /* Pashto */
|
||||
return hb_language_from_string ("ps", -1); /* Pashto [macrolanguage] */
|
||||
case HB_TAG('P','G','R',' '): /* Polytonic Greek */
|
||||
return hb_language_from_string ("el-polyton", -1); /* Modern Greek (1453-); Polytonic Greek */
|
||||
case HB_TAG('P','R','O',' '): /* Provençal / Old Provençal */
|
||||
return hb_language_from_string ("pro", -1); /* Old Provençal (to 1500) */
|
||||
case HB_TAG('Q','U','H',' '): /* Quechua (Bolivia) */
|
||||
return hb_language_from_string ("quh", -1); /* South Bolivian Quechua */
|
||||
case HB_TAG('Q','U','Z',' '): /* Quechua */
|
||||
return hb_language_from_string ("qu", -1); /* Quechua [macrolanguage] */
|
||||
case HB_TAG('Q','V','I',' '): /* Quechua (Ecuador) */
|
||||
return hb_language_from_string ("qvi", -1); /* Imbabura Highland Quichua */
|
||||
case HB_TAG('Q','W','H',' '): /* Quechua (Peru) */
|
||||
|
|
@ -2958,8 +2967,8 @@ hb_ot_ambiguous_tag_to_language (hb_tag_t tag)
|
|||
return hb_language_from_string ("ro", -1); /* Romanian */
|
||||
case HB_TAG('R','O','Y',' '): /* Romany */
|
||||
return hb_language_from_string ("rom", -1); /* Romany [macrolanguage] */
|
||||
case HB_TAG('S','Q','I',' '): /* Albanian */
|
||||
return hb_language_from_string ("sq", -1); /* Albanian [macrolanguage] */
|
||||
case HB_TAG('S','G','A',' '): /* Old Irish */
|
||||
return hb_language_from_string ("sga", -1); /* Old Irish (to 900) */
|
||||
case HB_TAG('S','R','B',' '): /* Serbian */
|
||||
return hb_language_from_string ("sr", -1); /* Serbian */
|
||||
case HB_TAG('S','X','T',' '): /* Sutu */
|
||||
|
|
@ -2976,6 +2985,10 @@ hb_ot_ambiguous_tag_to_language (hb_tag_t tag)
|
|||
return hb_language_from_string ("tmh", -1); /* Tamashek [macrolanguage] */
|
||||
case HB_TAG('T','O','D',' '): /* Todo */
|
||||
return hb_language_from_string ("xwo", -1); /* Written Oirat */
|
||||
case HB_TAG('W','D','T',' '): /* Wendat */
|
||||
return hb_language_from_string ("wdt", -1); /* Wendat */
|
||||
case HB_TAG('W','Y','N',' '): /* Wyandot */
|
||||
return hb_language_from_string ("wyn", -1); /* Wyandot */
|
||||
case HB_TAG('Z','H','H',' '): /* Chinese, Traditional, Hong Kong SAR */
|
||||
return hb_language_from_string ("zh-HK", -1); /* Chinese [macrolanguage]; Hong Kong */
|
||||
case HB_TAG('Z','H','S',' '): /* Chinese, Simplified */
@ -547,7 +547,7 @@ hb_ot_tag_to_language (hb_tag_t tag)

    buf[3] = '-';
    str += 4;
  }
  snprintf (str, 16, "x-hbot-%08x", tag);
  snprintf (str, 16, "x-hbot-%08" PRIx32, tag);
  return hb_language_from_string (&*buf, -1);
  }
}
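/* Editorial note (not part of the upstream diff): when a tag has no registered
 * mapping, the fallback above emits a private-use BCP 47 tag. A minimal
 * illustration of that encoding (the helper name and tag value are arbitrary): */
static hb_language_t
private_use_language_for_tag (hb_tag_t tag)
{
  char buf[16];
  snprintf (buf, sizeof (buf), "x-hbot-%08" PRIx32, tag); /* e.g. "x-hbot-41424344" */
  return hb_language_from_string (buf, -1);
}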
|
||||
|
|
|
|||
|
|
@ -57,7 +57,7 @@ struct avarV2Tail
|
|||
|
||||
protected:
|
||||
Offset32To<DeltaSetIndexMap> varIdxMap; /* Offset from the beginning of 'avar' table. */
|
||||
Offset32To<VariationStore> varStore; /* Offset from the beginning of 'avar' table. */
|
||||
Offset32To<ItemVariationStore> varStore; /* Offset from the beginning of 'avar' table. */
|
||||
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
|
|
@ -80,7 +80,7 @@ struct AxisValueMap
|
|||
|
||||
bool is_outside_axis_range (const Triple& axis_range) const
|
||||
{
|
||||
float from_coord = coords[0].to_float ();
|
||||
double from_coord = (double) coords[0].to_float ();
|
||||
return !axis_range.contains (from_coord);
|
||||
}
|
||||
|
||||
|
|
@ -100,8 +100,8 @@ struct AxisValueMap
|
|||
float from_coord = coords[0].to_float ();
|
||||
float to_coord = coords[1].to_float ();
|
||||
|
||||
from_coord = renormalizeValue (from_coord, unmapped_range, triple_distances);
|
||||
to_coord = renormalizeValue (to_coord, axis_range, triple_distances);
|
||||
from_coord = renormalizeValue ((double) from_coord, unmapped_range, triple_distances);
|
||||
to_coord = renormalizeValue ((double) to_coord, axis_range, triple_distances);
|
||||
|
||||
coords[0].set_float (from_coord);
|
||||
coords[1].set_float (to_coord);
|
||||
|
|
@ -197,7 +197,7 @@ struct SegmentMaps : Array16Of<AxisValueMap>
|
|||
unmapped_val.set_int (unmap (val.to_int ()));
|
||||
float unmapped_max = unmapped_val.to_float ();
|
||||
|
||||
return Triple{unmapped_min, unmapped_middle, unmapped_max};
|
||||
return Triple{(double) unmapped_min, (double) unmapped_middle, (double) unmapped_max};
|
||||
}
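/* Editorial note (not part of the upstream diff): the coordinates converted
 * above are F2DOT14 fixed-point values, so the float/double conversions amount
 * to a division by 16384. A minimal illustration (helper name is hypothetical): */
static inline double f2dot14_to_double (int16_t v)
{ return v / 16384.0; /* 0x4000 -> 1.0, int16_t(0xE000) -> -0.5 */ }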
|
||||
|
||||
bool subset (hb_subset_context_t *c, hb_tag_t axis_tag) const
|
||||
|
|
@ -230,7 +230,7 @@ struct SegmentMaps : Array16Of<AxisValueMap>
|
|||
* duplicates here */
|
||||
if (mapping.must_include ())
|
||||
continue;
|
||||
value_mappings.push (std::move (mapping));
|
||||
value_mappings.push (mapping);
|
||||
}
|
||||
|
||||
AxisValueMap m;
|
||||
|
|
@ -343,7 +343,7 @@ struct avar
|
|||
for (unsigned i = 0; i < coords_length; i++)
|
||||
coords[i] = out[i];
|
||||
|
||||
OT::VariationStore::destroy_cache (var_store_cache);
|
||||
OT::ItemVariationStore::destroy_cache (var_store_cache);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -107,14 +107,14 @@ struct cvar
|
|||
|
||||
bool has_private_points = iterator.current_tuple->has_private_points ();
|
||||
if (has_private_points &&
|
||||
!TupleVariationData::unpack_points (p, private_indices, end))
|
||||
!TupleVariationData::decompile_points (p, private_indices, end))
|
||||
return false;
|
||||
const hb_vector_t<unsigned int> &indices = has_private_points ? private_indices : shared_indices;
|
||||
|
||||
bool apply_to_all = (indices.length == 0);
|
||||
unsigned num_deltas = apply_to_all ? num_cvt_item : indices.length;
|
||||
if (unlikely (!unpacked_deltas.resize (num_deltas, false))) return false;
|
||||
if (unlikely (!TupleVariationData::unpack_deltas (p, unpacked_deltas, end))) return false;
|
||||
if (unlikely (!TupleVariationData::decompile_deltas (p, unpacked_deltas, end))) return false;
|
||||
|
||||
for (unsigned int i = 0; i < num_deltas; i++)
|
||||
{
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ static bool axis_coord_pinned_or_within_axis_range (const hb_array_t<const F16DO
|
|||
unsigned axis_index,
|
||||
Triple axis_limit)
|
||||
{
|
||||
float axis_coord = coords[axis_index].to_float ();
|
||||
double axis_coord = static_cast<double>(coords[axis_index].to_float ());
|
||||
if (axis_limit.is_point ())
|
||||
{
|
||||
if (axis_limit.minimum != axis_coord)
|
||||
|
|
@ -233,7 +233,10 @@ struct AxisRecord
|
|||
{
|
||||
float min, default_, max;
|
||||
get_coordinates (min, default_, max);
|
||||
return TripleDistances (min, default_, max);
|
||||
return TripleDistances (
|
||||
static_cast<double>(min),
|
||||
static_cast<double>(default_),
|
||||
static_cast<double>(max));
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
|
|
|
|||
|
|
@ -101,10 +101,14 @@ struct glyph_variations_t
|
|||
continue;
|
||||
}
|
||||
|
||||
bool is_composite_glyph = false;
|
||||
is_composite_glyph = plan->composite_new_gids.has (new_gid);
|
||||
|
||||
if (!p->decompile_tuple_variations (all_contour_points->length, true /* is_gvar */,
|
||||
iterator, &(plan->axes_old_index_tag_map),
|
||||
shared_indices, shared_tuples,
|
||||
tuple_vars /* OUT */))
|
||||
tuple_vars, /* OUT */
|
||||
is_composite_glyph))
|
||||
return false;
|
||||
glyph_variations.push (std::move (tuple_vars));
|
||||
}
|
||||
|
|
@ -114,13 +118,15 @@ struct glyph_variations_t
|
|||
bool instantiate (const hb_subset_plan_t *plan)
|
||||
{
|
||||
unsigned count = plan->new_to_old_gid_list.length;
|
||||
bool iup_optimize = false;
|
||||
iup_optimize = plan->flags & HB_SUBSET_FLAGS_OPTIMIZE_IUP_DELTAS;
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
{
|
||||
hb_codepoint_t new_gid = plan->new_to_old_gid_list[i].first;
|
||||
contour_point_vector_t *all_points;
|
||||
if (!plan->new_gid_contour_points_map.has (new_gid, &all_points))
|
||||
return false;
|
||||
if (!glyph_variations[i].instantiate (plan->axes_location, plan->axes_triple_distances, all_points))
|
||||
if (!glyph_variations[i].instantiate (plan->axes_location, plan->axes_triple_distances, all_points, iup_optimize))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
|
|
@ -340,7 +346,8 @@ struct gvar
|
|||
const glyph_variations_t& glyph_vars,
|
||||
Iterator it,
|
||||
unsigned axis_count,
|
||||
unsigned num_glyphs) const
|
||||
unsigned num_glyphs,
|
||||
bool force_long_offsets) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
gvar *out = c->allocate_min<gvar> ();
|
||||
|
|
@ -352,7 +359,7 @@ struct gvar
|
|||
out->glyphCountX = hb_min (0xFFFFu, num_glyphs);
|
||||
|
||||
unsigned glyph_var_data_size = glyph_vars.compiled_byte_size ();
|
||||
bool long_offset = glyph_var_data_size & ~0xFFFFu;
|
||||
bool long_offset = glyph_var_data_size & ~0xFFFFu || force_long_offsets;
|
||||
out->flags = long_offset ? 1 : 0;
|
||||
|
||||
HBUINT8 *glyph_var_data_offsets = c->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false);
|
||||
|
|
@ -393,7 +400,12 @@ struct gvar
|
|||
unsigned axis_count = c->plan->axes_index_map.get_population ();
|
||||
unsigned num_glyphs = c->plan->num_output_glyphs ();
|
||||
auto it = hb_iter (c->plan->new_to_old_gid_list);
|
||||
return_trace (serialize (c->serializer, glyph_vars, it, axis_count, num_glyphs));
|
||||
|
||||
bool force_long_offsets = false;
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
force_long_offsets = c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS;
|
||||
#endif
|
||||
return_trace (serialize (c->serializer, glyph_vars, it, axis_count, num_glyphs, force_long_offsets));
|
||||
}
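// Editorial sketch (not part of the upstream diff): the offset-width decision
// used above. Glyph variation data whose total size does not fit in 16 bits
// forces the 32-bit ("long") offset array; the experimental IFTB flag forces
// long offsets unconditionally. The helper name is illustrative.
static inline bool gvar_needs_long_offsets (unsigned glyph_var_data_size,
                                            bool force_long_offsets)
{ return force_long_offsets || (glyph_var_data_size & ~0xFFFFu) != 0; }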
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
|
|
@ -429,7 +441,7 @@ struct gvar
|
|||
}
|
||||
|
||||
bool long_offset = (subset_data_size & ~0xFFFFu);
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
long_offset = long_offset || (c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS);
|
||||
#endif
|
||||
out->flags = long_offset ? 1 : 0;
|
||||
|
|
@ -606,7 +618,7 @@ struct gvar
|
|||
|
||||
public:
|
||||
bool apply_deltas_to_points (hb_codepoint_t glyph,
|
||||
hb_array_t<int> coords,
|
||||
hb_array_t<const int> coords,
|
||||
const hb_array_t<contour_point_t> points,
|
||||
bool phantom_only = false) const
|
||||
{
|
||||
|
|
@ -661,16 +673,16 @@ struct gvar
|
|||
|
||||
bool has_private_points = iterator.current_tuple->has_private_points ();
|
||||
if (has_private_points &&
|
||||
!GlyphVariationData::unpack_points (p, private_indices, end))
|
||||
!GlyphVariationData::decompile_points (p, private_indices, end))
|
||||
return false;
|
||||
const hb_array_t<unsigned int> &indices = has_private_points ? private_indices : shared_indices;
|
||||
|
||||
bool apply_to_all = (indices.length == 0);
|
||||
unsigned int num_deltas = apply_to_all ? points.length : indices.length;
|
||||
if (unlikely (!x_deltas.resize (num_deltas, false))) return false;
|
||||
if (unlikely (!GlyphVariationData::unpack_deltas (p, x_deltas, end))) return false;
|
||||
if (unlikely (!GlyphVariationData::decompile_deltas (p, x_deltas, end))) return false;
|
||||
if (unlikely (!y_deltas.resize (num_deltas, false))) return false;
|
||||
if (unlikely (!GlyphVariationData::unpack_deltas (p, y_deltas, end))) return false;
|
||||
if (unlikely (!GlyphVariationData::decompile_deltas (p, y_deltas, end))) return false;
|
||||
|
||||
if (!apply_to_all)
|
||||
{
|
||||
|
|
|
|||
|
|
@ -188,7 +188,7 @@ struct hvarvvar_subset_plan_t
|
|||
~hvarvvar_subset_plan_t() { fini (); }
|
||||
|
||||
void init (const hb_array_t<const DeltaSetIndexMap *> &index_maps,
|
||||
const VariationStore &_var_store,
|
||||
const ItemVariationStore &_var_store,
|
||||
const hb_subset_plan_t *plan)
|
||||
{
|
||||
index_map_plans.resize (index_maps.length);
|
||||
|
|
@ -263,7 +263,7 @@ struct hvarvvar_subset_plan_t
|
|||
hb_inc_bimap_t outer_map;
|
||||
hb_vector_t<hb_inc_bimap_t> inner_maps;
|
||||
hb_vector_t<index_map_subset_plan_t> index_map_plans;
|
||||
const VariationStore *var_store;
|
||||
const ItemVariationStore *var_store;
|
||||
|
||||
protected:
|
||||
hb_vector_t<hb_set_t *> inner_sets;
|
||||
|
|
@ -296,7 +296,7 @@ struct HVARVVAR
|
|||
rsbMap.sanitize (c, this));
|
||||
}
|
||||
|
||||
const VariationStore& get_var_store () const
|
||||
const ItemVariationStore& get_var_store () const
|
||||
{ return this+varStore; }
|
||||
|
||||
void listup_index_maps (hb_vector_t<const DeltaSetIndexMap *> &index_maps) const
|
||||
|
|
@ -384,7 +384,7 @@ struct HVARVVAR
|
|||
|
||||
float get_advance_delta_unscaled (hb_codepoint_t glyph,
|
||||
const int *coords, unsigned int coord_count,
|
||||
VariationStore::cache_t *store_cache = nullptr) const
|
||||
ItemVariationStore::cache_t *store_cache = nullptr) const
|
||||
{
|
||||
uint32_t varidx = (this+advMap).map (glyph);
|
||||
return (this+varStore).get_delta (varidx,
|
||||
|
|
@ -405,7 +405,7 @@ struct HVARVVAR
|
|||
public:
|
||||
FixedVersion<>version; /* Version of the metrics variation table
|
||||
* initially set to 0x00010000u */
|
||||
Offset32To<VariationStore>
|
||||
Offset32To<ItemVariationStore>
|
||||
varStore; /* Offset to item variation store table. */
|
||||
Offset32To<DeltaSetIndexMap>
|
||||
advMap; /* Offset to advance var-idx mapping. */
|
||||
|
|
|
|||
|
|
@ -56,7 +56,7 @@ struct VariationValueRecord
|
|||
|
||||
public:
|
||||
Tag valueTag; /* Four-byte tag identifying a font-wide measure. */
|
||||
VarIdx varIdx; /* Outer/inner index into VariationStore item. */
|
||||
VarIdx varIdx; /* Outer/inner index into ItemVariationStore item. */
|
||||
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
|
|
@ -106,7 +106,7 @@ struct MVAR
|
|||
out->valueRecordCount = valueRecordCount;
|
||||
|
||||
item_variations_t item_vars;
|
||||
const VariationStore& src_var_store = this+varStore;
|
||||
const ItemVariationStore& src_var_store = this+varStore;
|
||||
|
||||
if (!item_vars.instantiate (src_var_store, c->plan))
|
||||
return_trace (false);
|
||||
|
|
@ -159,7 +159,7 @@ protected:
|
|||
HBUINT16 valueRecordSize;/* The size in bytes of each value record —
|
||||
* must be greater than zero. */
|
||||
HBUINT16 valueRecordCount;/* The number of value records — may be zero. */
|
||||
Offset16To<VariationStore>
|
||||
Offset16To<ItemVariationStore>
|
||||
varStore; /* Offset to item variation store table. */
|
||||
UnsizedArrayOf<HBUINT8>
|
||||
valuesZ; /* Array of value records. The records must be
|
||||
|
|
|
|||
32
modules/juce_graphics/fonts/harfbuzz/hb-ot-var-varc-table.hh
Normal file
32
modules/juce_graphics/fonts/harfbuzz/hb-ot-var-varc-table.hh
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* Copyright © 2024 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Google Author(s): Behdad Esfahbod
|
||||
*/
|
||||
|
||||
#ifndef HB_OT_VAR_VARC_TABLE_HH
|
||||
#define HB_OT_VAR_VARC_TABLE_HH
|
||||
|
||||
#include "OT/Var/VARC/VARC.hh"
|
||||
|
||||
#endif /* HB_OT_VAR_VARC_TABLE_HH */
|
||||
|
|
@ -28,169 +28,8 @@
|
|||
#include "hb.hh"
|
||||
#include "hb-paint.h"
|
||||
|
||||
#include "hb-geometry.hh"
|
||||
|
||||
typedef struct hb_extents_t
|
||||
{
|
||||
hb_extents_t () {}
|
||||
hb_extents_t (float xmin, float ymin, float xmax, float ymax) :
|
||||
xmin (xmin), ymin (ymin), xmax (xmax), ymax (ymax) {}
|
||||
|
||||
bool is_empty () const { return xmin >= xmax || ymin >= ymax; }
|
||||
bool is_void () const { return xmin > xmax; }
|
||||
|
||||
void union_ (const hb_extents_t &o)
|
||||
{
|
||||
xmin = hb_min (xmin, o.xmin);
|
||||
ymin = hb_min (ymin, o.ymin);
|
||||
xmax = hb_max (xmax, o.xmax);
|
||||
ymax = hb_max (ymax, o.ymax);
|
||||
}
|
||||
|
||||
void intersect (const hb_extents_t &o)
|
||||
{
|
||||
xmin = hb_max (xmin, o.xmin);
|
||||
ymin = hb_max (ymin, o.ymin);
|
||||
xmax = hb_min (xmax, o.xmax);
|
||||
ymax = hb_min (ymax, o.ymax);
|
||||
}
|
||||
|
||||
void
|
||||
add_point (float x, float y)
|
||||
{
|
||||
if (unlikely (is_void ()))
|
||||
{
|
||||
xmin = xmax = x;
|
||||
ymin = ymax = y;
|
||||
}
|
||||
else
|
||||
{
|
||||
xmin = hb_min (xmin, x);
|
||||
ymin = hb_min (ymin, y);
|
||||
xmax = hb_max (xmax, x);
|
||||
ymax = hb_max (ymax, y);
|
||||
}
|
||||
}
|
||||
|
||||
float xmin = 0.f;
|
||||
float ymin = 0.f;
|
||||
float xmax = -1.f;
|
||||
float ymax = -1.f;
|
||||
} hb_extents_t;
|
||||
|
||||
typedef struct hb_transform_t
|
||||
{
|
||||
hb_transform_t () {}
|
||||
hb_transform_t (float xx, float yx,
|
||||
float xy, float yy,
|
||||
float x0, float y0) :
|
||||
xx (xx), yx (yx), xy (xy), yy (yy), x0 (x0), y0 (y0) {}
|
||||
|
||||
void multiply (const hb_transform_t &o)
|
||||
{
|
||||
/* Copied from cairo, with "o" being "a" there and "this" being "b" there. */
|
||||
hb_transform_t r;
|
||||
|
||||
r.xx = o.xx * xx + o.yx * xy;
|
||||
r.yx = o.xx * yx + o.yx * yy;
|
||||
|
||||
r.xy = o.xy * xx + o.yy * xy;
|
||||
r.yy = o.xy * yx + o.yy * yy;
|
||||
|
||||
r.x0 = o.x0 * xx + o.y0 * xy + x0;
|
||||
r.y0 = o.x0 * yx + o.y0 * yy + y0;
|
||||
|
||||
*this = r;
|
||||
}
|
||||
|
||||
void transform_distance (float &dx, float &dy) const
|
||||
{
|
||||
float new_x = xx * dx + xy * dy;
|
||||
float new_y = yx * dx + yy * dy;
|
||||
dx = new_x;
|
||||
dy = new_y;
|
||||
}
|
||||
|
||||
void transform_point (float &x, float &y) const
|
||||
{
|
||||
transform_distance (x, y);
|
||||
x += x0;
|
||||
y += y0;
|
||||
}
|
||||
|
||||
void transform_extents (hb_extents_t &extents) const
|
||||
{
|
||||
float quad_x[4], quad_y[4];
|
||||
|
||||
quad_x[0] = extents.xmin;
|
||||
quad_y[0] = extents.ymin;
|
||||
quad_x[1] = extents.xmin;
|
||||
quad_y[1] = extents.ymax;
|
||||
quad_x[2] = extents.xmax;
|
||||
quad_y[2] = extents.ymin;
|
||||
quad_x[3] = extents.xmax;
|
||||
quad_y[3] = extents.ymax;
|
||||
|
||||
extents = hb_extents_t {};
|
||||
for (unsigned i = 0; i < 4; i++)
|
||||
{
|
||||
transform_point (quad_x[i], quad_y[i]);
|
||||
extents.add_point (quad_x[i], quad_y[i]);
|
||||
}
|
||||
}
|
||||
|
||||
float xx = 1.f;
|
||||
float yx = 0.f;
|
||||
float xy = 0.f;
|
||||
float yy = 1.f;
|
||||
float x0 = 0.f;
|
||||
float y0 = 0.f;
|
||||
} hb_transform_t;
|
||||
|
||||
typedef struct hb_bounds_t
|
||||
{
|
||||
enum status_t {
|
||||
UNBOUNDED,
|
||||
BOUNDED,
|
||||
EMPTY,
|
||||
};
|
||||
|
||||
hb_bounds_t (status_t status) : status (status) {}
|
||||
hb_bounds_t (const hb_extents_t &extents) :
|
||||
status (extents.is_empty () ? EMPTY : BOUNDED), extents (extents) {}
|
||||
|
||||
void union_ (const hb_bounds_t &o)
|
||||
{
|
||||
if (o.status == UNBOUNDED)
|
||||
status = UNBOUNDED;
|
||||
else if (o.status == BOUNDED)
|
||||
{
|
||||
if (status == EMPTY)
|
||||
*this = o;
|
||||
else if (status == BOUNDED)
|
||||
extents.union_ (o.extents);
|
||||
}
|
||||
}
|
||||
|
||||
void intersect (const hb_bounds_t &o)
|
||||
{
|
||||
if (o.status == EMPTY)
|
||||
status = EMPTY;
|
||||
else if (o.status == BOUNDED)
|
||||
{
|
||||
if (status == UNBOUNDED)
|
||||
*this = o;
|
||||
else if (status == BOUNDED)
|
||||
{
|
||||
extents.intersect (o.extents);
|
||||
if (extents.is_empty ())
|
||||
status = EMPTY;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
status_t status;
|
||||
hb_extents_t extents;
|
||||
} hb_bounds_t;
|
||||
|
||||
typedef struct hb_paint_extents_context_t hb_paint_extents_context_t;
|
||||
|
||||
|
|
|
|||
|
|
@ -163,7 +163,7 @@ struct hb_priority_queue_t
|
|||
goto repeat;
|
||||
}
|
||||
|
||||
void swap (unsigned a, unsigned b)
|
||||
void swap (unsigned a, unsigned b) noexcept
|
||||
{
|
||||
assert (a < heap.length);
|
||||
assert (b < heap.length);
|
||||
|
|
|
|||
476
modules/juce_graphics/fonts/harfbuzz/hb-repacker.hh
Normal file
476
modules/juce_graphics/fonts/harfbuzz/hb-repacker.hh
Normal file
|
|
@ -0,0 +1,476 @@
|
|||
/*
|
||||
* Copyright © 2020 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Google Author(s): Garret Rieger
|
||||
*/
|
||||
|
||||
#ifndef HB_REPACKER_HH
|
||||
#define HB_REPACKER_HH
|
||||
|
||||
#include "hb-open-type.hh"
|
||||
#include "hb-map.hh"
|
||||
#include "hb-vector.hh"
|
||||
#include "graph/graph.hh"
|
||||
#include "graph/gsubgpos-graph.hh"
|
||||
#include "graph/serialize.hh"
|
||||
|
||||
using graph::graph_t;
|
||||
|
||||
/*
|
||||
* For a detailed writeup on the overflow resolution algorithm see:
|
||||
* docs/repacker.md
|
||||
*/
|
||||
|
||||
struct lookup_size_t
|
||||
{
|
||||
unsigned lookup_index;
|
||||
size_t size;
|
||||
unsigned num_subtables;
|
||||
|
||||
static int cmp (const void* a, const void* b)
|
||||
{
|
||||
return cmp ((const lookup_size_t*) a,
|
||||
(const lookup_size_t*) b);
|
||||
}
|
||||
|
||||
static int cmp (const lookup_size_t* a, const lookup_size_t* b)
|
||||
{
|
||||
double subtables_per_byte_a = (double) a->num_subtables / (double) a->size;
|
||||
double subtables_per_byte_b = (double) b->num_subtables / (double) b->size;
|
||||
if (subtables_per_byte_a == subtables_per_byte_b) {
|
||||
return b->lookup_index - a->lookup_index;
|
||||
}
|
||||
|
||||
double cmp = subtables_per_byte_b - subtables_per_byte_a;
|
||||
if (cmp < 0) return -1;
|
||||
if (cmp > 0) return 1;
|
||||
return 0;
|
||||
}
|
||||
};
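// Editorial worked example (not part of the upstream diff): with the comparator
// above, a lookup holding 4 subtables in 1000 bytes (0.004 subtables/byte)
// sorts ahead of one holding 2 subtables in 4000 bytes (0.0005), so the denser
// lookup is the first candidate to stay non-extension; exact ties keep the
// higher lookup index first.
static_assert (4.0 / 1000.0 > 2.0 / 4000.0, "denser lookup sorts first");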
|
||||
|
||||
static inline
|
||||
bool _presplit_subtables_if_needed (graph::gsubgpos_graph_context_t& ext_context)
|
||||
{
|
||||
  // For each lookup this will check the size of its subtables and split them as needed
  // so that no subtable is at risk of overflowing (for the subtable types where
  // splitting is supported).
  //
  // TODO(grieger): de-dup newly added nodes as necessary. Probably just want a full de-dup
  //                pass after this processing is done. Not strictly necessary, as splits are
  //                only done where overflow is likely, so the de-dup would probably get undone
  //                later anyway.
|
||||
|
||||
  // The loop below can modify the contents of ext_context.lookups if new subtables are added
  // to a lookup during a split. So save the initial set of lookup indices so the iteration doesn't
  // risk accessing freed memory if ext_context.lookups gets resized.
|
||||
hb_set_t lookup_indices(ext_context.lookups.keys ());
|
||||
for (unsigned lookup_index : lookup_indices)
|
||||
{
|
||||
graph::Lookup* lookup = ext_context.lookups.get(lookup_index);
|
||||
if (!lookup->split_subtables_if_needed (ext_context, lookup_index))
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/*
|
||||
* Analyze the lookups in a GSUB/GPOS table and decide if any should be promoted
|
||||
* to extension lookups.
|
||||
*/
|
||||
static inline
|
||||
bool _promote_extensions_if_needed (graph::gsubgpos_graph_context_t& ext_context)
|
||||
{
|
||||
// Simple Algorithm (v1, current):
|
||||
// 1. Calculate how many bytes each non-extension lookup consumes.
|
||||
// 2. Select up to 64k of those to remain as non-extension (greedy, highest subtables per byte first)
|
||||
// 3. Promote the rest.
|
||||
//
|
||||
// Advanced Algorithm (v2, not implemented):
|
||||
// 1. Perform connected component analysis using lookups as roots.
|
||||
// 2. Compute size of each connected component.
|
||||
// 3. Select up to 64k worth of connected components to remain as non-extensions.
|
||||
// (greedy, highest subtables per byte first)
|
||||
// 4. Promote the rest.
|
||||
|
||||
// TODO(garretrieger): support extension demotion, then consider all lookups. Requires advanced algo.
|
||||
// TODO(garretrieger): also support extension promotion during iterative resolution phase, then
|
||||
// we can use a less conservative threshold here.
|
||||
// TODO(grieger): skip this for the 24 bit case.
|
||||
if (!ext_context.lookups) return true;
|
||||
|
||||
unsigned total_lookup_table_sizes = 0;
|
||||
hb_vector_t<lookup_size_t> lookup_sizes;
|
||||
lookup_sizes.alloc (ext_context.lookups.get_population (), true);
|
||||
|
||||
for (unsigned lookup_index : ext_context.lookups.keys ())
|
||||
{
|
||||
const auto& lookup_v = ext_context.graph.vertices_[lookup_index];
|
||||
total_lookup_table_sizes += lookup_v.table_size ();
|
||||
|
||||
const graph::Lookup* lookup = ext_context.lookups.get(lookup_index);
|
||||
hb_set_t visited;
|
||||
lookup_sizes.push (lookup_size_t {
|
||||
lookup_index,
|
||||
ext_context.graph.find_subgraph_size (lookup_index, visited),
|
||||
lookup->number_of_subtables (),
|
||||
});
|
||||
}
|
||||
|
||||
lookup_sizes.qsort ();
|
||||
|
||||
size_t lookup_list_size = ext_context.graph.vertices_[ext_context.lookup_list_index].table_size ();
|
||||
size_t l2_l3_size = lookup_list_size + total_lookup_table_sizes; // Lookup List + Lookups
|
||||
size_t l3_l4_size = total_lookup_table_sizes; // Lookups + SubTables
|
||||
size_t l4_plus_size = 0; // SubTables + their descendants
|
||||
|
||||
  // Start by assuming all lookups are using extension subtables; this size will be removed later
  // if it's decided not to make a lookup an extension.
|
||||
for (auto p : lookup_sizes)
|
||||
{
|
||||
// TODO(garretrieger): this overestimates the extension subtables size because some extension subtables may be
|
||||
// reused. However, we can't correct this until we have connected component analysis in place.
|
||||
unsigned subtables_size = p.num_subtables * 8;
|
||||
l3_l4_size += subtables_size;
|
||||
l4_plus_size += subtables_size;
|
||||
}
|
||||
|
||||
bool layers_full = false;
|
||||
for (auto p : lookup_sizes)
|
||||
{
|
||||
const graph::Lookup* lookup = ext_context.lookups.get(p.lookup_index);
|
||||
if (lookup->is_extension (ext_context.table_tag))
|
||||
// already an extension so size is counted by the loop above.
|
||||
continue;
|
||||
|
||||
if (!layers_full)
|
||||
{
|
||||
size_t lookup_size = ext_context.graph.vertices_[p.lookup_index].table_size ();
|
||||
hb_set_t visited;
|
||||
size_t subtables_size = ext_context.graph.find_subgraph_size (p.lookup_index, visited, 1) - lookup_size;
|
||||
size_t remaining_size = p.size - subtables_size - lookup_size;
|
||||
|
||||
l3_l4_size += subtables_size;
|
||||
l3_l4_size -= p.num_subtables * 8;
|
||||
l4_plus_size += subtables_size + remaining_size;
|
||||
|
||||
if (l2_l3_size < (1 << 16)
|
||||
&& l3_l4_size < (1 << 16)
|
||||
          && l4_plus_size < (1 << 16)) continue; // this lookup fits within all layer groups
|
||||
|
||||
layers_full = true;
|
||||
}
|
||||
|
||||
if (!ext_context.lookups.get(p.lookup_index)->make_extension (ext_context, p.lookup_index))
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static inline
|
||||
bool _try_isolating_subgraphs (const hb_vector_t<graph::overflow_record_t>& overflows,
|
||||
graph_t& sorted_graph)
|
||||
{
|
||||
unsigned space = 0;
|
||||
hb_set_t roots_to_isolate;
|
||||
|
||||
for (int i = overflows.length - 1; i >= 0; i--)
|
||||
{
|
||||
const graph::overflow_record_t& r = overflows[i];
|
||||
|
||||
unsigned root;
|
||||
unsigned overflow_space = sorted_graph.space_for (r.parent, &root);
|
||||
if (!overflow_space) continue;
|
||||
if (sorted_graph.num_roots_for_space (overflow_space) <= 1) continue;
|
||||
|
||||
if (!space) {
|
||||
space = overflow_space;
|
||||
}
|
||||
|
||||
if (space == overflow_space)
|
||||
roots_to_isolate.add(root);
|
||||
}
|
||||
|
||||
if (!roots_to_isolate) return false;
|
||||
|
||||
unsigned maximum_to_move = hb_max ((sorted_graph.num_roots_for_space (space) / 2u), 1u);
|
||||
if (roots_to_isolate.get_population () > maximum_to_move) {
|
||||
// Only move at most half of the roots in a space at a time.
|
||||
unsigned extra = roots_to_isolate.get_population () - maximum_to_move;
|
||||
while (extra--) {
|
||||
uint32_t root = HB_SET_VALUE_INVALID;
|
||||
roots_to_isolate.previous (&root);
|
||||
roots_to_isolate.del (root);
|
||||
}
|
||||
}
|
||||
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr,
|
||||
"Overflow in space %u (%u roots). Moving %u roots to space %u.",
|
||||
space,
|
||||
sorted_graph.num_roots_for_space (space),
|
||||
roots_to_isolate.get_population (),
|
||||
sorted_graph.next_space ());
|
||||
|
||||
sorted_graph.isolate_subgraph (roots_to_isolate);
|
||||
sorted_graph.move_to_new_space (roots_to_isolate);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static inline
|
||||
bool _resolve_shared_overflow(const hb_vector_t<graph::overflow_record_t>& overflows,
|
||||
int overflow_index,
|
||||
graph_t& sorted_graph)
|
||||
{
|
||||
const graph::overflow_record_t& r = overflows[overflow_index];
|
||||
|
||||
// Find all of the parents in overflowing links that link to this
|
||||
// same child node. We will then try duplicating the child node and
|
||||
// re-assigning all of these parents to the duplicate.
|
||||
hb_set_t parents;
|
||||
parents.add(r.parent);
|
||||
for (int i = overflow_index - 1; i >= 0; i--) {
|
||||
const graph::overflow_record_t& r2 = overflows[i];
|
||||
if (r2.child == r.child) {
|
||||
parents.add(r2.parent);
|
||||
}
|
||||
}
|
||||
|
||||
unsigned result = sorted_graph.duplicate(&parents, r.child);
|
||||
if (result == (unsigned) -1 && parents.get_population() > 2) {
|
||||
// All links to the child are overflowing, so we can't include all
|
||||
// in the duplication. Remove one parent from the duplication.
|
||||
// Remove the lowest index parent, which will be the closest to the child.
|
||||
parents.del(parents.get_min());
|
||||
result = sorted_graph.duplicate(&parents, r.child);
|
||||
}
|
||||
|
||||
if (result == (unsigned) -1) return result;
|
||||
|
||||
if (parents.get_population() > 1) {
|
||||
    // If the duplicated node has more than one parent, pre-emptively raise its priority to the
    // maximum. This will place it close to the parents. Nodes with only one parent don't need
    // this, as normal overflow resolution will raise priority if needed.
    //
    // Reasoning: most of the parents of this child are likely at the same layer in the graph.
    // Duplicating the child will theoretically allow it to be placed closer to its parents.
    // However, due to the default shortest-distance sort its placement will remain in the same
    // layer, thus it will remain in roughly the same position (and distance from parents) as
    // the original child node. The overflow resolution will attempt to move nodes closer, but
    // only for non-shared nodes. Since this node is shared, it will simply be given further
    // duplication, which defeats the attempt to duplicate with multiple parents. To fix this we
    // pre-emptively raise priority now, which allows the duplicated node to pack into the same
    // layer as its parents.
|
||||
sorted_graph.vertices_[result].give_max_priority();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static inline
|
||||
bool _process_overflows (const hb_vector_t<graph::overflow_record_t>& overflows,
|
||||
hb_set_t& priority_bumped_parents,
|
||||
graph_t& sorted_graph)
|
||||
{
|
||||
bool resolution_attempted = false;
|
||||
|
||||
// Try resolving the furthest overflows first.
|
||||
for (int i = overflows.length - 1; i >= 0; i--)
|
||||
{
|
||||
const graph::overflow_record_t& r = overflows[i];
|
||||
const auto& child = sorted_graph.vertices_[r.child];
|
||||
if (child.is_shared ())
|
||||
{
|
||||
// The child object is shared, we may be able to eliminate the overflow
|
||||
// by duplicating it.
|
||||
if (!_resolve_shared_overflow(overflows, i, sorted_graph)) continue;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (child.is_leaf () && !priority_bumped_parents.has (r.parent))
|
||||
{
|
||||
      // This object is too far from its parent; attempt to move it closer.
      //
      // TODO(garretrieger): initially limiting this to leaves since they can be
      //                     moved closer with fewer consequences. However, this can
      //                     likely be used for non-leaf nodes as well.
      // TODO(garretrieger): also try lowering the priority of the parent. Make it
      //                     get placed further up in the ordering, closer to its children.
      //                     This is probably preferable if the total size of the parent object
      //                     is less than the total size of the children (and the parent can be
      //                     moved), since in that case moving the parent will cause a smaller
      //                     increase in the length of other offsets.
|
||||
if (sorted_graph.raise_childrens_priority (r.parent)) {
|
||||
priority_bumped_parents.add (r.parent);
|
||||
resolution_attempted = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// TODO(garretrieger): add additional offset resolution strategies
|
||||
// - Promotion to extension lookups.
|
||||
// - Table splitting.
|
||||
}
|
||||
|
||||
return resolution_attempted;
|
||||
}
|
||||
|
||||
inline bool
|
||||
hb_resolve_graph_overflows (hb_tag_t table_tag,
|
||||
unsigned max_rounds ,
|
||||
bool always_recalculate_extensions,
|
||||
graph_t& sorted_graph /* IN/OUT */)
|
||||
{
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Repacking %c%c%c%c.", HB_UNTAG(table_tag));
|
||||
sorted_graph.sort_shortest_distance ();
|
||||
if (sorted_graph.in_error ())
|
||||
{
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Sorted graph in error state after initial sort.");
|
||||
return false;
|
||||
}
|
||||
|
||||
bool will_overflow = graph::will_overflow (sorted_graph);
|
||||
if (!will_overflow)
|
||||
return true;
|
||||
|
||||
bool is_gsub_or_gpos = (table_tag == HB_OT_TAG_GPOS || table_tag == HB_OT_TAG_GSUB);
|
||||
graph::gsubgpos_graph_context_t ext_context (table_tag, sorted_graph);
|
||||
if (is_gsub_or_gpos && will_overflow)
|
||||
{
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Applying GSUB/GPOS repacking specializations.");
|
||||
if (always_recalculate_extensions)
|
||||
{
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Splitting subtables if needed.");
|
||||
if (!_presplit_subtables_if_needed (ext_context)) {
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Subtable splitting failed.");
|
||||
return false;
|
||||
}
|
||||
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Promoting lookups to extensions if needed.");
|
||||
if (!_promote_extensions_if_needed (ext_context)) {
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Extensions promotion failed.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Assigning spaces to 32 bit subgraphs.");
|
||||
if (sorted_graph.assign_spaces ())
|
||||
sorted_graph.sort_shortest_distance ();
|
||||
else
|
||||
sorted_graph.sort_shortest_distance_if_needed ();
|
||||
}
|
||||
|
||||
unsigned round = 0;
|
||||
hb_vector_t<graph::overflow_record_t> overflows;
|
||||
// TODO(garretrieger): select a good limit for max rounds.
|
||||
while (!sorted_graph.in_error ()
|
||||
&& graph::will_overflow (sorted_graph, &overflows)
|
||||
&& round < max_rounds) {
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "=== Overflow resolution round %u ===", round);
|
||||
print_overflows (sorted_graph, overflows);
|
||||
|
||||
hb_set_t priority_bumped_parents;
|
||||
|
||||
if (!_try_isolating_subgraphs (overflows, sorted_graph))
|
||||
{
|
||||
// Don't count space isolation towards round limit. Only increment
|
||||
// round counter if space isolation made no changes.
|
||||
round++;
|
||||
if (!_process_overflows (overflows, priority_bumped_parents, sorted_graph))
|
||||
{
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "No resolution available :(");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
sorted_graph.sort_shortest_distance ();
|
||||
}
|
||||
|
||||
if (sorted_graph.in_error ())
|
||||
{
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Sorted graph in error state.");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (graph::will_overflow (sorted_graph))
|
||||
{
|
||||
if (is_gsub_or_gpos && !always_recalculate_extensions) {
|
||||
      // If this is a GSUB/GPOS table and we didn't try extension promotion and table splitting,
      // then as a last-ditch effort re-run the repacker with them enabled.
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Failed to find a resolution. Re-running with extension promotion and table splitting enabled.");
|
||||
return hb_resolve_graph_overflows (table_tag, max_rounds, true, sorted_graph);
|
||||
}
|
||||
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Offset overflow resolution failed.");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/*
 * Attempts to modify the topological sorting of the provided object graph to
 * eliminate offset overflows in the links between objects of the graph. If a
 * non-overflowing ordering is found, the updated graph is serialized into a
 * blob and returned.
 *
 * If necessary the structure of the graph may be modified in ways that do not
 * affect the functionality of the graph. For example, shared objects may be
 * duplicated.
 *
 * For a detailed writeup describing how the algorithm operates see:
 * docs/repacker.md
 */
|
||||
template<typename T>
|
||||
inline hb_blob_t*
|
||||
hb_resolve_overflows (const T& packed,
|
||||
hb_tag_t table_tag,
|
||||
unsigned max_rounds = 32,
|
||||
bool recalculate_extensions = false) {
|
||||
graph_t sorted_graph (packed);
|
||||
if (sorted_graph.in_error ())
|
||||
{
|
||||
// Invalid graph definition.
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (!sorted_graph.is_fully_connected ())
|
||||
{
|
||||
sorted_graph.print_orphaned_nodes ();
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (sorted_graph.in_error ())
|
||||
{
|
||||
// Allocations failed somewhere
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr,
|
||||
"Graph is in error, likely due to a memory allocation error.");
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (!hb_resolve_graph_overflows (table_tag, max_rounds, recalculate_extensions, sorted_graph))
|
||||
return nullptr;
|
||||
|
||||
return graph::serialize (sorted_graph);
|
||||
}
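// Editorial usage sketch (not part of the upstream diff): the typical call
// shape for hb_resolve_overflows() from a serializer client. The object_graph()
// accessor name is an assumption here; consult the real call sites (e.g. in
// hb-subset.cc) for the exact plumbing.
static inline hb_blob_t *
repack_serialized_table (hb_serialize_context_t &c, hb_tag_t table_tag)
{
  // 32 rounds and no forced extension recalculation match the defaults above.
  return hb_resolve_overflows (c.object_graph (), table_tag, 32, false);
}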
|
||||
|
||||
#endif /* HB_REPACKER_HH */
|
||||
|
|
@ -91,7 +91,27 @@ struct hb_serialize_context_t
|
|||
}
|
||||
#endif
|
||||
|
||||
friend void swap (object_t& a, object_t& b)
|
||||
bool add_virtual_link (objidx_t objidx)
|
||||
{
|
||||
if (!objidx)
|
||||
return false;
|
||||
|
||||
auto& link = *virtual_links.push ();
|
||||
if (virtual_links.in_error ())
|
||||
return false;
|
||||
|
||||
link.objidx = objidx;
|
||||
// Remaining fields were previously zero'd by push():
|
||||
// link.width = 0;
|
||||
// link.is_signed = 0;
|
||||
// link.whence = 0;
|
||||
// link.position = 0;
|
||||
// link.bias = 0;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
friend void swap (object_t& a, object_t& b) noexcept
|
||||
{
|
||||
hb_swap (a.head, b.head);
|
||||
hb_swap (a.tail, b.tail);
|
||||
|
|
@ -156,9 +176,9 @@ struct hb_serialize_context_t
|
|||
object_t *next;
|
||||
|
||||
auto all_links () const HB_AUTO_RETURN
|
||||
(( hb_concat (this->real_links, this->virtual_links) ));
|
||||
(( hb_concat (real_links, virtual_links) ));
|
||||
auto all_links_writer () HB_AUTO_RETURN
|
||||
(( hb_concat (this->real_links.writer (), this->virtual_links.writer ()) ));
|
||||
(( hb_concat (real_links.writer (), virtual_links.writer ()) ));
|
||||
};
|
||||
|
||||
struct snapshot_t
|
||||
|
|
@ -469,16 +489,40 @@ struct hb_serialize_context_t
|
|||
|
||||
assert (current);
|
||||
|
||||
auto& link = *current->virtual_links.push ();
|
||||
if (current->virtual_links.in_error ())
|
||||
if (!current->add_virtual_link(objidx))
|
||||
err (HB_SERIALIZE_ERROR_OTHER);
|
||||
}
|
||||
|
||||
link.width = 0;
|
||||
link.objidx = objidx;
|
||||
link.is_signed = 0;
|
||||
link.whence = 0;
|
||||
link.position = 0;
|
||||
link.bias = 0;
|
||||
objidx_t last_added_child_index() const {
|
||||
if (unlikely (in_error ())) return (objidx_t) -1;
|
||||
|
||||
assert (current);
|
||||
if (!bool(current->real_links)) {
|
||||
return (objidx_t) -1;
|
||||
}
|
||||
|
||||
return current->real_links[current->real_links.length - 1].objidx;
|
||||
}
|
||||
|
||||
// For the current object ensure that the sub-table bytes for child objidx are always placed
|
||||
// after the subtable bytes for any other existing children. This only ensures that the
|
||||
// repacker will not move the target subtable before the other children
|
||||
// (by adding virtual links). It is up to the caller to ensure the initial serialization
|
||||
// order is correct.
|
||||
void repack_last(objidx_t objidx) {
|
||||
if (unlikely (in_error ())) return;
|
||||
|
||||
if (!objidx)
|
||||
return;
|
||||
|
||||
assert (current);
|
||||
for (auto& l : current->real_links) {
|
||||
if (l.objidx == objidx) {
|
||||
continue;
|
||||
}
|
||||
|
||||
packed[l.objidx]->add_virtual_link(objidx);
|
||||
}
|
||||
}
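  // Editorial usage sketch (not part of the upstream diff): combining the two
  // helpers added above so the most recently pushed child keeps its position
  // after its siblings. This member is hypothetical, not upstream API.
  void repack_newest_child_last ()
  {
    objidx_t last = last_added_child_index ();
    if (last != (objidx_t) -1)
      repack_last (last);
  }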
|
||||
|
||||
template <typename T>
|
||||
|
|
|
|||
|
|
@ -82,7 +82,9 @@ struct hb_set_digest_bits_pattern_t
|
|||
|
||||
void init () { mask = 0; }
|
||||
|
||||
void add (const hb_set_digest_bits_pattern_t &o) { mask |= o.mask; }
|
||||
static hb_set_digest_bits_pattern_t full () { hb_set_digest_bits_pattern_t d; d.mask = (mask_t) -1; return d; }
|
||||
|
||||
void union_ (const hb_set_digest_bits_pattern_t &o) { mask |= o.mask; }
|
||||
|
||||
void add (hb_codepoint_t g) { mask |= mask_for (g); }
|
||||
|
||||
|
|
@ -129,11 +131,14 @@ struct hb_set_digest_bits_pattern_t
|
|||
bool may_have (hb_codepoint_t g) const
|
||||
{ return mask & mask_for (g); }
|
||||
|
||||
bool operator [] (hb_codepoint_t g) const
|
||||
{ return may_have (g); }
|
||||
|
||||
private:
|
||||
|
||||
static mask_t mask_for (hb_codepoint_t g)
|
||||
{ return ((mask_t) 1) << ((g >> shift) & (mask_bits - 1)); }
|
||||
mask_t mask;
|
||||
mask_t mask = 0;
|
||||
};
|
||||
|
||||
template <typename head_t, typename tail_t>
|
||||
|
|
@ -145,10 +150,12 @@ struct hb_set_digest_combiner_t
|
|||
tail.init ();
|
||||
}
|
||||
|
||||
void add (const hb_set_digest_combiner_t &o)
|
||||
static hb_set_digest_combiner_t full () { hb_set_digest_combiner_t d; d.head = head_t::full(); d.tail = tail_t::full (); return d; }
|
||||
|
||||
void union_ (const hb_set_digest_combiner_t &o)
|
||||
{
|
||||
head.add (o.head);
|
||||
tail.add (o.tail);
|
||||
head.union_ (o.head);
|
||||
tail.union_(o.tail);
|
||||
}
|
||||
|
||||
void add (hb_codepoint_t g)
|
||||
|
|
@ -188,6 +195,9 @@ struct hb_set_digest_combiner_t
|
|||
return head.may_have (g) && tail.may_have (g);
|
||||
}
|
||||
|
||||
bool operator [] (hb_codepoint_t g) const
|
||||
{ return may_have (g); }
|
||||
|
||||
private:
|
||||
head_t head;
|
||||
tail_t tail;
|
||||
|
|
|
|||
|
|
@ -44,10 +44,10 @@ struct hb_sparseset_t
|
|||
~hb_sparseset_t () { fini (); }
|
||||
|
||||
hb_sparseset_t (const hb_sparseset_t& other) : hb_sparseset_t () { set (other); }
|
||||
hb_sparseset_t (hb_sparseset_t&& other) : hb_sparseset_t () { s = std::move (other.s); }
|
||||
hb_sparseset_t (hb_sparseset_t&& other) noexcept : hb_sparseset_t () { s = std::move (other.s); }
|
||||
hb_sparseset_t& operator = (const hb_sparseset_t& other) { set (other); return *this; }
|
||||
hb_sparseset_t& operator = (hb_sparseset_t&& other) { s = std::move (other.s); return *this; }
|
||||
friend void swap (hb_sparseset_t& a, hb_sparseset_t& b) { hb_swap (a.s, b.s); }
|
||||
hb_sparseset_t& operator = (hb_sparseset_t&& other) noexcept { s = std::move (other.s); return *this; }
|
||||
friend void swap (hb_sparseset_t& a, hb_sparseset_t& b) noexcept { hb_swap (a.s, b.s); }
|
||||
|
||||
hb_sparseset_t (std::initializer_list<hb_codepoint_t> lst) : hb_sparseset_t ()
|
||||
{
|
||||
|
|
@ -86,7 +86,7 @@ struct hb_sparseset_t
|
|||
uint32_t hash () const { return s.hash (); }
|
||||
|
||||
void add (hb_codepoint_t g) { s.add (g); }
|
||||
bool add_range (hb_codepoint_t a, hb_codepoint_t b) { return s.add_range (a, b); }
|
||||
bool add_range (hb_codepoint_t first, hb_codepoint_t last) { return s.add_range (first, last); }
|
||||
|
||||
template <typename T>
|
||||
void add_array (const T *array, unsigned int count, unsigned int stride=sizeof(T))
|
||||
|
|
@ -166,7 +166,7 @@ struct hb_set_t : hb_sparseset_t<hb_bit_set_invertible_t>
|
|||
~hb_set_t () = default;
|
||||
hb_set_t () : sparseset () {};
|
||||
hb_set_t (const hb_set_t &o) : sparseset ((sparseset &) o) {};
|
||||
hb_set_t (hb_set_t&& o) : sparseset (std::move ((sparseset &) o)) {}
|
||||
hb_set_t (hb_set_t&& o) noexcept : sparseset (std::move ((sparseset &) o)) {}
|
||||
hb_set_t& operator = (const hb_set_t&) = default;
|
||||
hb_set_t& operator = (hb_set_t&&) = default;
|
||||
hb_set_t (std::initializer_list<hb_codepoint_t> lst) : sparseset (lst) {}
|
||||
|
|
|
|||
|
|
@ -115,7 +115,7 @@ struct str_encoder_t
|
|||
encode_byte (OpCode_BCD);
|
||||
|
||||
// Based on:
|
||||
// https://github.com/fonttools/fonttools/blob/97ed3a61cde03e17b8be36f866192fbd56f1d1a7/Lib/fontTools/misc/psCharStrings.py#L265-L294
|
||||
// https://github.com/fonttools/fonttools/blob/0738c41dfbcbc213ab9263f486ef0cccc6eb5ce5/Lib/fontTools/misc/psCharStrings.py#L267-L316
|
||||
|
||||
char buf[16];
|
||||
/* FontTools has the following comment:
|
||||
|
|
@ -133,6 +133,10 @@ struct str_encoder_t
|
|||
(void) hb_uselocale (((void) freelocale (clocale), oldlocale));
|
||||
|
||||
char *s = buf;
|
||||
size_t len;
|
||||
char *comma = strchr (s, ',');
|
||||
if (comma) // Comma used by some European locales in case uselocale is not available.
|
||||
*comma = '.';
|
||||
if (s[0] == '0' && s[1] == '.')
|
||||
s++;
|
||||
else if (s[0] == '-' && s[1] == '0' && s[2] == '.')
|
||||
|
|
@ -140,6 +144,45 @@ struct str_encoder_t
|
|||
s[1] = '-';
|
||||
s++;
|
||||
}
|
||||
else if ((len = strlen (s)) > 3 && !strcmp (s + len - 3, "000"))
|
||||
{
|
||||
unsigned exponent = len - 3;
|
||||
char *s2 = s + exponent - 1;
|
||||
while (*s2 == '0' && exponent > 1)
|
||||
{
|
||||
s2--;
|
||||
exponent++;
|
||||
}
|
||||
snprintf (s2 + 1, sizeof (buf) - (s2 + 1 - buf), "E%u", exponent);
|
||||
}
|
||||
else
|
||||
{
|
||||
char *dot = strchr (s, '.');
|
||||
char *e = strchr (s, 'E');
|
||||
if (dot && e)
|
||||
{
|
||||
memmove (dot, dot + 1, e - (dot + 1));
|
||||
int exponent = atoi (e + 1);
|
||||
int new_exponent = exponent - (e - (dot + 1));
|
||||
if (new_exponent == 1)
|
||||
{
|
||||
e[-1] = '0';
|
||||
e[0] = '\0';
|
||||
}
|
||||
else
|
||||
snprintf (e - 1, sizeof (buf) - (e - 1 - buf), "E%d", new_exponent);
|
||||
}
|
||||
}
|
||||
if ((s[0] == '.' && s[1] == '0') || (s[0] == '-' && s[1] == '.' && s[2] == '0'))
|
||||
{
|
||||
int sign = s[0] == '-';
|
||||
char *s2 = s + sign + 1;
|
||||
while (*s2 == '0')
|
||||
s2++;
|
||||
len = strlen (s2);
|
||||
memmove (s + sign, s2, len);
|
||||
snprintf (s + sign + len, sizeof (buf) - (s + sign + len - buf), "E-%u", (unsigned) (strlen (s + sign) - 1));
|
||||
}
|
||||
hb_vector_t<char> nibbles;
|
||||
while (*s)
|
||||
{
|
||||
|
|
@ -155,20 +198,22 @@ struct str_encoder_t
|
|||
{
|
||||
s++;
|
||||
nibbles.push (0x0C); // E-
|
||||
continue;
|
||||
} else {
|
||||
if (c2 == '+')
|
||||
s++;
|
||||
nibbles.push (0x0B); // E
|
||||
}
|
||||
if (c2 == '+')
|
||||
if (*s == '0')
|
||||
s++;
|
||||
nibbles.push (0x0B); // E
|
||||
continue;
|
||||
}
|
||||
|
||||
case '.': case ',': // Comma for some European locales in case no uselocale available.
|
||||
case '.':
|
||||
nibbles.push (0x0A); // .
|
||||
continue;
|
||||
|
||||
case '-':
|
||||
nibbles.push (0x0E); // .
|
||||
nibbles.push (0x0E); // -
|
||||
continue;
|
||||
}
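      // Editorial worked example (not part of the upstream diff): with the
      // nibble codes used above (0x0A '.', 0x0B 'E', 0x0C 'E-', 0x0E '-') plus
      // the standard 0x0F end-of-number nibble, the value -2.25 encodes as the
      // nibbles e 2 a 2 5 f, i.e. the bytes 0xE2 0xA2 0x5F after OpCode_BCD.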
|
||||
|
||||
|
|
|
|||
|
|
@ -248,7 +248,7 @@ struct cff2_subr_subsetter_t : subr_subsetter_t<cff2_subr_subsetter_t, CFF2Subrs
|
|||
struct cff2_private_blend_encoder_param_t
|
||||
{
|
||||
cff2_private_blend_encoder_param_t (hb_serialize_context_t *c,
|
||||
const CFF2VariationStore *varStore,
|
||||
const CFF2ItemVariationStore *varStore,
|
||||
hb_array_t<int> normalized_coords) :
|
||||
c (c), varStore (varStore), normalized_coords (normalized_coords) {}
|
||||
|
||||
|
|
@ -284,7 +284,7 @@ struct cff2_private_blend_encoder_param_t
|
|||
unsigned ivs = 0;
|
||||
unsigned region_count = 0;
|
||||
hb_vector_t<float> scalars;
|
||||
const CFF2VariationStore *varStore = nullptr;
|
||||
const CFF2ItemVariationStore *varStore = nullptr;
|
||||
hb_array_t<int> normalized_coords;
|
||||
};
|
||||
|
||||
|
|
@ -378,7 +378,7 @@ struct cff2_private_dict_blend_opset_t : dict_opset_t
|
|||
struct cff2_private_dict_op_serializer_t : op_serializer_t
|
||||
{
|
||||
cff2_private_dict_op_serializer_t (bool desubroutinize_, bool drop_hints_, bool pinned_,
|
||||
const CFF::CFF2VariationStore* varStore_,
|
||||
const CFF::CFF2ItemVariationStore* varStore_,
|
||||
hb_array_t<int> normalized_coords_)
|
||||
: desubroutinize (desubroutinize_), drop_hints (drop_hints_), pinned (pinned_),
|
||||
varStore (varStore_), normalized_coords (normalized_coords_) {}
|
||||
|
|
@ -416,7 +416,7 @@ struct cff2_private_dict_op_serializer_t : op_serializer_t
|
|||
const bool desubroutinize;
|
||||
const bool drop_hints;
|
||||
const bool pinned;
|
||||
const CFF::CFF2VariationStore* varStore;
|
||||
const CFF::CFF2ItemVariationStore* varStore;
|
||||
hb_array_t<int> normalized_coords;
|
||||
};
|
||||
|
||||
|
|
@ -628,10 +628,10 @@ OT::cff2::accelerator_subset_t::serialize (hb_serialize_context_t *c,
|
|||
}
|
||||
|
||||
/* variation store */
|
||||
if (varStore != &Null (CFF2VariationStore) &&
|
||||
if (varStore != &Null (CFF2ItemVariationStore) &&
|
||||
!plan.pinned)
|
||||
{
|
||||
auto *dest = c->push<CFF2VariationStore> ();
|
||||
auto *dest = c->push<CFF2ItemVariationStore> ();
|
||||
if (unlikely (!dest->serialize (c, varStore)))
|
||||
{
|
||||
c->pop_discard ();
|
||||
|
|
@ -666,6 +666,9 @@ OT::cff2::accelerator_subset_t::serialize (hb_serialize_context_t *c,
|
|||
bool
|
||||
OT::cff2::accelerator_subset_t::subset (hb_subset_context_t *c) const
|
||||
{
|
||||
if (c->plan->normalized_coords && !c->plan->all_axes_pinned)
|
||||
fprintf (stdout, "warning: CFF partial instancing is not supported.\n");
|
||||
|
||||
cff2_subset_plan cff2_plan;
|
||||
|
||||
if (unlikely (!cff2_plan.create (*this, c->plan))) return false;
|
||||
|
|
|
|||
|
|
@ -24,6 +24,7 @@
|
|||
* Google Author(s): Garret Rieger, Rod Sheeter, Behdad Esfahbod
|
||||
*/
|
||||
|
||||
#include "hb-subset-instancer-solver.hh"
|
||||
#include "hb-subset.hh"
|
||||
#include "hb-set.hh"
|
||||
#include "hb-utf.hh"
|
||||
|
|
@ -50,7 +51,6 @@ hb_subset_input_t::hb_subset_input_t ()
|
|||
HB_TAG ('k', 'e', 'r', 'n'),
|
||||
|
||||
// Copied from fontTools:
|
||||
HB_TAG ('B', 'A', 'S', 'E'),
|
||||
HB_TAG ('J', 'S', 'T', 'F'),
|
||||
HB_TAG ('D', 'S', 'I', 'G'),
|
||||
HB_TAG ('E', 'B', 'D', 'T'),
|
||||
|
|
@ -412,11 +412,52 @@ hb_subset_input_keep_everything (hb_subset_input_t *input)
|
|||
hb_subset_input_set_flags (input,
|
||||
HB_SUBSET_FLAGS_NOTDEF_OUTLINE |
|
||||
HB_SUBSET_FLAGS_GLYPH_NAMES |
|
||||
HB_SUBSET_FLAGS_NAME_LEGACY |
|
||||
HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES |
|
||||
HB_SUBSET_FLAGS_PASSTHROUGH_UNRECOGNIZED);
|
||||
}
|
||||
|
||||
#ifndef HB_NO_VAR
|
||||
/**
|
||||
* hb_subset_input_pin_all_axes_to_default: (skip)
|
||||
* @input: a #hb_subset_input_t object.
|
||||
* @face: a #hb_face_t object.
|
||||
*
|
||||
* Pin all axes to default locations in the given subset input object.
|
||||
*
|
||||
 * All axes in a font must be pinned. Additionally, the `CFF2` table, if present,
|
||||
* will be de-subroutinized.
|
||||
*
|
||||
* Return value: `true` if success, `false` otherwise
|
||||
*
|
||||
* Since: 8.3.1
|
||||
**/
|
||||
HB_EXTERN hb_bool_t
|
||||
hb_subset_input_pin_all_axes_to_default (hb_subset_input_t *input,
|
||||
hb_face_t *face)
|
||||
{
|
||||
unsigned axis_count = hb_ot_var_get_axis_count (face);
|
||||
if (!axis_count) return false;
|
||||
|
||||
hb_ot_var_axis_info_t *axis_infos = (hb_ot_var_axis_info_t *) hb_calloc (axis_count, sizeof (hb_ot_var_axis_info_t));
|
||||
if (unlikely (!axis_infos)) return false;
|
||||
|
||||
(void) hb_ot_var_get_axis_infos (face, 0, &axis_count, axis_infos);
|
||||
|
||||
for (unsigned i = 0; i < axis_count; i++)
|
||||
{
|
||||
hb_tag_t axis_tag = axis_infos[i].tag;
|
||||
double default_val = (double) axis_infos[i].default_value;
|
||||
if (!input->axes_location.set (axis_tag, Triple (default_val, default_val, default_val)))
|
||||
{
|
||||
hb_free (axis_infos);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
hb_free (axis_infos);
|
||||
return true;
|
||||
}
|
||||
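As an aside, a minimal usage sketch (not part of this diff) showing how the new entry point fits the public subset API; the single-glyph input ('A') and the helper name are illustrative assumptions:

#include <hb.h>
#include <hb-subset.h>

/* Sketch: pin every variation axis of `face` to its default location,
 * then subset. Returns a new face the caller owns, or nullptr on failure. */
static hb_face_t *subset_pinned_to_default (hb_face_t *face)
{
  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
  if (!input) return nullptr;

  /* Illustrative glyph selection: keep just 'A'. */
  hb_set_add (hb_subset_input_unicode_set (input), 'A');

  /* New in this change: pin all fvar axes to their defaults. */
  if (!hb_subset_input_pin_all_axes_to_default (input, face))
  {
    hb_subset_input_destroy (input);
    return nullptr;
  }

  hb_face_t *result = hb_subset_or_fail (face, input);
  hb_subset_input_destroy (input);
  return result;
}

With every axis pinned, the subsetter can produce a fully instanced (static) font; as the doc comment above notes, a CFF2 table, if present, is de-subroutinized in the process.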
|
||||
/**
|
||||
* hb_subset_input_pin_axis_to_default: (skip)
|
||||
* @input: a #hb_subset_input_t object.
|
||||
|
|
@ -441,7 +482,7 @@ hb_subset_input_pin_axis_to_default (hb_subset_input_t *input,
|
|||
if (!hb_ot_var_find_axis_info (face, axis_tag, &axis_info))
|
||||
return false;
|
||||
|
||||
float default_val = axis_info.default_value;
|
||||
double default_val = (double) axis_info.default_value;
|
||||
return input->axes_location.set (axis_tag, Triple (default_val, default_val, default_val));
|
||||
}
|
||||
|
||||
|
|
@ -471,26 +512,22 @@ hb_subset_input_pin_axis_location (hb_subset_input_t *input,
|
|||
if (!hb_ot_var_find_axis_info (face, axis_tag, &axis_info))
|
||||
return false;
|
||||
|
||||
float val = hb_clamp(axis_value, axis_info.min_value, axis_info.max_value);
|
||||
double val = hb_clamp((double) axis_value, (double) axis_info.min_value, (double) axis_info.max_value);
|
||||
return input->axes_location.set (axis_tag, Triple (val, val, val));
|
||||
}
|
||||
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
/**
|
||||
* hb_subset_input_set_axis_range: (skip)
|
||||
* @input: a #hb_subset_input_t object.
|
||||
* @face: a #hb_face_t object.
|
||||
* @axis_tag: Tag of the axis
|
||||
* @axis_min_value: Minimum value of the axis variation range to set
|
||||
* @axis_max_value: Maximum value of the axis variation range to set
|
||||
* @axis_def_value: Default value of the axis variation range to set, in case of
|
||||
* null, it'll be determined automatically
|
||||
* @axis_min_value: Minimum value of the axis variation range to set, if NaN the existing min will be used.
|
||||
* @axis_max_value: Maximum value of the axis variation range to set, if NaN the existing max will be used.
|
||||
* @axis_def_value: Default value of the axis variation range to set, if NaN the existing default will be used.
|
||||
*
|
||||
* Restrict the range of variation on an axis in the given subset input object.
|
||||
* New min/default/max values will be clamped if they're not within the fvar axis range.
|
||||
* If the new default value is null:
|
||||
* If the fvar axis default value is within the new range, then new default
|
||||
* value is the same as original default value.
|
||||
*
|
||||
* If the fvar axis default value is not within the new range, the new default
|
||||
* value will be changed to the new min or max value, whichever is closer to the fvar
|
||||
* axis default.
|
||||
|
|
@ -501,7 +538,7 @@ hb_subset_input_pin_axis_location (hb_subset_input_t *input,
|
|||
*
|
||||
* Return value: `true` if success, `false` otherwise
|
||||
*
|
||||
* XSince: EXPERIMENTAL
|
||||
* Since: 8.5.0
|
||||
**/
|
||||
HB_EXTERN hb_bool_t
|
||||
hb_subset_input_set_axis_range (hb_subset_input_t *input,
|
||||
|
|
@ -509,22 +546,57 @@ hb_subset_input_set_axis_range (hb_subset_input_t *input,
|
|||
hb_tag_t axis_tag,
|
||||
float axis_min_value,
|
||||
float axis_max_value,
|
||||
float *axis_def_value /* IN, maybe NULL */)
|
||||
float axis_def_value)
|
||||
{
|
||||
if (axis_min_value > axis_max_value)
|
||||
return false;
|
||||
|
||||
hb_ot_var_axis_info_t axis_info;
|
||||
if (!hb_ot_var_find_axis_info (face, axis_tag, &axis_info))
|
||||
return false;
|
||||
|
||||
float new_min_val = hb_clamp(axis_min_value, axis_info.min_value, axis_info.max_value);
|
||||
float new_max_val = hb_clamp(axis_max_value, axis_info.min_value, axis_info.max_value);
|
||||
float new_default_val = axis_def_value ? *axis_def_value : axis_info.default_value;
|
||||
new_default_val = hb_clamp(new_default_val, new_min_val, new_max_val);
|
||||
return input->axes_location.set (axis_tag, Triple (new_min_val, new_default_val, new_max_val));
|
||||
float min = !std::isnan(axis_min_value) ? axis_min_value : axis_info.min_value;
|
||||
float max = !std::isnan(axis_max_value) ? axis_max_value : axis_info.max_value;
|
||||
float def = !std::isnan(axis_def_value) ? axis_def_value : axis_info.default_value;
|
||||
|
||||
if (min > max)
|
||||
return false;
|
||||
|
||||
float new_min_val = hb_clamp(min, axis_info.min_value, axis_info.max_value);
|
||||
float new_max_val = hb_clamp(max, axis_info.min_value, axis_info.max_value);
|
||||
float new_default_val = hb_clamp(def, new_min_val, new_max_val);
|
||||
return input->axes_location.set (axis_tag, Triple ((double) new_min_val, (double) new_default_val, (double) new_max_val));
|
||||
}
|
||||
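A hedged sketch of the new NaN convention (this API is experimental, so HB_EXPERIMENTAL_API must be defined); the `wght` tag and the 400–700 range are illustrative:

#include <cmath>      /* NAN */
#include <hb.h>
#include <hb-subset.h>

/* Sketch: limit `wght` to [400, 700] and keep the font's own default,
 * using the NaN-means-"use the existing value" rule introduced above. */
static bool limit_weight (hb_subset_input_t *input, hb_face_t *face)
{
  return hb_subset_input_set_axis_range (input, face,
                                         HB_TAG ('w','g','h','t'),
                                         400.f, 700.f,
                                         NAN /* keep existing default */);
}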
|
||||
/**
|
||||
* hb_subset_input_get_axis_range: (skip)
|
||||
* @input: a #hb_subset_input_t object.
|
||||
* @axis_tag: Tag of the axis
|
||||
* @axis_min_value: Set to the previously configured minimum value of the axis variation range.
|
||||
* @axis_max_value: Set to the previously configured maximum value of the axis variation range.
|
||||
* @axis_def_value: Set to the previously configured default value of the axis variation range.
|
||||
*
|
||||
* Gets the axis range assigned by previous calls to hb_subset_input_set_axis_range.
|
||||
*
|
||||
* Return value: `true` if a range has been set for this axis tag, `false` otherwise.
|
||||
*
|
||||
* Since: 8.5.0
|
||||
**/
|
||||
HB_EXTERN hb_bool_t
|
||||
hb_subset_input_get_axis_range (hb_subset_input_t *input,
|
||||
hb_tag_t axis_tag,
|
||||
float *axis_min_value,
|
||||
float *axis_max_value,
|
||||
float *axis_def_value)
|
||||
|
||||
{
|
||||
Triple* triple;
|
||||
if (!input->axes_location.has(axis_tag, &triple)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
*axis_min_value = triple->minimum;
|
||||
*axis_def_value = triple->middle;
|
||||
*axis_max_value = triple->maximum;
|
||||
return true;
|
||||
}
|
||||
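And a matching read-back sketch under the same experimental guard; the printf is only for illustration:

#include <cstdio>
#include <hb-subset.h>

/* Sketch: query a range previously set on the same input object. */
static void report_weight_range (hb_subset_input_t *input)
{
  float min_v, max_v, def_v;
  if (hb_subset_input_get_axis_range (input, HB_TAG ('w','g','h','t'),
                                      &min_v, &max_v, &def_v))
    printf ("wght limited to [%g, %g], default %g\n", min_v, max_v, def_v);
}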
#endif
|
||||
#endif
|
||||
|
||||
/**
|
||||
|
|
@ -659,7 +731,7 @@ hb_subset_input_override_name_table (hb_subset_input_t *input,
|
|||
src = hb_utf8_t::next (src, src_end, &unicode, replacement);
|
||||
if (unicode >= 0x0080u)
|
||||
{
|
||||
printf ("Non-ascii character detected, ignored...This API supports acsii characters only for mac platform\n");
|
||||
printf ("Non-ascii character detected, ignored...This API supports ascii characters only for mac platform\n");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
|
@ -673,5 +745,4 @@ hb_subset_input_override_name_table (hb_subset_input_t *input,
|
|||
input->name_table_overrides.set (hb_ot_name_record_ids_t (platform_id, encoding_id, language_id, name_id), name_bytes);
|
||||
return true;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
|
|
|||
532
modules/juce_graphics/fonts/harfbuzz/hb-subset-instancer-iup.cc
Normal file
|
|
@ -0,0 +1,532 @@
|
|||
/*
|
||||
* Copyright © 2024 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*/
|
||||
|
||||
#include "hb-subset-instancer-iup.hh"
|
||||
|
||||
/* This file is a straight port of the following:
|
||||
*
|
||||
* https://github.com/fonttools/fonttools/blob/main/Lib/fontTools/varLib/iup.py
|
||||
*
|
||||
* Where that file returns an optimized deltas vector, we return optimized
|
||||
* referenced point indices.
|
||||
*/
|
||||
|
||||
constexpr static unsigned MAX_LOOKBACK = 8;
|
||||
|
||||
static void _iup_contour_bound_forced_set (const hb_array_t<const contour_point_t> contour_points,
|
||||
const hb_array_t<const int> x_deltas,
|
||||
const hb_array_t<const int> y_deltas,
|
||||
hb_set_t& forced_set, /* OUT */
|
||||
double tolerance = 0.0)
|
||||
{
|
||||
unsigned len = contour_points.length;
|
||||
unsigned next_i = 0;
|
||||
for (int i = len - 1; i >= 0; i--)
|
||||
{
|
||||
unsigned last_i = (len + i -1) % len;
|
||||
for (unsigned j = 0; j < 2; j++)
|
||||
{
|
||||
double cj, lcj, ncj;
|
||||
int dj, ldj, ndj;
|
||||
if (j == 0)
|
||||
{
|
||||
cj = static_cast<double> (contour_points.arrayZ[i].x);
|
||||
dj = x_deltas.arrayZ[i];
|
||||
lcj = static_cast<double> (contour_points.arrayZ[last_i].x);
|
||||
ldj = x_deltas.arrayZ[last_i];
|
||||
ncj = static_cast<double> (contour_points.arrayZ[next_i].x);
|
||||
ndj = x_deltas.arrayZ[next_i];
|
||||
}
|
||||
else
|
||||
{
|
||||
cj = static_cast<double> (contour_points.arrayZ[i].y);
|
||||
dj = y_deltas.arrayZ[i];
|
||||
lcj = static_cast<double> (contour_points.arrayZ[last_i].y);
|
||||
ldj = y_deltas.arrayZ[last_i];
|
||||
ncj = static_cast<double> (contour_points.arrayZ[next_i].y);
|
||||
ndj = y_deltas.arrayZ[next_i];
|
||||
}
|
||||
|
||||
double c1, c2;
|
||||
int d1, d2;
|
||||
if (lcj <= ncj)
|
||||
{
|
||||
c1 = lcj;
|
||||
c2 = ncj;
|
||||
d1 = ldj;
|
||||
d2 = ndj;
|
||||
}
|
||||
else
|
||||
{
|
||||
c1 = ncj;
|
||||
c2 = lcj;
|
||||
d1 = ndj;
|
||||
d2 = ldj;
|
||||
}
|
||||
|
||||
bool force = false;
|
||||
if (c1 == c2)
|
||||
{
|
||||
if (abs (d1 - d2) > tolerance && abs (dj) > tolerance)
|
||||
force = true;
|
||||
}
|
||||
else if (c1 <= cj && cj <= c2)
|
||||
{
|
||||
if (!(hb_min (d1, d2) - tolerance <= dj &&
|
||||
dj <= hb_max (d1, d2) + tolerance))
|
||||
force = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (d1 != d2)
|
||||
{
|
||||
if (cj < c1)
|
||||
{
|
||||
if (abs (dj) > tolerance &&
|
||||
abs (dj - d1) > tolerance &&
|
||||
((dj - tolerance < d1) != (d1 < d2)))
|
||||
force = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (abs (dj) > tolerance &&
|
||||
abs (dj - d2) > tolerance &&
|
||||
((d2 < dj + tolerance) != (d1 < d2)))
|
||||
force = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (force)
|
||||
{
|
||||
forced_set.add (i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
next_i = i;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename T,
|
||||
hb_enable_if (hb_is_trivially_copyable (T))>
|
||||
static bool rotate_array (const hb_array_t<const T>& org_array,
|
||||
int k,
|
||||
hb_vector_t<T>& out)
|
||||
{
|
||||
unsigned n = org_array.length;
|
||||
if (!n) return true;
|
||||
if (unlikely (!out.resize (n, false)))
|
||||
return false;
|
||||
|
||||
unsigned item_size = hb_static_size (T);
|
||||
if (k < 0)
|
||||
k = n - (-k) % n;
|
||||
else
|
||||
k %= n;
|
||||
|
||||
hb_memcpy ((void *) out.arrayZ, (const void *) (org_array.arrayZ + n - k), k * item_size);
|
||||
hb_memcpy ((void *) (out.arrayZ + k), (const void *) org_array.arrayZ, (n - k) * item_size);
|
||||
return true;
|
||||
}
|
||||
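A tiny worked example of the rotation helper (values are illustrative):

/* Right-rotate {1, 2, 3, 4, 5} by k = 2. */
static const int src[] = {1, 2, 3, 4, 5};
hb_vector_t<int> rotated;
if (rotate_array (hb_array (src, 5u), 2, rotated))
{
  /* rotated == {4, 5, 1, 2, 3}; with k = -2 it would be {3, 4, 5, 1, 2}. */
}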
|
||||
static bool rotate_set (const hb_set_t& org_set,
|
||||
int k,
|
||||
unsigned n,
|
||||
hb_set_t& out)
|
||||
{
|
||||
if (!n) return false;
|
||||
k %= n;
|
||||
if (k < 0)
|
||||
k = n + k;
|
||||
|
||||
if (k == 0)
|
||||
{
|
||||
out.set (org_set);
|
||||
}
|
||||
else
|
||||
{
|
||||
for (auto v : org_set)
|
||||
out.add ((v + k) % n);
|
||||
}
|
||||
return !out.in_error ();
|
||||
}
|
||||
|
||||
/* Given two reference coordinates (start and end of contour_points array),
|
||||
* output interpolated deltas for points in between */
|
||||
static bool _iup_segment (const hb_array_t<const contour_point_t> contour_points,
|
||||
const hb_array_t<const int> x_deltas,
|
||||
const hb_array_t<const int> y_deltas,
|
||||
const contour_point_t& p1, const contour_point_t& p2,
|
||||
int p1_dx, int p2_dx,
|
||||
int p1_dy, int p2_dy,
|
||||
hb_vector_t<double>& interp_x_deltas, /* OUT */
|
||||
hb_vector_t<double>& interp_y_deltas /* OUT */)
|
||||
{
|
||||
unsigned n = contour_points.length;
|
||||
if (unlikely (!interp_x_deltas.resize (n, false) ||
|
||||
!interp_y_deltas.resize (n, false)))
|
||||
return false;
|
||||
|
||||
for (unsigned j = 0; j < 2; j++)
|
||||
{
|
||||
double x1, x2, d1, d2;
|
||||
double *out;
|
||||
if (j == 0)
|
||||
{
|
||||
x1 = static_cast<double> (p1.x);
|
||||
x2 = static_cast<double> (p2.x);
|
||||
d1 = p1_dx;
|
||||
d2 = p2_dx;
|
||||
out = interp_x_deltas.arrayZ;
|
||||
}
|
||||
else
|
||||
{
|
||||
x1 = static_cast<double> (p1.y);
|
||||
x2 = static_cast<double> (p2.y);
|
||||
d1 = p1_dy;
|
||||
d2 = p2_dy;
|
||||
out = interp_y_deltas.arrayZ;
|
||||
}
|
||||
|
||||
if (x1 == x2)
|
||||
{
|
||||
if (d1 == d2)
|
||||
{
|
||||
for (unsigned i = 0; i < n; i++)
|
||||
out[i] = d1;
|
||||
}
|
||||
else
|
||||
{
|
||||
for (unsigned i = 0; i < n; i++)
|
||||
out[i] = 0.0;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (x1 > x2)
|
||||
{
|
||||
hb_swap (x1, x2);
|
||||
hb_swap (d1, d2);
|
||||
}
|
||||
|
||||
double scale = (d2 - d1) / (x2 - x1);
|
||||
for (unsigned i = 0; i < n; i++)
|
||||
{
|
||||
double x = (j == 0 ? static_cast<double> (contour_points.arrayZ[i].x) : static_cast<double> (contour_points.arrayZ[i].y));
|
||||
double d;
|
||||
if (x <= x1)
|
||||
d = d1;
|
||||
else if (x >= x2)
|
||||
d = d2;
|
||||
else
|
||||
d = d1 + (x - x1) * scale;
|
||||
|
||||
out[i] = d;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
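To make the interpolation rule above concrete, a small worked example with made-up coordinates and deltas:

/* Illustrative values only: reference points at x1=100 (delta 10) and
 * x2=200 (delta 30). Points between them get linearly interpolated deltas;
 * points outside the [x1, x2] span snap to the nearer reference delta. */
double x1 = 100.0, x2 = 200.0, d1 = 10.0, d2 = 30.0;
double scale    = (d2 - d1) / (x2 - x1);        /* 0.2                 */
double d_at_150 = d1 + (150.0 - x1) * scale;    /* 10 + 50 * 0.2 = 20  */
double d_at_50  = d1;                           /* x <= x1 -> d1 (10)  */
double d_at_250 = d2;                           /* x >= x2 -> d2 (30)  */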
|
||||
static bool _can_iup_in_between (const hb_array_t<const contour_point_t> contour_points,
|
||||
const hb_array_t<const int> x_deltas,
|
||||
const hb_array_t<const int> y_deltas,
|
||||
const contour_point_t& p1, const contour_point_t& p2,
|
||||
int p1_dx, int p2_dx,
|
||||
int p1_dy, int p2_dy,
|
||||
double tolerance)
|
||||
{
|
||||
hb_vector_t<double> interp_x_deltas, interp_y_deltas;
|
||||
if (!_iup_segment (contour_points, x_deltas, y_deltas,
|
||||
p1, p2, p1_dx, p2_dx, p1_dy, p2_dy,
|
||||
interp_x_deltas, interp_y_deltas))
|
||||
return false;
|
||||
|
||||
unsigned num = contour_points.length;
|
||||
|
||||
for (unsigned i = 0; i < num; i++)
|
||||
{
|
||||
double dx = static_cast<double> (x_deltas.arrayZ[i]) - interp_x_deltas.arrayZ[i];
|
||||
double dy = static_cast<double> (y_deltas.arrayZ[i]) - interp_y_deltas.arrayZ[i];
|
||||
|
||||
if (sqrt (dx * dx + dy * dy) > tolerance)
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool _iup_contour_optimize_dp (const contour_point_vector_t& contour_points,
|
||||
const hb_vector_t<int>& x_deltas,
|
||||
const hb_vector_t<int>& y_deltas,
|
||||
const hb_set_t& forced_set,
|
||||
double tolerance,
|
||||
unsigned lookback,
|
||||
hb_vector_t<unsigned>& costs, /* OUT */
|
||||
hb_vector_t<int>& chain /* OUT */)
|
||||
{
|
||||
unsigned n = contour_points.length;
|
||||
if (unlikely (!costs.resize (n, false) ||
|
||||
!chain.resize (n, false)))
|
||||
return false;
|
||||
|
||||
lookback = hb_min (lookback, MAX_LOOKBACK);
|
||||
|
||||
for (unsigned i = 0; i < n; i++)
|
||||
{
|
||||
unsigned best_cost = (i == 0 ? 1 : costs.arrayZ[i-1] + 1);
|
||||
|
||||
costs.arrayZ[i] = best_cost;
|
||||
chain.arrayZ[i] = (i == 0 ? -1 : i - 1);
|
||||
|
||||
if (i > 0 && forced_set.has (i - 1))
|
||||
continue;
|
||||
|
||||
int lookback_index = hb_max ((int) i - (int) lookback + 1, -1);
|
||||
for (int j = i - 2; j >= lookback_index; j--)
|
||||
{
|
||||
unsigned cost = j == -1 ? 1 : costs.arrayZ[j] + 1;
|
||||
/* num points between i and j */
|
||||
unsigned num_points = i - j - 1;
|
||||
unsigned p1 = (j == -1 ? n - 1 : j);
|
||||
if (cost < best_cost &&
|
||||
_can_iup_in_between (contour_points.as_array ().sub_array (j + 1, num_points),
|
||||
x_deltas.as_array ().sub_array (j + 1, num_points),
|
||||
y_deltas.as_array ().sub_array (j + 1, num_points),
|
||||
contour_points.arrayZ[p1], contour_points.arrayZ[i],
|
||||
x_deltas.arrayZ[p1], x_deltas.arrayZ[i],
|
||||
y_deltas.arrayZ[p1], y_deltas.arrayZ[i],
|
||||
tolerance))
|
||||
{
|
||||
best_cost = cost;
|
||||
costs.arrayZ[i] = best_cost;
|
||||
chain.arrayZ[i] = j;
|
||||
}
|
||||
|
||||
if (j > 0 && forced_set.has (j))
|
||||
break;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool _iup_contour_optimize (const hb_array_t<const contour_point_t> contour_points,
|
||||
const hb_array_t<const int> x_deltas,
|
||||
const hb_array_t<const int> y_deltas,
|
||||
hb_array_t<bool> opt_indices, /* OUT */
|
||||
double tolerance = 0.0)
|
||||
{
|
||||
unsigned n = contour_points.length;
|
||||
if (opt_indices.length != n ||
|
||||
x_deltas.length != n ||
|
||||
y_deltas.length != n)
|
||||
return false;
|
||||
|
||||
bool all_within_tolerance = true;
|
||||
for (unsigned i = 0; i < n; i++)
|
||||
{
|
||||
int dx = x_deltas.arrayZ[i];
|
||||
int dy = y_deltas.arrayZ[i];
|
||||
if (sqrt ((double) dx * dx + (double) dy * dy) > tolerance)
|
||||
{
|
||||
all_within_tolerance = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/* If all are within tolerance distance, do nothing, opt_indices is
|
||||
* initialized to false */
|
||||
if (all_within_tolerance)
|
||||
return true;
|
||||
|
||||
/* If there's exactly one point, return it */
|
||||
if (n == 1)
|
||||
{
|
||||
opt_indices.arrayZ[0] = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
/* If all deltas are exactly the same, return just one (the first one) */
|
||||
bool all_deltas_are_equal = true;
|
||||
for (unsigned i = 1; i < n; i++)
|
||||
if (x_deltas.arrayZ[i] != x_deltas.arrayZ[0] ||
|
||||
y_deltas.arrayZ[i] != y_deltas.arrayZ[0])
|
||||
{
|
||||
all_deltas_are_equal = false;
|
||||
break;
|
||||
}
|
||||
|
||||
if (all_deltas_are_equal)
|
||||
{
|
||||
opt_indices.arrayZ[0] = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
/* else, solve the general problem using Dynamic Programming */
|
||||
hb_set_t forced_set;
|
||||
_iup_contour_bound_forced_set (contour_points, x_deltas, y_deltas, forced_set, tolerance);
|
||||
|
||||
if (!forced_set.is_empty ())
|
||||
{
|
||||
int k = n - 1 - forced_set.get_max ();
|
||||
if (k < 0)
|
||||
return false;
|
||||
|
||||
hb_vector_t<int> rot_x_deltas, rot_y_deltas;
|
||||
contour_point_vector_t rot_points;
|
||||
hb_set_t rot_forced_set;
|
||||
if (!rotate_array (contour_points, k, rot_points) ||
|
||||
!rotate_array (x_deltas, k, rot_x_deltas) ||
|
||||
!rotate_array (y_deltas, k, rot_y_deltas) ||
|
||||
!rotate_set (forced_set, k, n, rot_forced_set))
|
||||
return false;
|
||||
|
||||
hb_vector_t<unsigned> costs;
|
||||
hb_vector_t<int> chain;
|
||||
|
||||
if (!_iup_contour_optimize_dp (rot_points, rot_x_deltas, rot_y_deltas,
|
||||
rot_forced_set, tolerance, n,
|
||||
costs, chain))
|
||||
return false;
|
||||
|
||||
hb_set_t solution;
|
||||
int index = n - 1;
|
||||
while (index != -1)
|
||||
{
|
||||
solution.add (index);
|
||||
index = chain.arrayZ[index];
|
||||
}
|
||||
|
||||
if (solution.is_empty () ||
|
||||
forced_set.get_population () > solution.get_population ())
|
||||
return false;
|
||||
|
||||
for (unsigned i : solution)
|
||||
opt_indices.arrayZ[i] = true;
|
||||
|
||||
hb_vector_t<bool> rot_indices;
|
||||
const hb_array_t<const bool> opt_indices_array (opt_indices.arrayZ, opt_indices.length);
|
||||
rotate_array (opt_indices_array, -k, rot_indices);
|
||||
|
||||
for (unsigned i = 0; i < n; i++)
|
||||
opt_indices.arrayZ[i] = rot_indices.arrayZ[i];
|
||||
}
|
||||
else
|
||||
{
|
||||
hb_vector_t<int> repeat_x_deltas, repeat_y_deltas;
|
||||
contour_point_vector_t repeat_points;
|
||||
|
||||
if (unlikely (!repeat_x_deltas.resize (n * 2, false) ||
|
||||
!repeat_y_deltas.resize (n * 2, false) ||
|
||||
!repeat_points.resize (n * 2, false)))
|
||||
return false;
|
||||
|
||||
unsigned contour_point_size = hb_static_size (contour_point_t);
|
||||
for (unsigned i = 0; i < n; i++)
|
||||
{
|
||||
hb_memcpy ((void *) repeat_x_deltas.arrayZ, (const void *) x_deltas.arrayZ, n * sizeof (repeat_x_deltas[0]));
|
||||
hb_memcpy ((void *) (repeat_x_deltas.arrayZ + n), (const void *) x_deltas.arrayZ, n * sizeof (repeat_x_deltas[0]));
|
||||
|
||||
hb_memcpy ((void *) repeat_y_deltas.arrayZ, (const void *) y_deltas.arrayZ, n * sizeof (repeat_x_deltas[0]));
|
||||
hb_memcpy ((void *) (repeat_y_deltas.arrayZ + n), (const void *) y_deltas.arrayZ, n * sizeof (repeat_x_deltas[0]));
|
||||
|
||||
hb_memcpy ((void *) repeat_points.arrayZ, (const void *) contour_points.arrayZ, n * contour_point_size);
|
||||
hb_memcpy ((void *) (repeat_points.arrayZ + n), (const void *) contour_points.arrayZ, n * contour_point_size);
|
||||
}
|
||||
|
||||
hb_vector_t<unsigned> costs;
|
||||
hb_vector_t<int> chain;
|
||||
if (!_iup_contour_optimize_dp (repeat_points, repeat_x_deltas, repeat_y_deltas,
|
||||
forced_set, tolerance, n,
|
||||
costs, chain))
|
||||
return false;
|
||||
|
||||
unsigned best_cost = n + 1;
|
||||
int len = costs.length;
|
||||
hb_set_t best_sol;
|
||||
for (int start = n - 1; start < len; start++)
|
||||
{
|
||||
hb_set_t solution;
|
||||
int i = start;
|
||||
int lookback = start - (int) n;
|
||||
while (i > lookback)
|
||||
{
|
||||
solution.add (i % n);
|
||||
i = chain.arrayZ[i];
|
||||
}
|
||||
if (i == lookback)
|
||||
{
|
||||
unsigned cost_i = i < 0 ? 0 : costs.arrayZ[i];
|
||||
unsigned cost = costs.arrayZ[start] - cost_i;
|
||||
if (cost <= best_cost)
|
||||
{
|
||||
best_sol.set (solution);
|
||||
best_cost = cost;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (unsigned i = 0; i < n; i++)
|
||||
if (best_sol.has (i))
|
||||
opt_indices.arrayZ[i] = true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool iup_delta_optimize (const contour_point_vector_t& contour_points,
|
||||
const hb_vector_t<int>& x_deltas,
|
||||
const hb_vector_t<int>& y_deltas,
|
||||
hb_vector_t<bool>& opt_indices, /* OUT */
|
||||
double tolerance)
|
||||
{
|
||||
if (!opt_indices.resize (contour_points.length))
|
||||
return false;
|
||||
|
||||
hb_vector_t<unsigned> end_points;
|
||||
unsigned count = contour_points.length;
|
||||
if (unlikely (!end_points.alloc (count)))
|
||||
return false;
|
||||
|
||||
for (unsigned i = 0; i < count - 4; i++)
|
||||
if (contour_points.arrayZ[i].is_end_point)
|
||||
end_points.push (i);
|
||||
|
||||
/* phantom points */
|
||||
for (unsigned i = count - 4; i < count; i++)
|
||||
end_points.push (i);
|
||||
|
||||
if (end_points.in_error ()) return false;
|
||||
|
||||
unsigned start = 0;
|
||||
for (unsigned end : end_points)
|
||||
{
|
||||
unsigned len = end - start + 1;
|
||||
if (!_iup_contour_optimize (contour_points.as_array ().sub_array (start, len),
|
||||
x_deltas.as_array ().sub_array (start, len),
|
||||
y_deltas.as_array ().sub_array (start, len),
|
||||
opt_indices.as_array ().sub_array (start, len),
|
||||
tolerance))
|
||||
return false;
|
||||
start = end + 1;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
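A hedged sketch of how a caller might use this entry point; `points`, `x_deltas` and `y_deltas` are assumed to be already populated for one glyph (including the four phantom points), and the 0.5 tolerance is arbitrary:

/* Sketch only (internal API). */
hb_vector_t<bool> ref_points;
if (iup_delta_optimize (points, x_deltas, y_deltas, ref_points, 0.5))
{
  /* ref_points[i] == true marks a delta that must be stored explicitly;
   * every other delta can be reconstructed by IUP within 0.5 units. */
}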
|
|
@ -0,0 +1,37 @@
|
|||
/*
|
||||
* Copyright © 2024 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*/
|
||||
|
||||
#ifndef HB_SUBSET_INSTANCER_IUP_HH
|
||||
#define HB_SUBSET_INSTANCER_IUP_HH
|
||||
|
||||
#include "hb-subset-plan.hh"
|
||||
/* given contour points and deltas, optimize a set of referenced points within error
|
||||
* tolerance. Returns optimized referenced point indices */
|
||||
HB_INTERNAL bool iup_delta_optimize (const contour_point_vector_t& contour_points,
|
||||
const hb_vector_t<int>& x_deltas,
|
||||
const hb_vector_t<int>& y_deltas,
|
||||
hb_vector_t<bool>& opt_indices, /* OUT */
|
||||
double tolerance = 0.0);
|
||||
|
||||
#endif /* HB_SUBSET_INSTANCER_IUP_HH */
|
||||
|
|
@ -32,17 +32,17 @@
|
|||
* This should be safe.
|
||||
*/
|
||||
|
||||
constexpr static float EPSILON = 1.f / (1 << 14);
|
||||
constexpr static float MAX_F2DOT14 = float (0x7FFF) / (1 << 14);
|
||||
constexpr static double EPSILON = 1.0 / (1 << 14);
|
||||
constexpr static double MAX_F2DOT14 = double (0x7FFF) / (1 << 14);
|
||||
|
||||
static inline Triple _reverse_negate(const Triple &v)
|
||||
{ return {-v.maximum, -v.middle, -v.minimum}; }
|
||||
|
||||
|
||||
static inline float supportScalar (float coord, const Triple &tent)
|
||||
static inline double supportScalar (double coord, const Triple &tent)
|
||||
{
|
||||
/* Copied from VarRegionAxis::evaluate() */
|
||||
float start = tent.minimum, peak = tent.middle, end = tent.maximum;
|
||||
double start = tent.minimum, peak = tent.middle, end = tent.maximum;
|
||||
|
||||
if (unlikely (start > peak || peak > end))
|
||||
return 1.;
|
||||
|
|
@ -62,20 +62,20 @@ static inline float supportScalar (float coord, const Triple &tent)
|
|||
return (end - coord) / (end - peak);
|
||||
}
|
||||
|
||||
static inline result_t
|
||||
static inline rebase_tent_result_t
|
||||
_solve (Triple tent, Triple axisLimit, bool negative = false)
|
||||
{
|
||||
float axisMin = axisLimit.minimum;
|
||||
float axisDef = axisLimit.middle;
|
||||
float axisMax = axisLimit.maximum;
|
||||
float lower = tent.minimum;
|
||||
float peak = tent.middle;
|
||||
float upper = tent.maximum;
|
||||
double axisMin = axisLimit.minimum;
|
||||
double axisDef = axisLimit.middle;
|
||||
double axisMax = axisLimit.maximum;
|
||||
double lower = tent.minimum;
|
||||
double peak = tent.middle;
|
||||
double upper = tent.maximum;
|
||||
|
||||
// Mirror the problem such that axisDef <= peak
|
||||
if (axisDef > peak)
|
||||
{
|
||||
result_t vec = _solve (_reverse_negate (tent),
|
||||
rebase_tent_result_t vec = _solve (_reverse_negate (tent),
|
||||
_reverse_negate (axisLimit),
|
||||
!negative);
|
||||
|
||||
|
|
@ -98,7 +98,7 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
* axisMin axisDef axisMax lower upper
|
||||
*/
|
||||
if (axisMax <= lower && axisMax < peak)
|
||||
return result_t{}; // No overlap
|
||||
return rebase_tent_result_t{}; // No overlap
|
||||
|
||||
/* case 2: Only the peak and outermost bound fall outside the new limit;
|
||||
* we keep the deltaset, update peak and outermost bound and scale deltas
|
||||
|
|
@ -130,10 +130,10 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
*/
|
||||
if (axisMax < peak)
|
||||
{
|
||||
float mult = supportScalar (axisMax, tent);
|
||||
double mult = supportScalar (axisMax, tent);
|
||||
tent = Triple{lower, axisMax, axisMax};
|
||||
|
||||
result_t vec = _solve (tent, axisLimit);
|
||||
rebase_tent_result_t vec = _solve (tent, axisLimit);
|
||||
|
||||
for (auto &p : vec)
|
||||
p = hb_pair (p.first * mult, p.second);
|
||||
|
|
@ -143,13 +143,13 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
|
||||
// lower <= axisDef <= peak <= axisMax
|
||||
|
||||
float gain = supportScalar (axisDef, tent);
|
||||
result_t out {hb_pair (gain, Triple{})};
|
||||
double gain = supportScalar (axisDef, tent);
|
||||
rebase_tent_result_t out {hb_pair (gain, Triple{})};
|
||||
|
||||
// First, the positive side
|
||||
|
||||
// outGain is the scalar of axisMax at the tent.
|
||||
float outGain = supportScalar (axisMax, tent);
|
||||
double outGain = supportScalar (axisMax, tent);
|
||||
|
||||
/* Case 3a: Gain is more than outGain. The tent down-slope crosses
|
||||
* the axis into negative. We have to split it into multiples.
|
||||
|
|
@ -173,10 +173,10 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
// Note that this is the branch taken if both gain and outGain are 0.
|
||||
|
||||
// Crossing point on the axis.
|
||||
float crossing = peak + (1 - gain) * (upper - peak);
|
||||
double crossing = peak + (1 - gain) * (upper - peak);
|
||||
|
||||
Triple loc{hb_max (lower, axisDef), peak, crossing};
|
||||
float scalar = 1.f;
|
||||
double scalar = 1.0;
|
||||
|
||||
// The part before the crossing point.
|
||||
out.push (hb_pair (scalar - gain, loc));
|
||||
|
|
@ -191,7 +191,7 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
if (upper >= axisMax)
|
||||
{
|
||||
Triple loc {crossing, axisMax, axisMax};
|
||||
float scalar = outGain;
|
||||
double scalar = outGain;
|
||||
|
||||
out.push (hb_pair (scalar - gain, loc));
|
||||
}
|
||||
|
|
@ -221,11 +221,11 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
|
||||
// Downslope.
|
||||
Triple loc1 {crossing, upper, axisMax};
|
||||
float scalar1 = 0.f;
|
||||
double scalar1 = 0.0;
|
||||
|
||||
// Eternity justify.
|
||||
Triple loc2 {upper, axisMax, axisMax};
|
||||
float scalar2 = 0.f;
|
||||
double scalar2 = 0.0;
|
||||
|
||||
out.push (hb_pair (scalar1 - gain, loc1));
|
||||
out.push (hb_pair (scalar2 - gain, loc2));
|
||||
|
|
@ -254,9 +254,12 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
* | | newUpper
|
||||
* axisDef axisMax
|
||||
*/
|
||||
float newUpper = peak + (1 - gain) * (upper - peak);
|
||||
double newUpper = peak + (1 - gain) * (upper - peak);
|
||||
assert (axisMax <= newUpper); // Because outGain > gain
|
||||
if (newUpper <= axisDef + (axisMax - axisDef) * 2)
|
||||
/* Disabled because ots doesn't like us:
|
||||
* https://github.com/fonttools/fonttools/issues/3350 */
|
||||
|
||||
if (false && (newUpper <= axisDef + (axisMax - axisDef) * 2))
|
||||
{
|
||||
upper = newUpper;
|
||||
if (!negative && axisDef + (axisMax - axisDef) * MAX_F2DOT14 < upper)
|
||||
|
|
@ -267,7 +270,7 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
}
|
||||
|
||||
Triple loc {hb_max (axisDef, lower), peak, upper};
|
||||
float scalar = 1.f;
|
||||
double scalar = 1.0;
|
||||
|
||||
out.push (hb_pair (scalar - gain, loc));
|
||||
}
|
||||
|
|
@ -291,10 +294,10 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
else
|
||||
{
|
||||
Triple loc1 {hb_max (axisDef, lower), peak, axisMax};
|
||||
float scalar1 = 1.f;
|
||||
double scalar1 = 1.0;
|
||||
|
||||
Triple loc2 {peak, axisMax, axisMax};
|
||||
float scalar2 = outGain;
|
||||
double scalar2 = outGain;
|
||||
|
||||
out.push (hb_pair (scalar1 - gain, loc1));
|
||||
// Don't add a dirac delta!
|
||||
|
|
@ -322,7 +325,7 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
if (lower <= axisMin)
|
||||
{
|
||||
Triple loc {axisMin, axisMin, axisDef};
|
||||
float scalar = supportScalar (axisMin, tent);
|
||||
double scalar = supportScalar (axisMin, tent);
|
||||
|
||||
out.push (hb_pair (scalar - gain, loc));
|
||||
}
|
||||
|
|
@ -350,11 +353,11 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
|
||||
// Downslope.
|
||||
Triple loc1 {axisMin, lower, axisDef};
|
||||
float scalar1 = 0.f;
|
||||
double scalar1 = 0.0;
|
||||
|
||||
// Eternity justify.
|
||||
Triple loc2 {axisMin, axisMin, lower};
|
||||
float scalar2 = 0.f;
|
||||
double scalar2 = 0.0;
|
||||
|
||||
out.push (hb_pair (scalar1 - gain, loc1));
|
||||
out.push (hb_pair (scalar2 - gain, loc2));
|
||||
|
|
@ -366,19 +369,19 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
|
|||
static inline TripleDistances _reverse_triple_distances (const TripleDistances &v)
|
||||
{ return TripleDistances (v.positive, v.negative); }
|
||||
|
||||
float renormalizeValue (float v, const Triple &triple,
|
||||
const TripleDistances &triple_distances, bool extrapolate)
|
||||
double renormalizeValue (double v, const Triple &triple,
|
||||
const TripleDistances &triple_distances, bool extrapolate)
|
||||
{
|
||||
float lower = triple.minimum, def = triple.middle, upper = triple.maximum;
|
||||
double lower = triple.minimum, def = triple.middle, upper = triple.maximum;
|
||||
assert (lower <= def && def <= upper);
|
||||
|
||||
if (!extrapolate)
|
||||
v = hb_max (hb_min (v, upper), lower);
|
||||
|
||||
if (v == def)
|
||||
return 0.f;
|
||||
return 0.0;
|
||||
|
||||
if (def < 0.f)
|
||||
if (def < 0.0)
|
||||
return -renormalizeValue (-v, _reverse_negate (triple),
|
||||
_reverse_triple_distances (triple_distances), extrapolate);
|
||||
|
||||
|
|
@ -387,14 +390,14 @@ float renormalizeValue (float v, const Triple &triple,
|
|||
return (v - def) / (upper - def);
|
||||
|
||||
/* v < def */
|
||||
if (lower >= 0.f)
|
||||
if (lower >= 0.0)
|
||||
return (v - def) / (def - lower);
|
||||
|
||||
/* lower < 0 and v < default */
|
||||
float total_distance = triple_distances.negative * (-lower) + triple_distances.positive * def;
|
||||
double total_distance = triple_distances.negative * (-lower) + triple_distances.positive * def;
|
||||
|
||||
float v_distance;
|
||||
if (v >= 0.f)
|
||||
double v_distance;
|
||||
if (v >= 0.0)
|
||||
v_distance = (def - v) * triple_distances.positive;
|
||||
else
|
||||
v_distance = (-v) * triple_distances.negative + triple_distances.positive * def;
|
||||
|
|
@ -402,18 +405,18 @@ float renormalizeValue (float v, const Triple &triple,
|
|||
return (-v_distance) /total_distance;
|
||||
}
|
||||
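A worked example with illustrative numbers, following the upper-side branch shown above:

double r = renormalizeValue (0.75, Triple (-1.0, 0.5, 1.0),
                             TripleDistances (1.0, 1.0));
/* v > def and upper > def, so r == (0.75 - 0.5) / (1.0 - 0.5) == 0.5 */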
|
||||
result_t
|
||||
rebase_tent_result_t
|
||||
rebase_tent (Triple tent, Triple axisLimit, TripleDistances axis_triple_distances)
|
||||
{
|
||||
assert (-1.f <= axisLimit.minimum && axisLimit.minimum <= axisLimit.middle && axisLimit.middle <= axisLimit.maximum && axisLimit.maximum <= +1.f);
|
||||
assert (-2.f <= tent.minimum && tent.minimum <= tent.middle && tent.middle <= tent.maximum && tent.maximum <= +2.f);
|
||||
assert (tent.middle != 0.f);
|
||||
assert (-1.0 <= axisLimit.minimum && axisLimit.minimum <= axisLimit.middle && axisLimit.middle <= axisLimit.maximum && axisLimit.maximum <= +1.0);
|
||||
assert (-2.0 <= tent.minimum && tent.minimum <= tent.middle && tent.middle <= tent.maximum && tent.maximum <= +2.0);
|
||||
assert (tent.middle != 0.0);
|
||||
|
||||
result_t sols = _solve (tent, axisLimit);
|
||||
rebase_tent_result_t sols = _solve (tent, axisLimit);
|
||||
|
||||
auto n = [&axisLimit, &axis_triple_distances] (float v) { return renormalizeValue (v, axisLimit, axis_triple_distances); };
|
||||
auto n = [&axisLimit, &axis_triple_distances] (double v) { return renormalizeValue (v, axisLimit, axis_triple_distances); };
|
||||
|
||||
result_t out;
|
||||
rebase_tent_result_t out;
|
||||
for (auto &p : sols)
|
||||
{
|
||||
if (!p.first) continue;
|
||||
|
|
|
|||
|
|
@ -30,24 +30,24 @@
|
|||
/* pre-normalized distances */
|
||||
struct TripleDistances
|
||||
{
|
||||
TripleDistances (): negative (1.f), positive (1.f) {}
|
||||
TripleDistances (float neg_, float pos_): negative (neg_), positive (pos_) {}
|
||||
TripleDistances (float min, float default_, float max)
|
||||
TripleDistances (): negative (1.0), positive (1.0) {}
|
||||
TripleDistances (double neg_, double pos_): negative (neg_), positive (pos_) {}
|
||||
TripleDistances (double min, double default_, double max)
|
||||
{
|
||||
negative = default_ - min;
|
||||
positive = max - default_;
|
||||
}
|
||||
|
||||
float negative;
|
||||
float positive;
|
||||
double negative;
|
||||
double positive;
|
||||
};
|
||||
|
||||
struct Triple {
|
||||
|
||||
Triple () :
|
||||
minimum (0.f), middle (0.f), maximum (0.f) {}
|
||||
minimum (0.0), middle (0.0), maximum (0.0) {}
|
||||
|
||||
Triple (float minimum_, float middle_, float maximum_) :
|
||||
Triple (double minimum_, double middle_, double maximum_) :
|
||||
minimum (minimum_), middle (middle_), maximum (maximum_) {}
|
||||
|
||||
bool operator == (const Triple &o) const
|
||||
|
|
@ -63,7 +63,7 @@ struct Triple {
|
|||
bool is_point () const
|
||||
{ return minimum == middle && middle == maximum; }
|
||||
|
||||
bool contains (float point) const
|
||||
bool contains (double point) const
|
||||
{ return minimum <= point && point <= maximum; }
|
||||
|
||||
/* from hb_array_t hash ()*/
|
||||
|
|
@ -82,18 +82,18 @@ struct Triple {
|
|||
}
|
||||
|
||||
|
||||
float minimum;
|
||||
float middle;
|
||||
float maximum;
|
||||
double minimum;
|
||||
double middle;
|
||||
double maximum;
|
||||
};
|
||||
|
||||
using result_item_t = hb_pair_t<float, Triple>;
|
||||
using result_t = hb_vector_t<result_item_t>;
|
||||
using rebase_tent_result_item_t = hb_pair_t<double, Triple>;
|
||||
using rebase_tent_result_t = hb_vector_t<rebase_tent_result_item_t>;
|
||||
|
||||
/* renormalize a normalized value v to the range of an axis,
|
||||
* considering the prenormalized distances as well as the new axis limits.
|
||||
* Ported from fonttools */
|
||||
HB_INTERNAL float renormalizeValue (float v, const Triple &triple,
|
||||
HB_INTERNAL double renormalizeValue (double v, const Triple &triple,
|
||||
const TripleDistances &triple_distances,
|
||||
bool extrapolate = true);
|
||||
/* Given a tuple (lower,peak,upper) "tent" and new axis limits
|
||||
|
|
@ -107,6 +107,8 @@ HB_INTERNAL float renormalizeValue (float v, const Triple &triple,
|
|||
* If tent value is Triple{}, that is a special deltaset that should
|
||||
* be always-enabled (called "gain").
|
||||
*/
|
||||
HB_INTERNAL result_t rebase_tent (Triple tent, Triple axisLimit, TripleDistances axis_triple_distances);
|
||||
HB_INTERNAL rebase_tent_result_t rebase_tent (Triple tent,
|
||||
Triple axisLimit,
|
||||
TripleDistances axis_triple_distances);
|
||||
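A usage sketch under stated assumptions: the tent and axis limits below are illustrative, and process() is a placeholder for whatever the caller does with each remapped region:

/* Sketch only (internal API): remap a tent peaking at +1.0 after the axis
 * has been limited to [-1, 0, 0.5]. */
Triple tent (0.0, 1.0, 1.0);
Triple axis_limit (-1.0, 0.0, 0.5);
rebase_tent_result_t sols = rebase_tent (tent, axis_limit,
                                         TripleDistances (1.0, 1.0));
for (const auto &p : sols)
  /* p.first is the scalar multiplier, p.second the remapped tent;
   * a Triple{} here denotes the always-enabled "gain" delta set. */
  process (p.first, p.second);   /* process() is a placeholder */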
|
||||
#endif /* HB_SUBSET_INSTANCER_SOLVER_HH */
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff.