[klippa] subset cmap table
qxliu76 committed Dec 19, 2024
1 parent 0c9fec3 commit 684964f
Showing 447 changed files with 1,357 additions and 68 deletions.
1,110 changes: 1,110 additions & 0 deletions klippa/src/cmap.rs

Large diffs are not rendered by default.
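
The cmap diff itself is collapsed above, so a rough orientation: subsetting cmap means rewriting the character-to-glyph mapping so that every retained codepoint points at its new glyph id (the plan's unicode_to_new_gid_list) and everything else is dropped, typically as a format 4 subtable for BMP codepoints plus a format 12 subtable when codepoints above U+FFFF survive. The sketch below is illustrative only, not code from this commit; the helper name and tuple layout are made up.

// Illustrative only, not taken from klippa/src/cmap.rs: group sorted
// (codepoint, new_gid) pairs into the contiguous runs a cmap format 12
// SequentialMapGroup stores (startCharCode, endCharCode, startGlyphID).
fn to_sequential_map_groups(pairs: &[(u32, u32)]) -> Vec<(u32, u32, u32)> {
    let mut groups: Vec<(u32, u32, u32)> = Vec::new();
    for &(cp, gid) in pairs {
        if let Some(last) = groups.last_mut() {
            let (start_cp, end_cp, start_gid) = *last;
            // Extend the open run while both codepoint and glyph id stay consecutive.
            if cp == end_cp + 1 && gid == start_gid + (cp - start_cp) {
                last.1 = cp;
                continue;
            }
        }
        groups.push((cp, cp, gid));
    }
    groups
}

// to_sequential_map_groups(&[(0x41, 1), (0x42, 2), (0x43, 3), (0x2603, 9)])
// yields [(0x41, 0x43, 1), (0x2603, 0x2603, 9)].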

2 changes: 1 addition & 1 deletion klippa/src/gvar.rs
@@ -99,7 +99,7 @@ fn subset_with_offset_type<OffsetType: GvarOffset>(
//pre-allocate glyphVariationDataOffsets array
let offsets_array_len = (num_glyphs as usize + 1) * off_size;
let mut start_idx = s
.allocate_size(offsets_array_len)
.allocate_size(offsets_array_len, false)
.map_err(|_| SubsetError::SubsetTableError(Gvar::TAG))?;

// shared tuples array
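
The functional change in this hunk is the extra argument now passed to the serializer's allocate_size (the same change appears in hmtx.rs and post.rs below); what the new flag controls is not visible in this diff, so no meaning is assumed here. For context on the arithmetic around it: gvar stores num_glyphs + 1 offsets bracketing each glyph's variation data, 2 bytes each for short offsets or 4 for long ones, which is exactly what offsets_array_len pre-allocates. A small sketch with made-up numbers:

// Illustrative helper, not from the diff: size of gvar's
// glyphVariationDataOffsets array.
fn gvar_offsets_array_len(num_glyphs: usize, long_offsets: bool) -> usize {
    let off_size = if long_offsets { 4 } else { 2 };
    // num_glyphs + 1 offsets: offset[i]..offset[i + 1] brackets glyph i's data.
    (num_glyphs + 1) * off_size
}

// gvar_offsets_array_len(10, true) == 44: eleven 4-byte offsets for ten glyphs.
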
32 changes: 14 additions & 18 deletions klippa/src/hmtx.rs
@@ -5,7 +5,6 @@ use crate::{Plan, Subset, SubsetError, SubsetError::SubsetTableError};
use write_fonts::types::{FWord, GlyphId, UfWord};
use write_fonts::{
read::{
collections::IntSet,
tables::{hhea::Hhea, hmtx::Hmtx},
FontRef, TableProvider, TopLevelTable,
},
@@ -30,11 +29,10 @@ impl Subset for Hmtx<'_> {
return Err(SubsetTableError(Hmtx::TAG));
}

let new_num_h_metrics =
compute_new_num_h_metrics(self, &plan.glyphset, plan.num_output_glyphs);
let new_num_h_metrics = compute_new_num_h_metrics(self, plan);
//subsetted hmtx table length
let hmtx_cap = new_num_h_metrics * 4 + (plan.num_output_glyphs - new_num_h_metrics) * 2;
s.allocate_size(hmtx_cap)
s.allocate_size(hmtx_cap, false)
.map_err(|_| SubsetError::SubsetTableError(Hmtx::TAG))?;

for (new_gid, old_gid) in &plan.new_to_old_gid_list {
@@ -69,25 +67,23 @@
}
}

fn compute_new_num_h_metrics(
hmtx: &Hmtx,
gid_set: &IntSet<GlyphId>,
num_output_glyphs: usize,
) -> usize {
let mut num_long_metrics = num_output_glyphs.min(0xFFFF) as u32;
let last_gid = num_long_metrics - 1;
let last_advance = hmtx.advance(GlyphId::from(last_gid)).unwrap();
fn compute_new_num_h_metrics(hmtx: &Hmtx, plan: &Plan) -> usize {
let mut num_long_metrics = plan.num_output_glyphs.min(0xFFFF);
let last_advance = get_new_gid_advance(hmtx, GlyphId::from(num_long_metrics as u32 - 1), plan);

while num_long_metrics > 1 {
let gid = GlyphId::from(num_long_metrics - 2);
let advance = gid_set
.contains(gid)
.then(|| hmtx.advance(gid).unwrap())
.unwrap_or(0);
let advance = get_new_gid_advance(hmtx, GlyphId::from(num_long_metrics as u32 - 2), plan);
if advance != last_advance {
break;
}
num_long_metrics -= 1;
}
num_long_metrics as usize
num_long_metrics
}

fn get_new_gid_advance(hmtx: &Hmtx, new_gid: GlyphId, plan: &Plan) -> u16 {
let Some(old_gid) = plan.reverse_glyph_map.get(&new_gid) else {
return 0;
};
hmtx.advance(*old_gid).unwrap_or(0)
}
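
The rewrite above routes advance lookups through the plan's new-to-old glyph map (get_new_gid_advance, falling back to 0 for glyphs with no source) instead of taking a glyph set and glyph count, but the trimming logic is unchanged: hmtx only needs numberOfHMetrics long (advance, lsb) records, and a run of trailing glyphs that all share the final advance can drop to 2-byte lsb-only entries. A standalone sketch of that walk over a plain advance slice, with made-up numbers:

// Same walk as compute_new_num_h_metrics, but over a plain slice of advances
// indexed by new glyph id (assumes at least one output glyph, as the real code does).
fn trimmed_num_h_metrics(advances: &[u16]) -> usize {
    let mut n = advances.len().min(0xFFFF);
    let last = advances[n - 1];
    // Peel off trailing glyphs whose advance equals the last one; they can
    // reuse it via the short, lsb-only form.
    while n > 1 && advances[n - 2] == last {
        n -= 1;
    }
    n
}

// trimmed_num_h_metrics(&[500, 520, 480, 480, 480]) == 3, so the hmtx_cap
// formula above gives 3 * 4 + (5 - 3) * 2 = 16 bytes instead of 5 * 4 = 20.
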
132 changes: 97 additions & 35 deletions klippa/src/lib.rs
@@ -1,5 +1,6 @@
//! try to define Subset trait so I can add methods for Hmtx
//! TODO: make it generic for all tables
mod cmap;
mod cpal;
mod fvar;
mod glyf_loca;
@@ -22,28 +23,33 @@ pub use parsing_util::{

use fnv::FnvHashMap;
use serialize::Serializer;
use skrifa::raw::tables::cmap::CmapSubtable;
use skrifa::MetadataProvider;
use thiserror::Error;
use write_fonts::read::{
collections::{int_set::Domain, IntSet},
tables::{
cff::Cff,
cff2::Cff2,
glyf::{Glyf, Glyph},
gpos::Gpos,
gsub::Gsub,
gvar::Gvar,
head::Head,
loca::Loca,
name::Name,
os2::Os2,
post::Post,
},
types::NameId,
FontRef, TableProvider, TopLevelTable,
};
use write_fonts::types::GlyphId;
use write_fonts::types::Tag;
use write_fonts::{
read::{
collections::{int_set::Domain, IntSet},
tables::{
cff::Cff,
cff2::Cff2,
cmap::Cmap,
glyf::{Glyf, Glyph},
gpos::Gpos,
gsub::Gsub,
gvar::Gvar,
head::Head,
loca::Loca,
name::Name,
os2::Os2,
post::Post,
},
types::NameId,
FontRef, TableProvider, TopLevelTable,
},
tables::cmap::PlatformId,
};
use write_fonts::{tables::hhea::Hhea, tables::hmtx::Hmtx, tables::maxp::Maxp, FontBuilder};

const MAX_COMPOSITE_OPERATIONS_PER_GLYPH: u8 = 64;
@@ -149,11 +155,14 @@ impl std::ops::BitOrAssign for SubsetFlags {
#[derive(Default)]
pub struct Plan {
unicodes: IntSet<u32>,
glyphs_requested: IntSet<GlyphId>,
glyphset_gsub: IntSet<GlyphId>,
glyphset_colred: IntSet<GlyphId>,
glyphset: IntSet<GlyphId>,
//Old->New glyph id mapping,
glyph_map: FnvHashMap<GlyphId, GlyphId>,
//New->Old glyph id mapping,
reverse_glyph_map: FnvHashMap<GlyphId, GlyphId>,

new_to_old_gid_list: Vec<(GlyphId, GlyphId)>,

@@ -175,6 +184,14 @@ pub struct Plan {
colrv1_layers: FnvHashMap<u32, u32>,
//old->new CPAL palette index map
colr_palettes: FnvHashMap<u16, u16>,

os2_info: Os2Info,
}

#[derive(Default)]
struct Os2Info {
min_cmap_codepoint: u32,
max_cmap_codepoint: u32,
}

impl Plan {
@@ -188,6 +205,7 @@ impl Plan {
name_languages: &IntSet<u16>,
) -> Self {
let mut this = Plan {
glyphs_requested: input_gids.clone(),
font_num_glyphs: get_font_num_glyphs(font),
subset_flags: flags,
drop_tables: drop_tables.clone(),
@@ -273,6 +291,41 @@ impl Plan {
.extend(self.unicode_to_new_gid_list.iter().map(|t| t.1));
self.unicodes
.extend(self.unicode_to_new_gid_list.iter().map(|t| t.0));

// ref: <https://github.com/harfbuzz/harfbuzz/blob/e451e91ec3608a2ebfec34d0c4f0b3d880e00e33/src/hb-subset-plan.cc#L802>
self.os2_info.min_cmap_codepoint = self.unicodes.first().unwrap_or(0xFFFF_u32);
self.os2_info.max_cmap_codepoint = self.unicodes.last().unwrap_or(0xFFFF_u32);

self.collect_variation_selectors(font, input_unicodes);
}

fn collect_variation_selectors(&mut self, font: &FontRef, input_unicodes: &IntSet<u32>) {
if let Ok(cmap) = font.cmap() {
let encoding_records = cmap.encoding_records();
if let Ok(i) = encoding_records.binary_search_by(|r| {
if r.platform_id() != PlatformId::Unicode {
r.platform_id().cmp(&PlatformId::Unicode)
} else if r.encoding_id() != 5 {
r.encoding_id().cmp(&5)
} else {
std::cmp::Ordering::Equal
}
}) {
if let Ok(CmapSubtable::Format14(cmap14)) = encoding_records
.get(i)
.unwrap()
.subtable(cmap.offset_data())
{
self.unicodes.extend(
cmap14
.var_selector()
.iter()
.map(|s| s.var_selector().to_u32())
.filter(|v| input_unicodes.contains(*v)),
);
}
}
}
}

fn populate_gids_to_retain(&mut self, font: &FontRef) {
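
collect_variation_selectors, added above, probes for the single encoding record where Unicode Variation Sequences live: platform 0 (Unicode), encoding 5, whose subtable is cmap format 14. Encoding records are required to be sorted by platform id and then encoding id, which is what makes the binary search valid; the hand-rolled closure is a two-key comparison, equivalent to the tuple form below (relying on the same PlatformId ordering the closure already uses).

// Drop-in equivalent for the comparator closure in the hunk above: order
// records by (platform_id, encoding_id) and probe for (Unicode, 5).
let target = (PlatformId::Unicode, 5);
let found = encoding_records
    .binary_search_by(|r| (r.platform_id(), r.encoding_id()).cmp(&target));
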
@@ -295,28 +348,32 @@ impl Plan {
}

/* Populate a full set of glyphs to retain by adding all referenced composite glyphs. */
let loca = font.loca(None).expect("Error reading loca table");
let glyf = font.glyf().expect("Error reading glyf table");
let operation_count =
self.glyphset_gsub.len() * (MAX_COMPOSITE_OPERATIONS_PER_GLYPH as u64);
for gid in self.glyphset_colred.iter() {
glyf_closure_glyphs(
&loca,
&glyf,
gid,
&mut self.glyphset,
operation_count as i32,
0,
);
if let Ok(loca) = font.loca(None) {
let glyf = font.glyf().expect("Error reading glyf table");
let operation_count =
self.glyphset_gsub.len() * (MAX_COMPOSITE_OPERATIONS_PER_GLYPH as u64);
for gid in self.glyphset_colred.iter() {
glyf_closure_glyphs(
&loca,
&glyf,
gid,
&mut self.glyphset,
operation_count as i32,
0,
);
}
remove_invalid_gids(&mut self.glyphset, self.font_num_glyphs);
} else {
self.glyphset = self.glyphset_colred.clone();
}
remove_invalid_gids(&mut self.glyphset, self.font_num_glyphs);

self.nameid_closure(font);
}

fn create_old_gid_to_new_gid_map(&mut self) {
let pop = self.glyphset.len();
self.glyph_map.reserve(pop as usize);
self.reverse_glyph_map.reserve(pop as usize);
self.new_to_old_gid_list.reserve(pop as usize);

//TODO: Add support for requested_glyph_map, command line option --gid-map
@@ -341,6 +398,8 @@ impl Plan {
}
self.glyph_map
.extend(self.new_to_old_gid_list.iter().map(|x| (x.1, x.0)));
self.reverse_glyph_map
.extend(self.new_to_old_gid_list.iter().map(|x| (x.0, x.1)));
}

fn colr_closure(&mut self, font: &FontRef) {
@@ -445,9 +504,7 @@ fn remove_invalid_gids(gids: &mut IntSet<GlyphId>, num_glyphs: usize) {
}

fn get_font_num_glyphs(font: &FontRef) -> usize {
let loca = font.loca(None).expect("Error reading loca table");
let ret = loca.len();

let ret = font.loca(None).map(|loca| loca.len()).unwrap_or_default();
let maxp = font.maxp().expect("Error reading maxp table");
ret.max(maxp.num_glyphs() as usize)
}
@@ -601,6 +658,11 @@ fn subset_table<'a>(
s: &mut Serializer,
) -> Result<(), SubsetError> {
match tag {
Cmap::TAG => font
.cmap()
.map_err(|_| SubsetError::SubsetTableError(Cmap::TAG))?
.subset(plan, font, s, builder),

Glyf::TAG => font
.glyf()
.map_err(|_| SubsetError::SubsetTableError(Glyf::TAG))?
4 changes: 2 additions & 2 deletions klippa/src/os2.rs
@@ -25,13 +25,13 @@ impl Subset for Os2<'_> {
s.embed_bytes(self.offset_data().as_bytes())
.map_err(|_| SubsetError::SubsetTableError(Os2::TAG))?;

let us_first_char_index: u16 = plan.unicodes.first().unwrap_or(0xFFFF).min(0xFFFF) as u16;
let us_first_char_index: u16 = plan.os2_info.min_cmap_codepoint.min(0xFFFF) as u16;
s.copy_assign(
self.shape().us_first_char_index_byte_range().start,
us_first_char_index,
);

let us_last_char_index: u16 = plan.unicodes.last().unwrap_or(0xFFFF).min(0xFFFF) as u16;
let us_last_char_index: u16 = plan.os2_info.max_cmap_codepoint.min(0xFFFF) as u16;
s.copy_assign(
self.shape().us_last_char_index_byte_range().start,
us_last_char_index,
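
OS/2's usFirstCharIndex and usLastCharIndex are 16-bit fields, so the subsetter now reads the plan's cached min/max cmap codepoints (os2_info, populated alongside the retained unicodes and defaulting to 0xFFFF when the set is empty) and clamps them. A small worked example with made-up codepoints:

// Made-up retained codepoints: Basic Latin capitals plus one emoji.
let min_cmap_codepoint: u32 = 0x0041; // 'A'
let max_cmap_codepoint: u32 = 0x1F600; // above the BMP

let us_first_char_index = min_cmap_codepoint.min(0xFFFF) as u16; // 0x0041
let us_last_char_index = max_cmap_codepoint.min(0xFFFF) as u16; // clamped to 0xFFFF
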
2 changes: 1 addition & 1 deletion klippa/src/post.rs
@@ -55,7 +55,7 @@ fn subset_post_v2tail(post: &Post, plan: &Plan, s: &mut Serializer) -> Result<()
//init all glyphNameIndex as 0
let glyph_index_arr_len = plan.num_output_glyphs * 2;
let idx_start = s
.allocate_size(glyph_index_arr_len)
.allocate_size(glyph_index_arr_len, false)
.map_err(|_| SubsetError::SubsetTableError(Post::TAG))?;

let max_old_gid = plan.glyphset.last().unwrap().to_u32() as usize;
