Merge branch 'main' into gradient-fixes

Chad Brokaw 2023-05-03 14:49:57 -04:00
commit 4f9ae4c937
9 changed files with 287 additions and 133 deletions

@ -3,8 +3,14 @@ name = "vello_encoding"
version = "0.1.0"
edition = "2021"
[features]
default = ["full"]
# Enables support for the full pipeline including late-bound
# resources (gradients, images and glyph runs)
full = ["fello", "guillotiere"]
[dependencies]
bytemuck = { workspace = true }
fello = { workspace = true }
fello = { workspace = true, optional = true }
peniko = { workspace = true }
guillotiere = "0.6.2"
guillotiere = { version = "0.6.2", optional = true }
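The `full` feature (still on by default) is what carries the now-optional `fello` and `guillotiere` dependencies. A minimal consumer-side sketch, assuming a downstream crate opts out with `default-features = false`; only the core encoding API plus the solid-path resolver added later in this commit remain available:

use vello_encoding::{resolve_solid_paths_only, Encoding, Layout};

// Without "full", gradients, images and glyph runs are compiled out, so a
// scene is packed with the lightweight resolver alone.
fn pack_solid(encoding: &Encoding) -> (Layout, Vec<u8>) {
    let mut packed = Vec::new();
    // Panics if the encoding somehow contains late-bound resources.
    let layout = resolve_solid_paths_only(encoding, &mut packed);
    (layout, packed)
}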

@ -196,6 +196,7 @@ impl<T: Sized> BufferSize<T> {
}
/// Returns the number of elements.
#[allow(clippy::len_without_is_empty)]
pub const fn len(self) -> u32 {
self.len
}

@ -1,13 +1,16 @@
// Copyright 2022 The Vello authors
// SPDX-License-Identifier: Apache-2.0 OR MIT
use super::{
resolve::Patch, DrawColor, DrawImage, DrawLinearGradient, DrawRadialGradient, DrawTag, Glyph,
GlyphRun, PathEncoder, PathTag, Transform,
};
use super::{DrawColor, DrawTag, PathEncoder, PathTag, Transform};
use fello::NormalizedCoord;
use peniko::{kurbo::Shape, BlendMode, BrushRef, ColorStop, Extend, GradientKind, Image};
use peniko::{kurbo::Shape, BlendMode, BrushRef};
#[cfg(feature = "full")]
use {
super::{DrawImage, DrawLinearGradient, DrawRadialGradient, Glyph, GlyphRun, Patch},
fello::NormalizedCoord,
peniko::{ColorStop, Extend, GradientKind, Image},
};
/// Encoded data streams for a scene.
#[derive(Clone, Default)]
@ -20,20 +23,13 @@ pub struct Encoding {
pub draw_tags: Vec<DrawTag>,
/// The draw data stream.
pub draw_data: Vec<u8>,
/// Draw data patches for late bound resources.
pub patches: Vec<Patch>,
/// Color stop collection for gradients.
pub color_stops: Vec<ColorStop>,
/// The transform stream.
pub transforms: Vec<Transform>,
/// The line width stream.
pub linewidths: Vec<f32>,
/// Positioned glyph buffer.
pub glyphs: Vec<Glyph>,
/// Sequences of glyphs.
pub glyph_runs: Vec<GlyphRun>,
/// Normalized coordinate buffer for variable fonts.
pub normalized_coords: Vec<NormalizedCoord>,
/// Late bound resource data.
#[cfg(feature = "full")]
pub resources: Resources,
/// Number of encoded paths.
pub n_paths: u32,
/// Number of encoded path segments.
@ -63,15 +59,12 @@ impl Encoding {
self.linewidths.clear();
self.draw_data.clear();
self.draw_tags.clear();
self.glyphs.clear();
self.glyph_runs.clear();
self.normalized_coords.clear();
self.n_paths = 0;
self.n_path_segments = 0;
self.n_clips = 0;
self.n_open_clips = 0;
self.patches.clear();
self.color_stops.clear();
#[cfg(feature = "full")]
self.resources.reset();
if !is_fragment {
self.transforms.push(Transform::IDENTITY);
self.linewidths.push(-1.0);
@ -80,64 +73,74 @@ impl Encoding {
/// Appends another encoding to this one with an optional transform.
pub fn append(&mut self, other: &Self, transform: &Option<Transform>) {
let stops_base = self.color_stops.len();
let glyph_runs_base = self.glyph_runs.len();
let glyphs_base = self.glyphs.len();
let coords_base = self.normalized_coords.len();
let offsets = self.stream_offsets();
#[cfg(feature = "full")]
let glyph_runs_base = {
let offsets = self.stream_offsets();
let stops_base = self.resources.color_stops.len();
let glyph_runs_base = self.resources.glyph_runs.len();
let glyphs_base = self.resources.glyphs.len();
let coords_base = self.resources.normalized_coords.len();
self.resources
.glyphs
.extend_from_slice(&other.resources.glyphs);
self.resources
.normalized_coords
.extend_from_slice(&other.resources.normalized_coords);
self.resources
.glyph_runs
.extend(other.resources.glyph_runs.iter().cloned().map(|mut run| {
run.glyphs.start += glyphs_base;
run.normalized_coords.start += coords_base;
run.stream_offsets.path_tags += offsets.path_tags;
run.stream_offsets.path_data += offsets.path_data;
run.stream_offsets.draw_tags += offsets.draw_tags;
run.stream_offsets.draw_data += offsets.draw_data;
run.stream_offsets.transforms += offsets.transforms;
run.stream_offsets.linewidths += offsets.linewidths;
run
}));
self.resources
.patches
.extend(other.resources.patches.iter().map(|patch| match patch {
Patch::Ramp {
draw_data_offset: offset,
stops,
} => {
let stops = stops.start + stops_base..stops.end + stops_base;
Patch::Ramp {
draw_data_offset: offset + offsets.draw_data,
stops,
}
}
Patch::GlyphRun { index } => Patch::GlyphRun {
index: index + glyph_runs_base,
},
Patch::Image {
image,
draw_data_offset,
} => Patch::Image {
image: image.clone(),
draw_data_offset: *draw_data_offset + offsets.draw_data,
},
}));
self.resources
.color_stops
.extend_from_slice(&other.resources.color_stops);
glyph_runs_base
};
self.path_tags.extend_from_slice(&other.path_tags);
self.path_data.extend_from_slice(&other.path_data);
self.draw_tags.extend_from_slice(&other.draw_tags);
self.draw_data.extend_from_slice(&other.draw_data);
self.glyphs.extend_from_slice(&other.glyphs);
self.normalized_coords
.extend_from_slice(&other.normalized_coords);
self.glyph_runs
.extend(other.glyph_runs.iter().cloned().map(|mut run| {
run.glyphs.start += glyphs_base;
run.normalized_coords.start += coords_base;
run.stream_offsets.path_tags += offsets.path_tags;
run.stream_offsets.path_data += offsets.path_data;
run.stream_offsets.draw_tags += offsets.draw_tags;
run.stream_offsets.draw_data += offsets.draw_data;
run.stream_offsets.transforms += offsets.transforms;
run.stream_offsets.linewidths += offsets.linewidths;
run
}));
self.n_paths += other.n_paths;
self.n_path_segments += other.n_path_segments;
self.n_clips += other.n_clips;
self.n_open_clips += other.n_open_clips;
self.patches
.extend(other.patches.iter().map(|patch| match patch {
Patch::Ramp {
draw_data_offset: offset,
stops,
extend,
} => {
let stops = stops.start + stops_base..stops.end + stops_base;
Patch::Ramp {
draw_data_offset: offset + offsets.draw_data,
stops,
extend: *extend,
}
}
Patch::GlyphRun { index } => Patch::GlyphRun {
index: index + glyph_runs_base,
},
Patch::Image {
image,
draw_data_offset,
} => Patch::Image {
image: image.clone(),
draw_data_offset: *draw_data_offset + offsets.draw_data,
},
}));
self.color_stops.extend_from_slice(&other.color_stops);
if let Some(transform) = *transform {
self.transforms
.extend(other.transforms.iter().map(|x| transform * *x));
for run in &mut self.glyph_runs[glyph_runs_base..] {
#[cfg(feature = "full")]
for run in &mut self.resources.glyph_runs[glyph_runs_base..] {
run.transform = transform * run.transform;
}
} else {
@ -201,7 +204,9 @@ impl Encoding {
}
/// Encodes a brush with an optional alpha modifier.
#[allow(unused_variables)]
pub fn encode_brush<'b>(&mut self, brush: impl Into<BrushRef<'b>>, alpha: f32) {
#[cfg(feature = "full")]
use super::math::point_to_f32;
match brush.into() {
BrushRef::Solid(color) => {
@ -212,6 +217,7 @@ impl Encoding {
};
self.encode_color(DrawColor::new(color));
}
#[cfg(feature = "full")]
BrushRef::Gradient(gradient) => match gradient.kind {
GradientKind::Linear { start, end } => {
self.encode_linear_gradient(
@ -248,9 +254,13 @@ impl Encoding {
todo!("sweep gradients aren't supported yet!")
}
},
#[cfg(feature = "full")]
BrushRef::Image(image) => {
#[cfg(feature = "full")]
self.encode_image(image, alpha);
}
#[cfg(not(feature = "full"))]
_ => panic!("brushes other than solid require the 'full' feature to be enabled"),
}
}
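Caller-side view of the gating in `encode_brush`: solid brushes stay cfg-independent, while gradient and image brushes now require `full` and otherwise hit the panic arm above. A hedged sketch; `encode_brush` is this crate's API, and the `Color`-to-`BrushRef` conversion is assumed to come from peniko:

use peniko::Color;
use vello_encoding::Encoding;

fn fill_solid(encoding: &mut Encoding, color: Color, alpha: f32) {
    // Takes the BrushRef::Solid path through encode_color regardless of features.
    encoding.encode_brush(color, alpha);
}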
@ -261,6 +271,7 @@ impl Encoding {
}
/// Encodes a linear gradient brush.
#[cfg(feature = "full")]
pub fn encode_linear_gradient(
&mut self,
gradient: DrawLinearGradient,
@ -275,6 +286,7 @@ impl Encoding {
}
/// Encodes a radial gradient brush.
#[cfg(feature = "full")]
pub fn encode_radial_gradient(
&mut self,
gradient: DrawRadialGradient,
@ -289,10 +301,11 @@ impl Encoding {
}
/// Encodes an image brush.
#[cfg(feature = "full")]
pub fn encode_image(&mut self, image: &Image, _alpha: f32) {
// TODO: feed the alpha multiplier through the full pipeline for consistency
// with other brushes?
self.patches.push(Patch::Image {
self.resources.patches.push(Patch::Image {
image: image.clone(),
draw_data_offset: self.draw_data.len(),
});
@ -333,28 +346,51 @@ impl Encoding {
self.path_tags.swap(len - 1, len - 2);
}
fn add_ramp(
&mut self,
color_stops: impl Iterator<Item = ColorStop>,
alpha: f32,
extend: Extend,
) {
#[cfg(feature = "full")]
fn add_ramp(&mut self, color_stops: impl Iterator<Item = ColorStop>, alpha: f32) {
let offset = self.draw_data.len();
let stops_start = self.color_stops.len();
let stops_start = self.resources.color_stops.len();
if alpha != 1.0 {
self.color_stops
self.resources
.color_stops
.extend(color_stops.map(|stop| stop.with_alpha_factor(alpha)));
} else {
self.color_stops.extend(color_stops);
self.resources.color_stops.extend(color_stops);
}
self.patches.push(Patch::Ramp {
self.resources.patches.push(Patch::Ramp {
draw_data_offset: offset,
stops: stops_start..self.color_stops.len(),
extend,
stops: stops_start..self.resources.color_stops.len(),
});
}
}
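What `add_ramp` records: the stops accumulate in `resources.color_stops`, and the pushed `Patch::Ramp` pairs a draw-data offset with the stop range so the resolver can later patch the allocated ramp id into the draw data at that offset. A small hedged sketch over the public `Patch` type (the full field set is elided with `..`):

use vello_encoding::Patch;

// Number of color stops referenced by a ramp patch; glyph-run and image
// patches are resolved through different caches.
fn ramp_stop_count(patch: &Patch) -> Option<usize> {
    match patch {
        Patch::Ramp { stops, .. } => Some(stops.len()),
        _ => None,
    }
}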
/// Encoded data for late bound resources.
#[cfg(feature = "full")]
#[derive(Clone, Default)]
pub struct Resources {
/// Draw data patches for late bound resources.
pub patches: Vec<Patch>,
/// Color stop collection for gradients.
pub color_stops: Vec<ColorStop>,
/// Positioned glyph buffer.
pub glyphs: Vec<Glyph>,
/// Sequences of glyphs.
pub glyph_runs: Vec<GlyphRun>,
/// Normalized coordinate buffer for variable fonts.
pub normalized_coords: Vec<NormalizedCoord>,
}
#[cfg(feature = "full")]
impl Resources {
fn reset(&mut self) {
self.patches.clear();
self.color_stops.clear();
self.glyphs.clear();
self.glyph_runs.clear();
self.normalized_coords.clear();
}
}
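Because `append` now has to rebase everything that moved into `Resources`, the caller-facing contract is worth spelling out: glyph and coordinate ranges, glyph-run indices in `Patch::GlyphRun`, color-stop ranges in `Patch::Ramp`, and per-run stream offsets are all shifted by the destination's pre-append lengths. A hedged caller-side sketch; only `Encoding::append` and `Transform` are taken from this crate:

use vello_encoding::{Encoding, Transform};

// Splices a reusable fragment into a scene. If `scene` already holds N glyph
// runs, a fragment patch Patch::GlyphRun { index } arrives as index + N, and
// its stream offsets are bumped by the scene's pre-append stream lengths.
fn splice(scene: &mut Encoding, fragment: &Encoding, transform: Option<Transform>) {
    scene.append(fragment, &transform);
}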
/// Snapshot of offsets for encoded streams.
#[derive(Copy, Clone, Default, Debug)]
pub struct StreamOffsets {
@ -373,6 +409,7 @@ pub struct StreamOffsets {
}
impl StreamOffsets {
#[cfg(feature = "full")]
pub(crate) fn add(&mut self, other: &Self) {
self.path_tags += other.path_tags;
self.path_data += other.path_data;

@ -8,6 +8,7 @@ use std::collections::{hash_map::Entry, HashMap};
const DEFAULT_ATLAS_SIZE: i32 = 1024;
const MAX_ATLAS_SIZE: i32 = 8192;
#[derive(Default)]
pub struct Images<'a> {
pub width: u32,
pub height: u32,

@ -8,12 +8,16 @@ mod clip;
mod config;
mod draw;
mod encoding;
#[cfg(feature = "full")]
mod glyph;
#[cfg(feature = "full")]
mod glyph_cache;
#[cfg(feature = "full")]
mod image_cache;
mod math;
mod monoid;
mod path;
#[cfg(feature = "full")]
mod ramp_cache;
mod resolve;
@ -28,11 +32,17 @@ pub use draw::{
DrawRadialGradient, DrawTag,
};
pub use encoding::{Encoding, StreamOffsets};
pub use glyph::{Glyph, GlyphRun};
pub use math::Transform;
pub use monoid::Monoid;
pub use path::{
Cubic, Path, PathBbox, PathEncoder, PathMonoid, PathSegment, PathSegmentType, PathTag, Tile,
};
pub use ramp_cache::Ramps;
pub use resolve::{Layout, Patch, Resolver};
pub use resolve::{resolve_solid_paths_only, Layout};
#[cfg(feature = "full")]
pub use {
encoding::Resources,
glyph::{Glyph, GlyphRun},
ramp_cache::Ramps,
resolve::{Patch, Resolver},
};

@ -72,6 +72,7 @@ impl Mul for Transform {
}
}
#[allow(dead_code)]
pub fn point_to_f32(point: kurbo::Point) -> [f32; 2] {
[point.x as f32, point.y as f32]
}

@ -403,6 +403,7 @@ impl<'a> PathEncoder<'a> {
}
}
#[cfg(feature = "full")]
impl fello::scale::Pen for PathEncoder<'_> {
fn move_to(&mut self, x: f32, y: f32) {
self.move_to(x, y)

@ -1,16 +1,19 @@
// Copyright 2022 The Vello authors
// SPDX-License-Identifier: Apache-2.0 OR MIT
use std::ops::Range;
use bytemuck::{Pod, Zeroable};
use peniko::{Extend, Image};
use super::{
glyph_cache::{CachedRange, GlyphCache, GlyphKey},
image_cache::{ImageCache, Images},
ramp_cache::{RampCache, Ramps},
DrawTag, Encoding, PathTag, StreamOffsets, Transform,
use super::{DrawTag, Encoding, PathTag, StreamOffsets, Transform};
#[cfg(feature = "full")]
use {
super::{
glyph_cache::{CachedRange, GlyphCache, GlyphKey},
image_cache::{ImageCache, Images},
ramp_cache::{RampCache, Ramps},
},
peniko::Image,
std::ops::Range,
};
/// Layout of a packed encoding.
@ -100,7 +103,63 @@ impl Layout {
}
}
/// Resolves and packs an encoding that contains only paths with solid color
/// fills.
///
/// Panics if the encoding contains any late bound resources (gradients, images
/// or glyph runs).
pub fn resolve_solid_paths_only(encoding: &Encoding, packed: &mut Vec<u8>) -> Layout {
#[cfg(feature = "full")]
assert!(
encoding.resources.patches.is_empty(),
"this resolve function doesn't support late bound resources"
);
let data = packed;
data.clear();
let mut layout = Layout {
n_paths: encoding.n_paths,
n_clips: encoding.n_clips,
..Layout::default()
};
let SceneBufferSizes {
buffer_size,
path_tag_padded,
} = SceneBufferSizes::new(encoding, &StreamOffsets::default());
data.reserve(buffer_size);
// Path tag stream
layout.path_tag_base = size_to_words(data.len());
data.extend_from_slice(bytemuck::cast_slice(&encoding.path_tags));
for _ in 0..encoding.n_open_clips {
data.extend_from_slice(bytemuck::bytes_of(&PathTag::PATH));
}
data.resize(path_tag_padded, 0);
// Path data stream
layout.path_data_base = size_to_words(data.len());
data.extend_from_slice(bytemuck::cast_slice(&encoding.path_data));
// Draw tag stream
layout.draw_tag_base = size_to_words(data.len());
// Bin data follows draw info
layout.bin_data_start = encoding.draw_tags.iter().map(|tag| tag.info_size()).sum();
data.extend_from_slice(bytemuck::cast_slice(&encoding.draw_tags));
for _ in 0..encoding.n_open_clips {
data.extend_from_slice(bytemuck::bytes_of(&DrawTag::END_CLIP));
}
// Draw data stream
layout.draw_data_base = size_to_words(data.len());
data.extend_from_slice(bytemuck::cast_slice(&encoding.draw_data));
// Transform stream
layout.transform_base = size_to_words(data.len());
data.extend_from_slice(bytemuck::cast_slice(&encoding.transforms));
// Linewidth stream
layout.linewidth_base = size_to_words(data.len());
data.extend_from_slice(bytemuck::cast_slice(&encoding.linewidths));
layout.n_draw_objects = layout.n_paths;
assert_eq!(buffer_size, data.len());
layout
}
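The `*_base` fields written above are u32 word offsets into the packed buffer (that is what `size_to_words` produces), so a stream can be sliced back out by bracketing two consecutive bases. A hedged sketch, assuming `Layout`'s offset fields are public as the uses in this file suggest:

use vello_encoding::Layout;

// Bytes of the (padded) path tag stream inside a buffer produced by
// resolve_solid_paths_only or Resolver::resolve.
fn path_tag_bytes<'a>(layout: &Layout, packed: &'a [u8]) -> &'a [u8] {
    let start = layout.path_tag_base as usize * 4; // word offset -> byte offset
    let end = layout.path_data_base as usize * 4; // next stream starts here
    &packed[start..end]
}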
/// Resolver for late bound resources.
#[cfg(feature = "full")]
#[derive(Default)]
pub struct Resolver {
glyph_cache: GlyphCache,
@ -112,6 +171,7 @@ pub struct Resolver {
patches: Vec<ResolvedPatch>,
}
#[cfg(feature = "full")]
impl Resolver {
/// Creates a new resource cache.
pub fn new() -> Self {
@ -125,7 +185,12 @@ impl Resolver {
encoding: &Encoding,
packed: &mut Vec<u8>,
) -> (Layout, Ramps<'a>, Images<'a>) {
let sizes = self.resolve_patches(encoding);
let resources = &encoding.resources;
if resources.patches.is_empty() {
let layout = resolve_solid_paths_only(encoding, packed);
return (layout, Ramps::default(), Images::default());
}
let patch_sizes = self.resolve_patches(encoding);
self.resolve_pending_images();
let data = packed;
data.clear();
@ -134,20 +199,11 @@ impl Resolver {
n_clips: encoding.n_clips,
..Layout::default()
};
// Compute size of data buffer
let n_path_tags =
encoding.path_tags.len() + sizes.path_tags + encoding.n_open_clips as usize;
let path_tag_padded = align_up(n_path_tags, 4 * crate::config::PATH_REDUCE_WG);
let capacity = path_tag_padded
+ slice_size_in_bytes(&encoding.path_data, sizes.path_data)
+ slice_size_in_bytes(
&encoding.draw_tags,
sizes.draw_tags + encoding.n_open_clips as usize,
)
+ slice_size_in_bytes(&encoding.draw_data, sizes.draw_data)
+ slice_size_in_bytes(&encoding.transforms, sizes.transforms)
+ slice_size_in_bytes(&encoding.linewidths, sizes.linewidths);
data.reserve(capacity);
let SceneBufferSizes {
buffer_size,
path_tag_padded,
} = SceneBufferSizes::new(encoding, &patch_sizes);
data.reserve(buffer_size);
// Path tag stream
layout.path_tag_base = size_to_words(data.len());
{
@ -156,7 +212,7 @@ impl Resolver {
for patch in &self.patches {
if let ResolvedPatch::GlyphRun { index, glyphs, .. } = patch {
layout.n_paths += 1;
let stream_offset = encoding.glyph_runs[*index].stream_offsets.path_tags;
let stream_offset = resources.glyph_runs[*index].stream_offsets.path_tags;
if pos < stream_offset {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
pos = stream_offset;
@ -185,7 +241,9 @@ impl Resolver {
let stream = &encoding.path_data;
for patch in &self.patches {
if let ResolvedPatch::GlyphRun { index, glyphs, .. } = patch {
let stream_offset = encoding.glyph_runs[*index].stream_offsets.path_data;
let stream_offset = encoding.resources.glyph_runs[*index]
.stream_offsets
.path_data;
if pos < stream_offset {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
pos = stream_offset;
@ -269,14 +327,14 @@ impl Resolver {
transform,
} = patch
{
let run = &encoding.glyph_runs[*index];
let stream_offset = encoding.glyph_runs[*index].stream_offsets.transforms;
let run = &resources.glyph_runs[*index];
let stream_offset = run.stream_offsets.transforms;
if pos < stream_offset {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
pos = stream_offset;
}
if let Some(glyph_transform) = run.glyph_transform {
for glyph in &encoding.glyphs[run.glyphs.clone()] {
for glyph in &resources.glyphs[run.glyphs.clone()] {
let xform = *transform
* Transform {
matrix: [1.0, 0.0, 0.0, -1.0],
@ -286,7 +344,7 @@ impl Resolver {
data.extend_from_slice(bytemuck::bytes_of(&xform));
}
} else {
for glyph in &encoding.glyphs[run.glyphs.clone()] {
for glyph in &resources.glyphs[run.glyphs.clone()] {
let xform = *transform
* Transform {
matrix: [1.0, 0.0, 0.0, -1.0],
@ -308,7 +366,7 @@ impl Resolver {
let stream = &encoding.linewidths;
for patch in &self.patches {
if let ResolvedPatch::GlyphRun { index, glyphs, .. } = patch {
let stream_offset = encoding.glyph_runs[*index].stream_offsets.linewidths;
let stream_offset = resources.glyph_runs[*index].stream_offsets.linewidths;
if pos < stream_offset {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
pos = stream_offset;
@ -325,7 +383,7 @@ impl Resolver {
}
}
layout.n_draw_objects = layout.n_paths;
assert_eq!(capacity, data.len());
assert_eq!(buffer_size, data.len());
(layout, self.ramp_cache.ramps(), self.image_cache.images())
}
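For contrast with the solid-only helper, the full-feature path goes through a long-lived `Resolver` so its ramp, image and glyph caches persist across frames. A hedged usage sketch; the upload comment describes intent, not an API in this diff:

use vello_encoding::{Encoding, Resolver};

fn pack_full(resolver: &mut Resolver, encoding: &Encoding) -> Vec<u8> {
    let mut packed = Vec::new();
    // Fast path: with no patches this now delegates to resolve_solid_paths_only.
    let (_layout, _ramps, _images) = resolver.resolve(encoding, &mut packed);
    // _ramps and _images borrow the resolver's caches; a renderer would
    // typically upload them to textures/atlases before the next resolve call.
    packed
}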
@ -337,14 +395,15 @@ impl Resolver {
self.pending_images.clear();
self.patches.clear();
let mut sizes = StreamOffsets::default();
for patch in &encoding.patches {
let resources = &encoding.resources;
for patch in &resources.patches {
match patch {
Patch::Ramp {
draw_data_offset,
stops,
extend,
} => {
let ramp_id = self.ramp_cache.add(&encoding.color_stops[stops.clone()]);
let ramp_id = self.ramp_cache.add(&resources.color_stops[stops.clone()]);
self.patches.push(ResolvedPatch::Ramp {
draw_data_offset: *draw_data_offset + sizes.draw_data,
ramp_id,
@ -353,7 +412,7 @@ impl Resolver {
}
Patch::GlyphRun { index } => {
let mut run_sizes = StreamOffsets::default();
let run = &encoding.glyph_runs[*index];
let run = &resources.glyph_runs[*index];
let font_id = run.font.data.id();
let font_size_u32 = run.font_size.to_bits();
let Ok(font_file) = fello::raw::FileRef::new(run.font.data.as_ref()) else { continue };
@ -364,8 +423,8 @@ impl Resolver {
}
};
let Some(font) = font else { continue };
let glyphs = &encoding.glyphs[run.glyphs.clone()];
let coords = &encoding.normalized_coords[run.normalized_coords.clone()];
let glyphs = &resources.glyphs[run.glyphs.clone()];
let coords = &resources.normalized_coords[run.normalized_coords.clone()];
let key = fello::FontKey {
data_id: font_id,
index: run.font.index,
@ -467,8 +526,9 @@ impl Resolver {
}
}
#[derive(Clone)]
/// Patch for a late bound resource.
#[cfg(feature = "full")]
#[derive(Clone)]
pub enum Patch {
/// Gradient ramp resource.
Ramp {
@ -494,12 +554,14 @@ pub enum Patch {
}
/// Image to be allocated in the atlas.
#[cfg(feature = "full")]
#[derive(Clone, Debug)]
struct PendingImage {
image: Image,
xy: Option<(u32, u32)>,
}
#[cfg(feature = "full")]
#[derive(Clone, Debug)]
enum ResolvedPatch {
Ramp {
@ -526,6 +588,36 @@ enum ResolvedPatch {
},
}
struct SceneBufferSizes {
/// Full size of the scene buffer in bytes.
buffer_size: usize,
/// Padded length of the path tag stream in bytes.
path_tag_padded: usize,
}
impl SceneBufferSizes {
/// Computes common scene buffer sizes for the given encoding and patch
/// stream sizes.
fn new(encoding: &Encoding, patch_sizes: &StreamOffsets) -> Self {
let n_path_tags =
encoding.path_tags.len() + patch_sizes.path_tags + encoding.n_open_clips as usize;
let path_tag_padded = align_up(n_path_tags, 4 * crate::config::PATH_REDUCE_WG);
let buffer_size = path_tag_padded
+ slice_size_in_bytes(&encoding.path_data, patch_sizes.path_data)
+ slice_size_in_bytes(
&encoding.draw_tags,
patch_sizes.draw_tags + encoding.n_open_clips as usize,
)
+ slice_size_in_bytes(&encoding.draw_data, patch_sizes.draw_data)
+ slice_size_in_bytes(&encoding.transforms, patch_sizes.transforms)
+ slice_size_in_bytes(&encoding.linewidths, patch_sizes.linewidths);
Self {
buffer_size,
path_tag_padded,
}
}
}
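The padding rule is the only non-obvious part of the size computation: the path tag stream is rounded up to a multiple of 4 * PATH_REDUCE_WG bytes, presumably so the GPU path-tag reduction pass always works on whole workgroup-sized chunks. A hedged arithmetic sketch (the workgroup size of 256 is illustrative; `align_up`'s real definition is not shown in this diff):

// Example: 300 encoded tags + 20 tags from patches + 1 open clip = 321 path
// tags; with PATH_REDUCE_WG = 256 the chunk is 1024 bytes, so the padded
// length comes out to 1024.
fn padded_path_tag_len(n_path_tags: usize, path_reduce_wg: usize) -> usize {
    let chunk = 4 * path_reduce_wg;
    (n_path_tags + chunk - 1) / chunk * chunk // round up to whole chunks
}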
fn slice_size_in_bytes<T: Sized>(slice: &[T], extra: usize) -> usize {
(slice.len() + extra) * std::mem::size_of::<T>()
}

@ -205,8 +205,8 @@ impl<'a> DrawGlyphs<'a> {
/// Creates a new builder for encoding a glyph run for the specified
/// encoding with the given font.
pub fn new(encoding: &'a mut Encoding, font: &Font) -> Self {
let coords_start = encoding.normalized_coords.len();
let glyphs_start = encoding.glyphs.len();
let coords_start = encoding.resources.normalized_coords.len();
let glyphs_start = encoding.resources.glyphs.len();
let stream_offsets = encoding.stream_offsets();
Self {
encoding,
@ -264,10 +264,14 @@ impl<'a> DrawGlyphs<'a> {
/// Sets the normalized design space coordinates for a variable font instance.
pub fn normalized_coords(mut self, coords: &[NormalizedCoord]) -> Self {
self.encoding
.resources
.normalized_coords
.truncate(self.run.normalized_coords.start);
self.encoding.normalized_coords.extend_from_slice(coords);
self.run.normalized_coords.end = self.encoding.normalized_coords.len();
self.encoding
.resources
.normalized_coords
.extend_from_slice(coords);
self.run.normalized_coords.end = self.encoding.resources.normalized_coords.len();
self
}
@ -292,18 +296,19 @@ impl<'a> DrawGlyphs<'a> {
///
/// The `style` parameter accepts either `Fill` or `&Stroke` types.
pub fn draw(mut self, style: impl Into<StyleRef<'a>>, glyphs: impl Iterator<Item = Glyph>) {
let resources = &mut self.encoding.resources;
self.run.style = style.into().to_owned();
self.encoding.glyphs.extend(glyphs);
self.run.glyphs.end = self.encoding.glyphs.len();
resources.glyphs.extend(glyphs);
self.run.glyphs.end = resources.glyphs.len();
if self.run.glyphs.is_empty() {
self.encoding
resources
.normalized_coords
.truncate(self.run.normalized_coords.start);
return;
}
let index = self.encoding.glyph_runs.len();
self.encoding.glyph_runs.push(self.run);
self.encoding.patches.push(Patch::GlyphRun { index });
let index = resources.glyph_runs.len();
resources.glyph_runs.push(self.run);
resources.patches.push(Patch::GlyphRun { index });
self.encoding.encode_brush(self.brush, self.brush_alpha);
}
}
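End to end, the glyph builder now threads everything through `encoding.resources`: `new` snapshots the current glyph and coordinate lengths, `draw` pushes the glyphs and the run, then records a `Patch::GlyphRun` before encoding the brush. A hedged usage sketch; `DrawGlyphs` is assumed to be in scope (its module path is not shown in this diff), `Font` is assumed to be `peniko::Font`, and only builder methods visible above are used:

use peniko::{Fill, Font};
use vello_encoding::{Encoding, Glyph};

fn draw_run(encoding: &mut Encoding, font: &Font, glyphs: Vec<Glyph>) {
    // Per the doc above, the style parameter accepts `Fill` or `&Stroke`.
    DrawGlyphs::new(encoding, font).draw(Fill::NonZero, glyphs.into_iter());
}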