Merge branch 'main' into gradient-fixes

Chad Brokaw 2023-05-03 14:49:57 -04:00
commit 4f9ae4c937
9 changed files with 287 additions and 133 deletions

View file

@@ -3,8 +3,14 @@ name = "vello_encoding"
 version = "0.1.0"
 edition = "2021"
 
+[features]
+default = ["full"]
+# Enables support for the full pipeline including late-bound
+# resources (gradients, images and glyph runs)
+full = ["fello", "guillotiere"]
+
 [dependencies]
 bytemuck = { workspace = true }
-fello = { workspace = true }
+fello = { workspace = true, optional = true }
 peniko = { workspace = true }
-guillotiere = "0.6.2"
+guillotiere = { version = "0.6.2", optional = true }

View file

@@ -196,6 +196,7 @@ impl<T: Sized> BufferSize<T> {
     }
 
     /// Returns the number of elements.
+    #[allow(clippy::len_without_is_empty)]
     pub const fn len(self) -> u32 {
         self.len
     }

View file

@@ -1,13 +1,16 @@
 // Copyright 2022 The Vello authors
 // SPDX-License-Identifier: Apache-2.0 OR MIT
 
-use super::{
-    resolve::Patch, DrawColor, DrawImage, DrawLinearGradient, DrawRadialGradient, DrawTag, Glyph,
-    GlyphRun, PathEncoder, PathTag, Transform,
-};
-use fello::NormalizedCoord;
-use peniko::{kurbo::Shape, BlendMode, BrushRef, ColorStop, Extend, GradientKind, Image};
+use super::{DrawColor, DrawTag, PathEncoder, PathTag, Transform};
+use peniko::{kurbo::Shape, BlendMode, BrushRef};
+
+#[cfg(feature = "full")]
+use {
+    super::{DrawImage, DrawLinearGradient, DrawRadialGradient, Glyph, GlyphRun, Patch},
+    fello::NormalizedCoord,
+    peniko::{ColorStop, Extend, GradientKind, Image},
+};
 
 /// Encoded data streams for a scene.
 #[derive(Clone, Default)]
@@ -20,20 +23,13 @@ pub struct Encoding {
     pub draw_tags: Vec<DrawTag>,
     /// The draw data stream.
     pub draw_data: Vec<u8>,
-    /// Draw data patches for late bound resources.
-    pub patches: Vec<Patch>,
-    /// Color stop collection for gradients.
-    pub color_stops: Vec<ColorStop>,
     /// The transform stream.
     pub transforms: Vec<Transform>,
     /// The line width stream.
     pub linewidths: Vec<f32>,
-    /// Positioned glyph buffer.
-    pub glyphs: Vec<Glyph>,
-    /// Sequences of glyphs.
-    pub glyph_runs: Vec<GlyphRun>,
-    /// Normalized coordinate buffer for variable fonts.
-    pub normalized_coords: Vec<NormalizedCoord>,
+    /// Late bound resource data.
+    #[cfg(feature = "full")]
+    pub resources: Resources,
     /// Number of encoded paths.
     pub n_paths: u32,
     /// Number of encoded path segments.
@@ -63,15 +59,12 @@ impl Encoding {
         self.linewidths.clear();
         self.draw_data.clear();
         self.draw_tags.clear();
-        self.glyphs.clear();
-        self.glyph_runs.clear();
-        self.normalized_coords.clear();
         self.n_paths = 0;
         self.n_path_segments = 0;
         self.n_clips = 0;
         self.n_open_clips = 0;
-        self.patches.clear();
-        self.color_stops.clear();
+        #[cfg(feature = "full")]
+        self.resources.reset();
         if !is_fragment {
             self.transforms.push(Transform::IDENTITY);
             self.linewidths.push(-1.0);
@@ -80,20 +73,22 @@ impl Encoding {
     /// Appends another encoding to this one with an optional transform.
     pub fn append(&mut self, other: &Self, transform: &Option<Transform>) {
-        let stops_base = self.color_stops.len();
-        let glyph_runs_base = self.glyph_runs.len();
-        let glyphs_base = self.glyphs.len();
-        let coords_base = self.normalized_coords.len();
-        let offsets = self.stream_offsets();
-        self.path_tags.extend_from_slice(&other.path_tags);
-        self.path_data.extend_from_slice(&other.path_data);
-        self.draw_tags.extend_from_slice(&other.draw_tags);
-        self.draw_data.extend_from_slice(&other.draw_data);
-        self.glyphs.extend_from_slice(&other.glyphs);
-        self.normalized_coords
-            .extend_from_slice(&other.normalized_coords);
-        self.glyph_runs
-            .extend(other.glyph_runs.iter().cloned().map(|mut run| {
+        #[cfg(feature = "full")]
+        let glyph_runs_base = {
+            let offsets = self.stream_offsets();
+            let stops_base = self.resources.color_stops.len();
+            let glyph_runs_base = self.resources.glyph_runs.len();
+            let glyphs_base = self.resources.glyphs.len();
+            let coords_base = self.resources.normalized_coords.len();
+            self.resources
+                .glyphs
+                .extend_from_slice(&other.resources.glyphs);
+            self.resources
+                .normalized_coords
+                .extend_from_slice(&other.resources.normalized_coords);
+            self.resources
+                .glyph_runs
+                .extend(other.resources.glyph_runs.iter().cloned().map(|mut run| {
                 run.glyphs.start += glyphs_base;
                 run.normalized_coords.start += coords_base;
                 run.stream_offsets.path_tags += offsets.path_tags;
@@ -104,22 +99,17 @@ impl Encoding {
                 run.stream_offsets.linewidths += offsets.linewidths;
                 run
             }));
-        self.n_paths += other.n_paths;
-        self.n_path_segments += other.n_path_segments;
-        self.n_clips += other.n_clips;
-        self.n_open_clips += other.n_open_clips;
-        self.patches
-            .extend(other.patches.iter().map(|patch| match patch {
+            self.resources
+                .patches
+                .extend(other.resources.patches.iter().map(|patch| match patch {
                 Patch::Ramp {
                     draw_data_offset: offset,
                     stops,
-                    extend,
                 } => {
                     let stops = stops.start + stops_base..stops.end + stops_base;
                     Patch::Ramp {
                         draw_data_offset: offset + offsets.draw_data,
                         stops,
-                        extend: *extend,
                     }
                 }
                 Patch::GlyphRun { index } => Patch::GlyphRun {
@@ -133,11 +123,24 @@ impl Encoding {
                     draw_data_offset: *draw_data_offset + offsets.draw_data,
                 },
             }));
-        self.color_stops.extend_from_slice(&other.color_stops);
+            self.resources
+                .color_stops
+                .extend_from_slice(&other.resources.color_stops);
+            glyph_runs_base
+        };
+        self.path_tags.extend_from_slice(&other.path_tags);
+        self.path_data.extend_from_slice(&other.path_data);
+        self.draw_tags.extend_from_slice(&other.draw_tags);
+        self.draw_data.extend_from_slice(&other.draw_data);
+        self.n_paths += other.n_paths;
+        self.n_path_segments += other.n_path_segments;
+        self.n_clips += other.n_clips;
+        self.n_open_clips += other.n_open_clips;
         if let Some(transform) = *transform {
             self.transforms
                 .extend(other.transforms.iter().map(|x| transform * *x));
-            for run in &mut self.glyph_runs[glyph_runs_base..] {
+            #[cfg(feature = "full")]
+            for run in &mut self.resources.glyph_runs[glyph_runs_base..] {
                 run.transform = transform * run.transform;
             }
         } else {
@@ -201,7 +204,9 @@ impl Encoding {
     }
 
     /// Encodes a brush with an optional alpha modifier.
+    #[allow(unused_variables)]
     pub fn encode_brush<'b>(&mut self, brush: impl Into<BrushRef<'b>>, alpha: f32) {
+        #[cfg(feature = "full")]
         use super::math::point_to_f32;
         match brush.into() {
             BrushRef::Solid(color) => {
@@ -212,6 +217,7 @@ impl Encoding {
                 };
                 self.encode_color(DrawColor::new(color));
             }
+            #[cfg(feature = "full")]
             BrushRef::Gradient(gradient) => match gradient.kind {
                 GradientKind::Linear { start, end } => {
                     self.encode_linear_gradient(
@@ -248,9 +254,13 @@ impl Encoding {
                     todo!("sweep gradients aren't supported yet!")
                 }
             },
+            #[cfg(feature = "full")]
             BrushRef::Image(image) => {
+                #[cfg(feature = "full")]
                 self.encode_image(image, alpha);
             }
+            #[cfg(not(feature = "full"))]
+            _ => panic!("brushes other than solid require the 'full' feature to be enabled"),
         }
     }
@@ -261,6 +271,7 @@ impl Encoding {
     }
 
     /// Encodes a linear gradient brush.
+    #[cfg(feature = "full")]
     pub fn encode_linear_gradient(
         &mut self,
         gradient: DrawLinearGradient,
@@ -275,6 +286,7 @@ impl Encoding {
     }
 
     /// Encodes a radial gradient brush.
+    #[cfg(feature = "full")]
     pub fn encode_radial_gradient(
         &mut self,
         gradient: DrawRadialGradient,
@@ -289,10 +301,11 @@ impl Encoding {
     }
 
     /// Encodes an image brush.
+    #[cfg(feature = "full")]
     pub fn encode_image(&mut self, image: &Image, _alpha: f32) {
         // TODO: feed the alpha multiplier through the full pipeline for consistency
         // with other brushes?
-        self.patches.push(Patch::Image {
+        self.resources.patches.push(Patch::Image {
             image: image.clone(),
             draw_data_offset: self.draw_data.len(),
         });
@@ -333,28 +346,51 @@ impl Encoding {
         self.path_tags.swap(len - 1, len - 2);
     }
 
-    fn add_ramp(
-        &mut self,
-        color_stops: impl Iterator<Item = ColorStop>,
-        alpha: f32,
-        extend: Extend,
-    ) {
+    #[cfg(feature = "full")]
+    fn add_ramp(&mut self, color_stops: impl Iterator<Item = ColorStop>, alpha: f32) {
         let offset = self.draw_data.len();
-        let stops_start = self.color_stops.len();
+        let stops_start = self.resources.color_stops.len();
         if alpha != 1.0 {
-            self.color_stops
+            self.resources
+                .color_stops
                 .extend(color_stops.map(|stop| stop.with_alpha_factor(alpha)));
         } else {
-            self.color_stops.extend(color_stops);
+            self.resources.color_stops.extend(color_stops);
        }
-        self.patches.push(Patch::Ramp {
+        self.resources.patches.push(Patch::Ramp {
             draw_data_offset: offset,
-            stops: stops_start..self.color_stops.len(),
-            extend,
+            stops: stops_start..self.resources.color_stops.len(),
         });
     }
 }
 
+/// Encoded data for late bound resources.
+#[cfg(feature = "full")]
+#[derive(Clone, Default)]
+pub struct Resources {
+    /// Draw data patches for late bound resources.
+    pub patches: Vec<Patch>,
+    /// Color stop collection for gradients.
+    pub color_stops: Vec<ColorStop>,
+    /// Positioned glyph buffer.
+    pub glyphs: Vec<Glyph>,
+    /// Sequences of glyphs.
+    pub glyph_runs: Vec<GlyphRun>,
+    /// Normalized coordinate buffer for variable fonts.
+    pub normalized_coords: Vec<NormalizedCoord>,
+}
+
+#[cfg(feature = "full")]
+impl Resources {
+    fn reset(&mut self) {
+        self.patches.clear();
+        self.color_stops.clear();
+        self.glyphs.clear();
+        self.glyph_runs.clear();
+        self.normalized_coords.clear();
    }
+}
+
 /// Snapshot of offsets for encoded streams.
 #[derive(Copy, Clone, Default, Debug)]
 pub struct StreamOffsets {
@@ -373,6 +409,7 @@ pub struct StreamOffsets {
 }
 
 impl StreamOffsets {
+    #[cfg(feature = "full")]
     pub(crate) fn add(&mut self, other: &Self) {
         self.path_tags += other.path_tags;
         self.path_data += other.path_data;
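
Note on the hunks above: glyph, gradient, and image data now live in the new Resources struct behind the "full" feature rather than directly on Encoding. A minimal sketch of what calling code looks like after this move, assuming the layout shown in this diff and the default "full" feature enabled (the function name is illustrative, not part of the crate):

use vello_encoding::Encoding;

/// Returns true if the encoding still carries late bound resources.
fn has_late_bound_resources(encoding: &Encoding) -> bool {
    // Patches for gradients, images, and glyph runs moved from
    // `encoding.patches` to `encoding.resources.patches`.
    !encoding.resources.patches.is_empty()
}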

View file

@@ -8,6 +8,7 @@ use std::collections::{hash_map::Entry, HashMap};
 const DEFAULT_ATLAS_SIZE: i32 = 1024;
 const MAX_ATLAS_SIZE: i32 = 8192;
 
+#[derive(Default)]
 pub struct Images<'a> {
     pub width: u32,
     pub height: u32,

View file

@@ -8,12 +8,16 @@ mod clip;
 mod config;
 mod draw;
 mod encoding;
+#[cfg(feature = "full")]
 mod glyph;
+#[cfg(feature = "full")]
 mod glyph_cache;
+#[cfg(feature = "full")]
 mod image_cache;
 mod math;
 mod monoid;
 mod path;
+#[cfg(feature = "full")]
 mod ramp_cache;
 mod resolve;
@@ -28,11 +32,17 @@ pub use draw::{
     DrawRadialGradient, DrawTag,
 };
 pub use encoding::{Encoding, StreamOffsets};
-pub use glyph::{Glyph, GlyphRun};
 pub use math::Transform;
 pub use monoid::Monoid;
 pub use path::{
     Cubic, Path, PathBbox, PathEncoder, PathMonoid, PathSegment, PathSegmentType, PathTag, Tile,
 };
-pub use ramp_cache::Ramps;
-pub use resolve::{Layout, Patch, Resolver};
+pub use resolve::{resolve_solid_paths_only, Layout};
+
+#[cfg(feature = "full")]
+pub use {
+    encoding::Resources,
+    glyph::{Glyph, GlyphRun},
+    ramp_cache::Ramps,
+    resolve::{Patch, Resolver},
+};
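
The re-exports are now split into an always-available set and a "full"-only set. A sketch of the imports a consumer might write against that split; the cfg here assumes the consuming crate forwards a feature of the same name to vello_encoding, which is not shown in this diff:

use vello_encoding::{resolve_solid_paths_only, Encoding, Layout, StreamOffsets, Transform};

#[cfg(feature = "full")]
use vello_encoding::{Glyph, GlyphRun, Patch, Ramps, Resolver, Resources};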

View file

@@ -72,6 +72,7 @@ impl Mul for Transform
     }
 }
 
+#[allow(dead_code)]
 pub fn point_to_f32(point: kurbo::Point) -> [f32; 2] {
     [point.x as f32, point.y as f32]
 }

View file

@@ -403,6 +403,7 @@ impl<'a> PathEncoder<'a> {
     }
 }
 
+#[cfg(feature = "full")]
 impl fello::scale::Pen for PathEncoder<'_> {
     fn move_to(&mut self, x: f32, y: f32) {
         self.move_to(x, y)

View file

@@ -1,16 +1,19 @@
 // Copyright 2022 The Vello authors
 // SPDX-License-Identifier: Apache-2.0 OR MIT
 
-use std::ops::Range;
-
 use bytemuck::{Pod, Zeroable};
-use peniko::{Extend, Image};
 
-use super::{
-    glyph_cache::{CachedRange, GlyphCache, GlyphKey},
-    image_cache::{ImageCache, Images},
-    ramp_cache::{RampCache, Ramps},
-    DrawTag, Encoding, PathTag, StreamOffsets, Transform,
-};
+use super::{DrawTag, Encoding, PathTag, StreamOffsets, Transform};
+
+#[cfg(feature = "full")]
+use {
+    super::{
+        glyph_cache::{CachedRange, GlyphCache, GlyphKey},
+        image_cache::{ImageCache, Images},
+        ramp_cache::{RampCache, Ramps},
+    },
+    peniko::Image,
+    std::ops::Range,
+};
 
 /// Layout of a packed encoding.
@@ -100,7 +103,63 @@ impl Layout {
     }
 }
 
+/// Resolves and packs an encoding that contains only paths with solid color
+/// fills.
+///
+/// Panics if the encoding contains any late bound resources (gradients, images
+/// or glyph runs).
+pub fn resolve_solid_paths_only(encoding: &Encoding, packed: &mut Vec<u8>) -> Layout {
+    #[cfg(feature = "full")]
+    assert!(
+        encoding.resources.patches.is_empty(),
+        "this resolve function doesn't support late bound resources"
+    );
+    let data = packed;
+    data.clear();
+    let mut layout = Layout {
+        n_paths: encoding.n_paths,
+        n_clips: encoding.n_clips,
+        ..Layout::default()
+    };
+    let SceneBufferSizes {
+        buffer_size,
+        path_tag_padded,
+    } = SceneBufferSizes::new(encoding, &StreamOffsets::default());
+    data.reserve(buffer_size);
+    // Path tag stream
+    layout.path_tag_base = size_to_words(data.len());
+    data.extend_from_slice(bytemuck::cast_slice(&encoding.path_tags));
+    for _ in 0..encoding.n_open_clips {
+        data.extend_from_slice(bytemuck::bytes_of(&PathTag::PATH));
+    }
+    data.resize(path_tag_padded, 0);
+    // Path data stream
+    layout.path_data_base = size_to_words(data.len());
+    data.extend_from_slice(bytemuck::cast_slice(&encoding.path_data));
+    // Draw tag stream
+    layout.draw_tag_base = size_to_words(data.len());
+    // Bin data follows draw info
+    layout.bin_data_start = encoding.draw_tags.iter().map(|tag| tag.info_size()).sum();
+    data.extend_from_slice(bytemuck::cast_slice(&encoding.draw_tags));
+    for _ in 0..encoding.n_open_clips {
+        data.extend_from_slice(bytemuck::bytes_of(&DrawTag::END_CLIP));
+    }
+    // Draw data stream
+    layout.draw_data_base = size_to_words(data.len());
+    data.extend_from_slice(bytemuck::cast_slice(&encoding.draw_data));
+    // Transform stream
+    layout.transform_base = size_to_words(data.len());
+    data.extend_from_slice(bytemuck::cast_slice(&encoding.transforms));
+    // Linewidth stream
+    layout.linewidth_base = size_to_words(data.len());
+    data.extend_from_slice(bytemuck::cast_slice(&encoding.linewidths));
+    layout.n_draw_objects = layout.n_paths;
+    assert_eq!(buffer_size, data.len());
+    layout
+}
+
 /// Resolver for late bound resources.
+#[cfg(feature = "full")]
 #[derive(Default)]
 pub struct Resolver {
     glyph_cache: GlyphCache,
@@ -112,6 +171,7 @@ pub struct Resolver {
     patches: Vec<ResolvedPatch>,
 }
 
+#[cfg(feature = "full")]
 impl Resolver {
     /// Creates a new resource cache.
     pub fn new() -> Self {
@@ -125,7 +185,12 @@ impl Resolver {
         encoding: &Encoding,
         packed: &mut Vec<u8>,
     ) -> (Layout, Ramps<'a>, Images<'a>) {
-        let sizes = self.resolve_patches(encoding);
+        let resources = &encoding.resources;
+        if resources.patches.is_empty() {
+            let layout = resolve_solid_paths_only(encoding, packed);
+            return (layout, Ramps::default(), Images::default());
+        }
+        let patch_sizes = self.resolve_patches(encoding);
         self.resolve_pending_images();
         let data = packed;
         data.clear();
@@ -134,20 +199,11 @@ impl Resolver {
             n_clips: encoding.n_clips,
             ..Layout::default()
         };
-        // Compute size of data buffer
-        let n_path_tags =
-            encoding.path_tags.len() + sizes.path_tags + encoding.n_open_clips as usize;
-        let path_tag_padded = align_up(n_path_tags, 4 * crate::config::PATH_REDUCE_WG);
-        let capacity = path_tag_padded
-            + slice_size_in_bytes(&encoding.path_data, sizes.path_data)
-            + slice_size_in_bytes(
-                &encoding.draw_tags,
-                sizes.draw_tags + encoding.n_open_clips as usize,
-            )
-            + slice_size_in_bytes(&encoding.draw_data, sizes.draw_data)
-            + slice_size_in_bytes(&encoding.transforms, sizes.transforms)
-            + slice_size_in_bytes(&encoding.linewidths, sizes.linewidths);
-        data.reserve(capacity);
+        let SceneBufferSizes {
+            buffer_size,
+            path_tag_padded,
+        } = SceneBufferSizes::new(encoding, &patch_sizes);
+        data.reserve(buffer_size);
         // Path tag stream
         layout.path_tag_base = size_to_words(data.len());
         {
@@ -156,7 +212,7 @@ impl Resolver {
             for patch in &self.patches {
                 if let ResolvedPatch::GlyphRun { index, glyphs, .. } = patch {
                     layout.n_paths += 1;
-                    let stream_offset = encoding.glyph_runs[*index].stream_offsets.path_tags;
+                    let stream_offset = resources.glyph_runs[*index].stream_offsets.path_tags;
                     if pos < stream_offset {
                         data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
                         pos = stream_offset;
@@ -185,7 +241,9 @@ impl Resolver {
             let stream = &encoding.path_data;
             for patch in &self.patches {
                 if let ResolvedPatch::GlyphRun { index, glyphs, .. } = patch {
-                    let stream_offset = encoding.glyph_runs[*index].stream_offsets.path_data;
+                    let stream_offset = encoding.resources.glyph_runs[*index]
+                        .stream_offsets
+                        .path_data;
                     if pos < stream_offset {
                         data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
                         pos = stream_offset;
@@ -269,14 +327,14 @@ impl Resolver {
                     transform,
                 } = patch
                 {
-                    let run = &encoding.glyph_runs[*index];
-                    let stream_offset = encoding.glyph_runs[*index].stream_offsets.transforms;
+                    let run = &resources.glyph_runs[*index];
+                    let stream_offset = run.stream_offsets.transforms;
                    if pos < stream_offset {
                         data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
                         pos = stream_offset;
                     }
                     if let Some(glyph_transform) = run.glyph_transform {
-                        for glyph in &encoding.glyphs[run.glyphs.clone()] {
+                        for glyph in &resources.glyphs[run.glyphs.clone()] {
                             let xform = *transform
                                 * Transform {
                                     matrix: [1.0, 0.0, 0.0, -1.0],
@@ -286,7 +344,7 @@ impl Resolver {
                             data.extend_from_slice(bytemuck::bytes_of(&xform));
                         }
                     } else {
-                        for glyph in &encoding.glyphs[run.glyphs.clone()] {
+                        for glyph in &resources.glyphs[run.glyphs.clone()] {
                             let xform = *transform
                                 * Transform {
                                     matrix: [1.0, 0.0, 0.0, -1.0],
@@ -308,7 +366,7 @@ impl Resolver {
             let stream = &encoding.linewidths;
             for patch in &self.patches {
                 if let ResolvedPatch::GlyphRun { index, glyphs, .. } = patch {
-                    let stream_offset = encoding.glyph_runs[*index].stream_offsets.linewidths;
+                    let stream_offset = resources.glyph_runs[*index].stream_offsets.linewidths;
                     if pos < stream_offset {
                         data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
                         pos = stream_offset;
@@ -325,7 +383,7 @@ impl Resolver {
            }
         }
         layout.n_draw_objects = layout.n_paths;
-        assert_eq!(capacity, data.len());
+        assert_eq!(buffer_size, data.len());
         (layout, self.ramp_cache.ramps(), self.image_cache.images())
     }
@@ -337,14 +395,15 @@ impl Resolver {
         self.pending_images.clear();
         self.patches.clear();
         let mut sizes = StreamOffsets::default();
-        for patch in &encoding.patches {
+        let resources = &encoding.resources;
+        for patch in &resources.patches {
             match patch {
                 Patch::Ramp {
                     draw_data_offset,
                     stops,
                     extend,
                 } => {
-                    let ramp_id = self.ramp_cache.add(&encoding.color_stops[stops.clone()]);
+                    let ramp_id = self.ramp_cache.add(&resources.color_stops[stops.clone()]);
                     self.patches.push(ResolvedPatch::Ramp {
                         draw_data_offset: *draw_data_offset + sizes.draw_data,
                         ramp_id,
@@ -353,7 +412,7 @@ impl Resolver {
                 }
                 Patch::GlyphRun { index } => {
                     let mut run_sizes = StreamOffsets::default();
-                    let run = &encoding.glyph_runs[*index];
+                    let run = &resources.glyph_runs[*index];
                     let font_id = run.font.data.id();
                     let font_size_u32 = run.font_size.to_bits();
                     let Ok(font_file) = fello::raw::FileRef::new(run.font.data.as_ref()) else { continue };
@@ -364,8 +423,8 @@ impl Resolver {
                         }
                     };
                     let Some(font) = font else { continue };
-                    let glyphs = &encoding.glyphs[run.glyphs.clone()];
-                    let coords = &encoding.normalized_coords[run.normalized_coords.clone()];
+                    let glyphs = &resources.glyphs[run.glyphs.clone()];
+                    let coords = &resources.normalized_coords[run.normalized_coords.clone()];
                     let key = fello::FontKey {
                         data_id: font_id,
                         index: run.font.index,
@@ -467,8 +526,9 @@ impl Resolver {
     }
 }
 
-#[derive(Clone)]
 /// Patch for a late bound resource.
+#[cfg(feature = "full")]
+#[derive(Clone)]
 pub enum Patch {
     /// Gradient ramp resource.
     Ramp {
@@ -494,12 +554,14 @@ pub enum Patch {
 }
 
 /// Image to be allocated in the atlas.
+#[cfg(feature = "full")]
 #[derive(Clone, Debug)]
 struct PendingImage {
     image: Image,
     xy: Option<(u32, u32)>,
 }
 
+#[cfg(feature = "full")]
 #[derive(Clone, Debug)]
 enum ResolvedPatch {
     Ramp {
@@ -526,6 +588,36 @@ enum ResolvedPatch {
     },
 }
 
+struct SceneBufferSizes {
+    /// Full size of the scene buffer in bytes.
+    buffer_size: usize,
+    /// Padded length of the path tag stream in bytes.
+    path_tag_padded: usize,
+}
+
+impl SceneBufferSizes {
+    /// Computes common scene buffer sizes for the given encoding and patch
+    /// stream sizes.
+    fn new(encoding: &Encoding, patch_sizes: &StreamOffsets) -> Self {
+        let n_path_tags =
+            encoding.path_tags.len() + patch_sizes.path_tags + encoding.n_open_clips as usize;
+        let path_tag_padded = align_up(n_path_tags, 4 * crate::config::PATH_REDUCE_WG);
+        let buffer_size = path_tag_padded
+            + slice_size_in_bytes(&encoding.path_data, patch_sizes.path_data)
+            + slice_size_in_bytes(
+                &encoding.draw_tags,
+                patch_sizes.draw_tags + encoding.n_open_clips as usize,
+            )
+            + slice_size_in_bytes(&encoding.draw_data, patch_sizes.draw_data)
+            + slice_size_in_bytes(&encoding.transforms, patch_sizes.transforms)
+            + slice_size_in_bytes(&encoding.linewidths, patch_sizes.linewidths);
+        Self {
+            buffer_size,
+            path_tag_padded,
+        }
+    }
+}
+
 fn slice_size_in_bytes<T: Sized>(slice: &[T], extra: usize) -> usize {
     (slice.len() + extra) * std::mem::size_of::<T>()
 }
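
The new resolve_solid_paths_only above packs an encoding without touching the ramp, image, or glyph caches, and the resolver now falls back to it when no patches are present. A minimal usage sketch, assuming the signature shown in this diff and the unconditional re-export from lib.rs (the wrapper function and its name are illustrative):

use vello_encoding::{resolve_solid_paths_only, Encoding, Layout};

/// Packs an encoding that contains nothing but solid-color fills.
/// Panics if it carries gradients, images, or glyph runs.
fn pack_solid(encoding: &Encoding) -> (Layout, Vec<u8>) {
    let mut packed = Vec::new();
    let layout = resolve_solid_paths_only(encoding, &mut packed);
    (layout, packed)
}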

View file

@@ -205,8 +205,8 @@ impl<'a> DrawGlyphs<'a> {
     /// Creates a new builder for encoding a glyph run for the specified
     /// encoding with the given font.
     pub fn new(encoding: &'a mut Encoding, font: &Font) -> Self {
-        let coords_start = encoding.normalized_coords.len();
-        let glyphs_start = encoding.glyphs.len();
+        let coords_start = encoding.resources.normalized_coords.len();
+        let glyphs_start = encoding.resources.glyphs.len();
         let stream_offsets = encoding.stream_offsets();
         Self {
             encoding,
@@ -264,10 +264,14 @@ impl<'a> DrawGlyphs<'a> {
     /// Sets the normalized design space coordinates for a variable font instance.
     pub fn normalized_coords(mut self, coords: &[NormalizedCoord]) -> Self {
         self.encoding
+            .resources
             .normalized_coords
             .truncate(self.run.normalized_coords.start);
-        self.encoding.normalized_coords.extend_from_slice(coords);
-        self.run.normalized_coords.end = self.encoding.normalized_coords.len();
+        self.encoding
+            .resources
+            .normalized_coords
+            .extend_from_slice(coords);
+        self.run.normalized_coords.end = self.encoding.resources.normalized_coords.len();
         self
     }
@@ -292,18 +296,19 @@ impl<'a> DrawGlyphs<'a> {
     ///
     /// The `style` parameter accepts either `Fill` or `&Stroke` types.
     pub fn draw(mut self, style: impl Into<StyleRef<'a>>, glyphs: impl Iterator<Item = Glyph>) {
+        let resources = &mut self.encoding.resources;
         self.run.style = style.into().to_owned();
-        self.encoding.glyphs.extend(glyphs);
-        self.run.glyphs.end = self.encoding.glyphs.len();
+        resources.glyphs.extend(glyphs);
+        self.run.glyphs.end = resources.glyphs.len();
         if self.run.glyphs.is_empty() {
-            self.encoding
+            resources
                 .normalized_coords
                 .truncate(self.run.normalized_coords.start);
             return;
         }
-        let index = self.encoding.glyph_runs.len();
-        self.encoding.glyph_runs.push(self.run);
-        self.encoding.patches.push(Patch::GlyphRun { index });
+        let index = resources.glyph_runs.len();
+        resources.glyph_runs.push(self.run);
+        resources.patches.push(Patch::GlyphRun { index });
         self.encoding.encode_brush(self.brush, self.brush_alpha);
     }
 }
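
With the hunks above, DrawGlyphs routes glyphs, normalized coordinates, and the GlyphRun patch through encoding.resources. A sketch of the builder flow those methods imply, assuming the "full" feature and the types in scope in this file; the fill style and the absence of other builder calls (font size, brush) are assumptions, not part of the diff:

fn encode_run(
    encoding: &mut Encoding,
    font: &Font,
    coords: &[NormalizedCoord],
    glyphs: impl Iterator<Item = Glyph>,
) {
    // Each call below now writes into encoding.resources.
    DrawGlyphs::new(encoding, font)
        .normalized_coords(coords)
        .draw(peniko::Fill::NonZero, glyphs);
}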