Merge pull request #284 from linebender/glyph-run

Glyph run API
This commit is contained in:
Chad Brokaw 2023-03-06 18:39:16 -05:00 committed by GitHub
commit 2f268d4e0f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 976 additions and 387 deletions

View file

@ -45,7 +45,7 @@ parking_lot = "0.12"
bytemuck = { version = "1.12.1", features = ["derive"] }
smallvec = "1.8.0"
moscato = { git = "https://github.com/dfrg/pinot", rev = "59db153" }
peniko = { git = "https://github.com/linebender/peniko", rev = "8cb710f" }
peniko = { git = "https://github.com/linebender/peniko", rev = "cafdac9a211a0fb2fec5656bd663d1ac770bcc81" }
[workspace.dependencies]
wgpu = "0.15"

View file

@ -104,7 +104,6 @@ async fn render(mut scenes: SceneSet, index: usize, args: &Args) -> Result<()> {
base_color: None,
};
(example_scene.function)(&mut builder, &mut scene_params);
builder.finish();
let mut transform = Affine::IDENTITY;
let (width, height) = if let Some(resolution) = scene_params.resolution {
let ratio = resolution.x / resolution.y;
@ -144,7 +143,6 @@ async fn render(mut scenes: SceneSet, index: usize, args: &Args) -> Result<()> {
let mut scene = Scene::new();
let mut builder = SceneBuilder::for_scene(&mut scene);
builder.append(&fragment, Some(transform));
builder.finish();
let size = Extent3d {
width,
height,

View file

@ -14,14 +14,17 @@
//
// Also licensed under MIT license, at your choice.
use std::sync::Arc;
use vello::{
encoding::Glyph,
glyph::{
pinot,
pinot::{FontRef, TableProvider},
GlyphContext,
},
kurbo::Affine,
peniko::Brush,
peniko::{Blob, Brush, BrushRef, Font, StyleRef},
SceneBuilder,
};
@ -31,12 +34,69 @@ const FONT_DATA: &[u8] = include_bytes!("../../assets/roboto/Roboto-Regular.ttf"
/// Simple text rendering helper backed by a single bundled font.
pub struct SimpleText {
    // Glyph context from the previous CPU glyph-rendering path.
    // NOTE(review): retained alongside the new `draw_glyphs` API — confirm still needed.
    gcx: GlyphContext,
    // The bundled font used for all runs (see `FONT_DATA`).
    font: Font,
}
impl SimpleText {
/// Creates a new `SimpleText` with a fresh glyph context and a `Font`
/// wrapping the bundled `FONT_DATA` blob (font index 0).
pub fn new() -> Self {
    Self {
        gcx: GlyphContext::new(),
        font: Font::new(Blob::new(Arc::new(FONT_DATA)), 0),
    }
}
/// Encodes a glyph run for `text` into `builder`.
///
/// Glyph ids and horizontal positions are computed on the CPU from the
/// bundled font's `cmap` and `hmtx` tables; the run itself is encoded
/// through the builder's `draw_glyphs` API.
pub fn add_run<'a>(
    &mut self,
    builder: &mut SceneBuilder,
    size: f32,                        // font size in pixels per em
    brush: impl Into<BrushRef<'a>>,
    transform: Affine,                // run-level transform
    glyph_transform: Option<Affine>,  // optional per-glyph transform
    style: impl Into<StyleRef<'a>>,   // fill or stroke style
    text: &str,
) {
    // Parse the raw font bytes directly; used here only to read layout metrics.
    let font = FontRef {
        data: FONT_DATA,
        offset: 0,
    };
    let brush = brush.into();
    let style = style.into();
    // Layout needs both a character map and horizontal metrics; if either
    // table is missing, nothing is drawn.
    if let Some(cmap) = font.cmap() {
        if let Some(hmtx) = font.hmtx() {
            // Scale factor from font units to pixels; 1000 units/em is the
            // fallback when the `head` table is absent.
            let upem = font.head().map(|head| head.units_per_em()).unwrap_or(1000) as f64;
            let scale = size as f64 / upem;
            let hmetrics = hmtx.hmetrics();
            // Glyphs past the end of the metrics array reuse the last
            // entry's advance width.
            let default_advance = hmetrics
                .get(hmetrics.len().saturating_sub(1))
                .map(|h| h.advance_width)
                .unwrap_or(0);
            // Running horizontal pen position, advanced per glyph below.
            let mut pen_x = 0f64;
            builder
                .draw_glyphs(&self.font)
                .font_size(size)
                .transform(transform)
                .glyph_transform(glyph_transform)
                .brush(brush)
                .draw(
                    style,
                    // Map each char to a glyph id (0 = missing glyph) and
                    // position it at the current pen, then advance the pen
                    // by the glyph's scaled advance width.
                    text.chars().map(|ch| {
                        let gid = cmap.map(ch as u32).unwrap_or(0);
                        let advance = hmetrics
                            .get(gid as usize)
                            .map(|h| h.advance_width)
                            .unwrap_or(default_advance)
                            as f64
                            * scale;
                        let x = pen_x as f32;
                        pen_x += advance;
                        Glyph {
                            id: gid as u32,
                            x,
                            y: 0.0,
                        }
                    }),
                )
        }
    }
}

View file

@ -124,12 +124,14 @@ fn animated_text(sb: &mut SceneBuilder, params: &mut SceneParams) {
Affine::translate((110.0, 600.0)),
s,
);
params.text.add(
params.text.add_run(
sb,
None,
text_size,
None,
Color::WHITE,
Affine::translate((110.0, 700.0)),
// Add a skew to simulate an oblique font.
Some(Affine::skew(20f64.to_radians().tan(), 0.0)),
&Stroke::new(1.0),
s,
);
let th = params.time as f64;
@ -398,7 +400,6 @@ fn blend_square(blend: BlendMode) -> SceneFragment {
let mut fragment = SceneFragment::default();
let mut sb = SceneBuilder::for_fragment(&mut fragment);
render_blend_square(&mut sb, blend, Affine::IDENTITY);
sb.finish();
fragment
}

View file

@ -111,7 +111,6 @@ impl ExtractComponent for VelloScene {
let mut scene = Scene::default();
let mut builder = SceneBuilder::for_scene(&mut scene);
builder.append(&fragment.0, None);
builder.finish();
Some(Self(scene, target.0.clone()))
}
}

View file

@ -268,7 +268,6 @@ fn run(
base_color: None,
};
(example_scene.function)(&mut builder, &mut scene_params);
builder.finish();
// If the user specifies a base color in the CLI we use that. Otherwise we use any
// color specified by the scene. The default is black.
@ -291,7 +290,6 @@ fn run(
transform = transform * Affine::scale(scale_factor);
}
builder.append(&fragment, Some(transform));
builder.finish();
let surface_texture = render_state
.surface
.surface

View file

@ -1,5 +1,6 @@
// SPDX-License-Identifier: Apache-2.0 OR MIT OR Unlicense
// This must be kept in sync with the struct in src/encoding/resolve.rs
struct Config {
width_in_tiles: u32,
height_in_tiles: u32,

View file

@ -18,19 +18,22 @@
mod draw;
mod encoding;
mod glyph;
mod glyph_cache;
mod math;
mod monoid;
mod packed;
mod path;
pub mod resource;
mod ramp_cache;
mod resolve;
pub use draw::{
DrawBeginClip, DrawColor, DrawImage, DrawLinearGradient, DrawMonoid, DrawRadialGradient,
DrawTag,
};
pub use encoding::Encoding;
pub use encoding::{Encoding, StreamOffsets};
pub use glyph::{Glyph, GlyphRun};
pub use math::Transform;
pub use monoid::Monoid;
pub use packed::{Config, Layout, PackedEncoding};
pub use path::{PathBbox, PathEncoder, PathMonoid, PathSegment, PathSegmentType, PathTag};
pub use ramp_cache::Ramps;
pub use resolve::{Config, Layout, Patch, Resolver};

View file

@ -14,15 +14,15 @@
//
// Also licensed under MIT license, at your choice.
use super::resource::Patch;
use super::{
DrawColor, DrawLinearGradient, DrawRadialGradient, DrawTag, PathEncoder, PathTag, Transform,
resolve::Patch, DrawColor, DrawLinearGradient, DrawRadialGradient, DrawTag, Glyph, GlyphRun,
PathEncoder, PathTag, Transform,
};
use peniko::{kurbo::Shape, BlendMode, BrushRef, Color, ColorStop, Extend, GradientKind};
use peniko::{kurbo::Shape, BlendMode, BrushRef, ColorStop, Extend, GradientKind};
/// Encoded data streams for a scene.
#[derive(Default)]
#[derive(Clone, Default)]
pub struct Encoding {
/// The path tag stream.
pub path_tags: Vec<PathTag>,
@ -40,12 +40,20 @@ pub struct Encoding {
pub transforms: Vec<Transform>,
/// The line width stream.
pub linewidths: Vec<f32>,
/// Positioned glyph buffer.
pub glyphs: Vec<Glyph>,
/// Sequences of glyphs.
pub glyph_runs: Vec<GlyphRun>,
/// Normalized coordinate buffer for variable fonts.
pub normalized_coords: Vec<i16>,
/// Number of encoded paths.
pub n_paths: u32,
/// Number of encoded path segments.
pub n_path_segments: u32,
/// Number of encoded clips/layers.
pub n_clips: u32,
/// Number of unclosed clips/layers.
pub n_open_clips: u32,
}
impl Encoding {
@ -67,9 +75,13 @@ impl Encoding {
self.linewidths.clear();
self.draw_data.clear();
self.draw_tags.clear();
self.glyphs.clear();
self.glyph_runs.clear();
self.normalized_coords.clear();
self.n_paths = 0;
self.n_path_segments = 0;
self.n_clips = 0;
self.n_open_clips = 0;
self.patches.clear();
self.color_stops.clear();
if !is_fragment {
@ -81,33 +93,70 @@ impl Encoding {
/// Appends another encoding to this one with an optional transform.
pub fn append(&mut self, other: &Self, transform: &Option<Transform>) {
let stops_base = self.color_stops.len();
let draw_data_base = self.draw_data.len();
let glyph_runs_base = self.glyph_runs.len();
let glyphs_base = self.glyphs.len();
let coords_base = self.normalized_coords.len();
let offsets = self.stream_offsets();
self.path_tags.extend_from_slice(&other.path_tags);
self.path_data.extend_from_slice(&other.path_data);
self.draw_tags.extend_from_slice(&other.draw_tags);
self.draw_data.extend_from_slice(&other.draw_data);
self.glyphs.extend_from_slice(&other.glyphs);
self.normalized_coords
.extend_from_slice(&other.normalized_coords);
self.glyph_runs
.extend(other.glyph_runs.iter().cloned().map(|mut run| {
run.glyphs.start += glyphs_base;
run.normalized_coords.start += coords_base;
run.stream_offsets.path_tags += offsets.path_tags;
run.stream_offsets.path_data += offsets.path_data;
run.stream_offsets.draw_tags += offsets.draw_tags;
run.stream_offsets.draw_data += offsets.draw_data;
run.stream_offsets.transforms += offsets.transforms;
run.stream_offsets.linewidths += offsets.linewidths;
run
}));
self.n_paths += other.n_paths;
self.n_path_segments += other.n_path_segments;
self.n_clips += other.n_clips;
self.n_open_clips += other.n_open_clips;
self.patches
.extend(other.patches.iter().map(|patch| match patch {
Patch::Ramp { offset, stops } => {
let stops = stops.start + stops_base..stops.end + stops_base;
Patch::Ramp {
offset: draw_data_base + offset,
offset: offset + offsets.draw_data,
stops,
}
}
Patch::GlyphRun { index } => Patch::GlyphRun {
index: index + glyph_runs_base,
},
}));
self.color_stops.extend_from_slice(&other.color_stops);
if let Some(transform) = *transform {
self.transforms
.extend(other.transforms.iter().map(|x| transform * *x));
for run in &mut self.glyph_runs[glyph_runs_base..] {
run.transform = transform * run.transform;
}
} else {
self.transforms.extend_from_slice(&other.transforms);
}
self.linewidths.extend_from_slice(&other.linewidths);
}
/// Returns a snapshot of the current stream offsets.
pub fn stream_offsets(&self) -> StreamOffsets {
StreamOffsets {
path_tags: self.path_tags.len(),
path_data: self.path_data.len(),
draw_tags: self.draw_tags.len(),
draw_data: self.draw_data.len(),
transforms: self.transforms.len(),
linewidths: self.linewidths.len(),
}
}
}
impl Encoding {
@ -159,7 +208,7 @@ impl Encoding {
match brush.into() {
BrushRef::Solid(color) => {
let color = if alpha != 1.0 {
color_with_alpha(color, alpha)
color.with_alpha_factor(alpha)
} else {
color
};
@ -248,15 +297,19 @@ impl Encoding {
self.draw_data
.extend_from_slice(bytemuck::bytes_of(&DrawBeginClip::new(blend_mode, alpha)));
self.n_clips += 1;
self.n_open_clips += 1;
}
/// Encodes an end clip command.
pub fn encode_end_clip(&mut self) {
self.draw_tags.push(DrawTag::END_CLIP);
// This is a dummy path, and will go away with the new clip impl.
self.path_tags.push(PathTag::PATH);
self.n_paths += 1;
self.n_clips += 1;
if self.n_open_clips > 0 {
self.draw_tags.push(DrawTag::END_CLIP);
// This is a dummy path, and will go away with the new clip impl.
self.path_tags.push(PathTag::PATH);
self.n_paths += 1;
self.n_clips += 1;
self.n_open_clips -= 1;
}
}
// Swap the last two tags in the path tag stream; used for transformed
@ -270,10 +323,8 @@ impl Encoding {
let offset = self.draw_data.len();
let stops_start = self.color_stops.len();
if alpha != 1.0 {
self.color_stops.extend(color_stops.map(|s| ColorStop {
offset: s.offset,
color: color_with_alpha(s.color, alpha),
}));
self.color_stops
.extend(color_stops.map(|stop| stop.with_alpha_factor(alpha)));
} else {
self.color_stops.extend(color_stops);
}
@ -284,7 +335,30 @@ impl Encoding {
}
}
fn color_with_alpha(mut color: Color, alpha: f32) -> Color {
color.a = ((color.a as f32) * alpha) as u8;
color
/// Snapshot of the lengths of each encoded data stream at a point in time.
#[derive(Copy, Clone, Default, Debug)]
pub struct StreamOffsets {
    /// Current length of path tag stream.
    pub path_tags: usize,
    /// Current length of path data stream.
    pub path_data: usize,
    /// Current length of draw tag stream.
    pub draw_tags: usize,
    /// Current length of draw data stream.
    pub draw_data: usize,
    /// Current length of transform stream.
    pub transforms: usize,
    /// Current length of linewidth stream.
    pub linewidths: usize,
}

impl StreamOffsets {
    /// Accumulates the per-stream lengths of `other` into `self`.
    pub(crate) fn add(&mut self, other: &Self) {
        // Destructure once so a newly added field triggers a compile error here.
        let Self {
            path_tags,
            path_data,
            draw_tags,
            draw_data,
            transforms,
            linewidths,
        } = *other;
        self.path_tags += path_tags;
        self.path_data += path_data;
        self.draw_tags += draw_tags;
        self.draw_data += draw_data;
        self.transforms += transforms;
        self.linewidths += linewidths;
    }
}
}

55
src/encoding/glyph.rs Normal file
View file

@ -0,0 +1,55 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Also licensed under MIT license, at your choice.
use std::ops::Range;
use peniko::{Font, Style};
use super::{StreamOffsets, Transform};
/// Positioned glyph.
#[derive(Copy, Clone, Default, Debug)]
pub struct Glyph {
    /// Glyph identifier (index of the glyph within its font).
    pub id: u32,
    /// X-offset in run, relative to transform.
    pub x: f32,
    /// Y-offset in run, relative to transform.
    pub y: f32,
}
/// Properties for a sequence of glyphs in an encoding, all sharing one
/// font, style, size and transform.
#[derive(Clone)]
pub struct GlyphRun {
    /// Font for all glyphs in the run.
    pub font: Font,
    /// Global run transform.
    pub transform: Transform,
    /// Per-glyph transform, composed after each glyph's position within
    /// the run.
    pub glyph_transform: Option<Transform>,
    /// Size of the font in pixels per em.
    pub font_size: f32,
    /// True if hinting is enabled.
    pub hint: bool,
    /// Range of normalized coordinates in the parent encoding (used for
    /// variable font instances).
    pub normalized_coords: Range<usize>,
    /// Fill or stroke style.
    pub style: Style,
    /// Range of glyphs in the parent encoding.
    pub glyphs: Range<usize>,
    /// Stream offsets where this glyph run should be inserted.
    pub stream_offsets: StreamOffsets,
}

View file

@ -0,0 +1,90 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Also licensed under MIT license, at your choice.
use std::collections::HashMap;
use super::{Encoding, StreamOffsets};
use crate::glyph::GlyphProvider;
use peniko::{Fill, Style};
/// Key identifying a cached glyph encoding.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Default, Debug)]
pub struct GlyphKey {
    // Identifier of the source font data — presumably the `Font` blob id;
    // verify against callers.
    pub font_id: u64,
    // Index of the font within a collection.
    pub font_index: u32,
    // Glyph identifier within the font.
    pub glyph_id: u32,
    // Font size stored as an integer so the key can derive `Eq`/`Hash`;
    // the f32-to-u32 encoding is chosen by the caller — TODO confirm.
    pub font_size: u32,
    // Whether hinting is enabled for the glyph.
    pub hint: bool,
}
/// Cache of encoded glyph outlines.
#[derive(Default)]
pub struct GlyphCache {
    /// Shared encoding holding the stream data for all cached glyphs.
    pub encoding: Encoding,
    // Maps a glyph key to the range of `encoding` that holds its data.
    glyphs: HashMap<GlyphKey, CachedRange>,
}
impl GlyphCache {
    /// Clears the cache: resets the shared encoding (keeping it in
    /// fragment mode) and drops all cached glyph ranges.
    pub fn clear(&mut self) {
        self.encoding.reset(true);
        self.glyphs.clear();
    }

    /// Returns the range of encoded data for the given glyph, encoding it
    /// with `scaler` on a cache miss.
    ///
    /// Returns `None` if the scaler fails to encode the glyph.
    pub fn get_or_insert(
        &mut self,
        key: GlyphKey,
        style: &Style,
        scaler: &mut GlyphProvider,
    ) -> Option<CachedRange> {
        // Borrow the encoding separately so the closure below can capture it
        // while `self.glyphs` is borrowed by the entry API.
        let encoding_cache = &mut self.encoding;
        // Encodes the glyph at the end of the shared encoding, capturing the
        // before/after stream offsets that delimit its data.
        let mut encode_glyph = || {
            let start = encoding_cache.stream_offsets();
            scaler.encode_glyph(key.glyph_id as u16, style, encoding_cache)?;
            let end = encoding_cache.stream_offsets();
            Some(CachedRange { start, end })
        };
        // For now, only cache non-zero filled glyphs so we don't need to keep style
        // as part of the key.
        let range = if matches!(style, Style::Fill(Fill::NonZero)) {
            use std::collections::hash_map::Entry;
            match self.glyphs.entry(key) {
                Entry::Occupied(entry) => *entry.get(),
                // If encoding fails, `?` propagates `None` and the vacant
                // entry is left uninserted.
                Entry::Vacant(entry) => *entry.insert(encode_glyph()?),
            }
        } else {
            // Non-cacheable styles are re-encoded on every request.
            encode_glyph()?
        };
        Some(range)
    }
}
/// Range of encoded glyph data in the cache, delimited by the stream
/// offsets captured before and after the glyph was encoded.
#[derive(Copy, Clone, Default, Debug)]
pub struct CachedRange {
    /// Stream offsets at the start of the glyph's data.
    pub start: StreamOffsets,
    /// Stream offsets just past the end of the glyph's data.
    pub end: StreamOffsets,
}

impl CachedRange {
    /// Returns the per-stream lengths of the cached data (`end - start`
    /// for each stream).
    pub fn len(&self) -> StreamOffsets {
        StreamOffsets {
            path_tags: self.end.path_tags - self.start.path_tags,
            path_data: self.end.path_data - self.start.path_data,
            draw_tags: self.end.draw_tags - self.start.draw_tags,
            draw_data: self.end.draw_data - self.start.draw_data,
            transforms: self.end.transforms - self.start.transforms,
            linewidths: self.end.linewidths - self.start.linewidths,
        }
    }
}

View file

@ -20,7 +20,7 @@ use bytemuck::{Pod, Zeroable};
use peniko::kurbo;
/// Affine transformation matrix.
#[derive(Copy, Clone, PartialEq, Pod, Zeroable)]
#[derive(Copy, Clone, Debug, PartialEq, Pod, Zeroable)]
#[repr(C)]
pub struct Transform {
/// 2x2 matrix.

View file

@ -1,223 +0,0 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Also licensed under MIT license, at your choice.
use bytemuck::{Pod, Zeroable};
use super::{
resource::{Patch, ResourceCache, Token},
DrawTag, Encoding, PathTag, Transform,
};
use crate::shaders;
/// Layout of a packed encoding.
#[derive(Clone, Copy, Debug, Default, Zeroable, Pod)]
#[repr(C)]
pub struct Layout {
/// Number of draw objects.
pub n_draw_objects: u32,
/// Number of paths.
pub n_paths: u32,
/// Number of clips.
pub n_clips: u32,
/// Start of binning data.
pub bin_data_start: u32,
/// Start of path tag stream.
pub path_tag_base: u32,
/// Start of path data stream.
pub path_data_base: u32,
/// Start of draw tag stream.
pub draw_tag_base: u32,
/// Start of draw data stream.
pub draw_data_base: u32,
/// Start of transform stream.
pub transform_base: u32,
/// Start of linewidth stream.
pub linewidth_base: u32,
}
/// Scene configuration. This data structure must be kept in sync with the definition in
/// shaders/shared/config.wgsl.
#[derive(Clone, Copy, Debug, Default, Zeroable, Pod)]
#[repr(C)]
pub struct Config {
/// Width of the scene in tiles.
pub width_in_tiles: u32,
/// Height of the scene in tiles.
pub height_in_tiles: u32,
/// Width of the target in pixels.
pub target_width: u32,
/// Height of the target in pixels.
pub target_height: u32,
/// The base background color applied to the target before any blends.
pub base_color: u32,
/// Layout of packed scene data.
pub layout: Layout,
/// Size of binning buffer allocation (in u32s).
pub binning_size: u32,
/// Size of tile buffer allocation (in Tiles).
pub tiles_size: u32,
/// Size of segment buffer allocation (in PathSegments).
pub segments_size: u32,
/// Size of per-tile command list buffer allocation (in u32s).
pub ptcl_size: u32,
}
/// Packed encoding of scene data.
#[derive(Default)]
pub struct PackedEncoding {
/// Layout of the packed scene data.
pub layout: Layout,
/// Packed scene data.
pub data: Vec<u8>,
/// Token for current cached resource state.
pub resources: Token,
}
impl PackedEncoding {
/// Creates a new packed encoding.
pub fn new() -> Self {
Self::default()
}
/// Returns the path tag stream.
pub fn path_tags(&self) -> &[PathTag] {
let start = self.layout.path_tag_base as usize * 4;
let end = self.layout.path_data_base as usize * 4;
bytemuck::cast_slice(&self.data[start..end])
}
/// Returns the path tag stream in chunks of 4.
pub fn path_tags_chunked(&self) -> &[u32] {
let start = self.layout.path_tag_base as usize * 4;
let end = self.layout.path_data_base as usize * 4;
bytemuck::cast_slice(&self.data[start..end])
}
/// Returns the path data stream.
pub fn path_data(&self) -> &[[f32; 2]] {
let start = self.layout.path_data_base as usize * 4;
let end = self.layout.draw_tag_base as usize * 4;
bytemuck::cast_slice(&self.data[start..end])
}
/// Returns the draw tag stream.
pub fn draw_tags(&self) -> &[DrawTag] {
let start = self.layout.draw_tag_base as usize * 4;
let end = self.layout.draw_data_base as usize * 4;
bytemuck::cast_slice(&self.data[start..end])
}
/// Returns the draw data stream.
pub fn draw_data(&self) -> &[u32] {
let start = self.layout.draw_data_base as usize * 4;
let end = self.layout.transform_base as usize * 4;
bytemuck::cast_slice(&self.data[start..end])
}
/// Returns the transform stream.
pub fn transforms(&self) -> &[Transform] {
let start = self.layout.transform_base as usize * 4;
let end = self.layout.linewidth_base as usize * 4;
bytemuck::cast_slice(&self.data[start..end])
}
/// Returns the linewidth stream.
pub fn linewidths(&self) -> &[f32] {
let start = self.layout.linewidth_base as usize * 4;
bytemuck::cast_slice(&self.data[start..])
}
}
impl PackedEncoding {
/// Packs the given encoding into self using the specified cache to handle
/// late bound resources.
pub fn pack(&mut self, encoding: &Encoding, resource_cache: &mut ResourceCache) {
// Advance the resource cache epoch.
self.resources = resource_cache.advance();
// Pack encoded data.
let layout = &mut self.layout;
*layout = Layout::default();
layout.n_paths = encoding.n_paths;
layout.n_draw_objects = encoding.n_paths;
layout.n_clips = encoding.n_clips;
let data = &mut self.data;
data.clear();
// Path tag stream
let n_path_tags = encoding.path_tags.len();
let path_tag_padded = align_up(n_path_tags, 4 * shaders::PATHTAG_REDUCE_WG);
let capacity = path_tag_padded
+ slice_size_in_bytes(&encoding.path_data)
+ slice_size_in_bytes(&encoding.draw_tags)
+ slice_size_in_bytes(&encoding.draw_data)
+ slice_size_in_bytes(&encoding.transforms)
+ slice_size_in_bytes(&encoding.linewidths);
data.reserve(capacity);
layout.path_tag_base = size_to_words(data.len());
data.extend_from_slice(bytemuck::cast_slice(&encoding.path_tags));
data.resize(path_tag_padded, 0);
// Path data stream
layout.path_data_base = size_to_words(data.len());
data.extend_from_slice(&encoding.path_data);
// Draw tag stream
layout.draw_tag_base = size_to_words(data.len());
data.extend_from_slice(bytemuck::cast_slice(&encoding.draw_tags));
// Bin data follows draw info
layout.bin_data_start = encoding.draw_tags.iter().map(|tag| tag.info_size()).sum();
// Draw data stream
layout.draw_data_base = size_to_words(data.len());
// Handle patches, if any
if !encoding.patches.is_empty() {
let stop_data = &encoding.color_stops;
let mut pos = 0;
for patch in &encoding.patches {
let (offset, value) = match patch {
Patch::Ramp { offset, stops } => {
let ramp_id = resource_cache.add_ramp(&stop_data[stops.clone()]);
(*offset, ramp_id)
}
};
if pos < offset {
data.extend_from_slice(&encoding.draw_data[pos..offset]);
}
data.extend_from_slice(bytemuck::bytes_of(&value));
pos = offset + 4;
}
if pos < encoding.draw_data.len() {
data.extend_from_slice(&encoding.draw_data[pos..])
}
} else {
data.extend_from_slice(&encoding.draw_data);
}
// Transform stream
layout.transform_base = size_to_words(data.len());
data.extend_from_slice(bytemuck::cast_slice(&encoding.transforms));
// Linewidth stream
layout.linewidth_base = size_to_words(data.len());
data.extend_from_slice(bytemuck::cast_slice(&encoding.linewidths));
}
}
/// Returns the size in bytes occupied by the elements of `slice`.
///
/// Uses the standard-library `size_of_val`, which for a `[T]` slice is
/// exactly `slice.len() * size_of::<T>()` — same result as the manual
/// multiplication, expressed through the intended stdlib API.
fn slice_size_in_bytes<T: Sized>(slice: &[T]) -> usize {
    std::mem::size_of_val(slice)
}
/// Converts a size in bytes to a count of 32-bit words (truncating any
/// remainder, exactly like the integer division it replaces).
fn size_to_words(byte_size: usize) -> u32 {
    const WORD_SIZE: usize = std::mem::size_of::<u32>();
    (byte_size / WORD_SIZE) as u32
}
/// Rounds `len` up to the next multiple of `alignment`.
///
/// Relies on the power-of-two mask trick: `(-len) & (alignment - 1)` is the
/// padding needed to reach the next boundary (zero when already aligned).
fn align_up(len: usize, alignment: u32) -> usize {
    let mask = alignment as usize - 1;
    let padding = len.wrapping_neg() & mask;
    len + padding
}

View file

@ -14,67 +14,23 @@
//
// Also licensed under MIT license, at your choice.
//! Late bound resource management.
use std::collections::HashMap;
use std::ops::Range;
use peniko::{Color, ColorStop, ColorStops};
const N_SAMPLES: usize = 512;
const RETAINED_COUNT: usize = 64;
/// Token for ensuring that an encoded scene matches the current state
/// of a resource cache.
#[derive(Copy, Clone, PartialEq, Eq, Default)]
pub struct Token(u64);
/// Cache for late bound resources.
#[derive(Default)]
pub struct ResourceCache {
ramps: RampCache,
}
impl ResourceCache {
/// Creates a new resource cache.
pub fn new() -> Self {
Self::default()
}
/// Returns the ramp data, width and height. Returns `None` if the
/// given token does not match the current state of the cache.
pub fn ramps(&self, token: Token) -> Option<(&[u32], u32, u32)> {
if token.0 == self.ramps.epoch {
Some((self.ramps.data(), self.ramps.width(), self.ramps.height()))
} else {
None
}
}
pub(crate) fn advance(&mut self) -> Token {
self.ramps.advance();
Token(self.ramps.epoch)
}
pub(crate) fn add_ramp(&mut self, stops: &[ColorStop]) -> u32 {
self.ramps.add(stops)
}
}
#[derive(Clone)]
/// Patch for a late bound resource.
pub enum Patch {
/// Gradient ramp resource.
Ramp {
/// Byte offset to the ramp id in the draw data stream.
offset: usize,
/// Range of the gradient stops in the resource set.
stops: Range<usize>,
},
/// Data and dimensions for a set of resolved gradient ramps.
#[derive(Copy, Clone, Debug, Default)]
pub struct Ramps<'a> {
pub data: &'a [u32],
pub width: u32,
pub height: u32,
}
#[derive(Default)]
struct RampCache {
pub struct RampCache {
epoch: u64,
map: HashMap<ColorStops, (u32, u64)>,
data: Vec<u32>,
@ -127,16 +83,12 @@ impl RampCache {
}
}
pub fn data(&self) -> &[u32] {
&self.data
}
pub fn width(&self) -> u32 {
N_SAMPLES as u32
}
pub fn height(&self) -> u32 {
(self.data.len() / N_SAMPLES) as u32
pub fn ramps(&self) -> Ramps {
Ramps {
data: &self.data,
width: N_SAMPLES as u32,
height: (self.data.len() / N_SAMPLES) as u32,
}
}
}

468
src/encoding/resolve.rs Normal file
View file

@ -0,0 +1,468 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Also licensed under MIT license, at your choice.
use std::ops::Range;
use bytemuck::{Pod, Zeroable};
use moscato::pinot::FontRef;
use super::{
glyph_cache::{CachedRange, GlyphCache, GlyphKey},
ramp_cache::{RampCache, Ramps},
DrawTag, Encoding, PathTag, StreamOffsets, Transform,
};
use crate::glyph::GlyphContext;
use crate::shaders;
/// Layout of a packed encoding.
///
/// All `*_base` fields are offsets into the packed data buffer expressed in
/// u32 words; the accessor methods scale them by 4 to get byte ranges.
#[derive(Clone, Copy, Debug, Default, Zeroable, Pod)]
#[repr(C)]
pub struct Layout {
    /// Number of draw objects.
    pub n_draw_objects: u32,
    /// Number of paths.
    pub n_paths: u32,
    /// Number of clips.
    pub n_clips: u32,
    /// Start of binning data (computed as the sum of per-draw-tag info sizes).
    pub bin_data_start: u32,
    /// Start of path tag stream.
    pub path_tag_base: u32,
    /// Start of path data stream.
    pub path_data_base: u32,
    /// Start of draw tag stream.
    pub draw_tag_base: u32,
    /// Start of draw data stream.
    pub draw_data_base: u32,
    /// Start of transform stream.
    pub transform_base: u32,
    /// Start of linewidth stream.
    pub linewidth_base: u32,
}
impl Layout {
    /// Creates a zeroed layout.
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns the path tag stream.
    pub fn path_tags<'a>(&self, data: &'a [u8]) -> &'a [PathTag] {
        // Base offsets are stored in u32 words; scale by 4 for byte indices.
        let start = self.path_tag_base as usize * 4;
        let end = self.path_data_base as usize * 4;
        bytemuck::cast_slice(&data[start..end])
    }

    /// Returns the path tag stream in chunks of 4.
    pub fn path_tags_chunked<'a>(&self, data: &'a [u8]) -> &'a [u32] {
        // Same byte range as `path_tags`, reinterpreted as packed u32 words.
        let start = self.path_tag_base as usize * 4;
        let end = self.path_data_base as usize * 4;
        bytemuck::cast_slice(&data[start..end])
    }

    /// Returns the path data stream.
    pub fn path_data<'a>(&self, data: &'a [u8]) -> &'a [u8] {
        let start = self.path_data_base as usize * 4;
        let end = self.draw_tag_base as usize * 4;
        bytemuck::cast_slice(&data[start..end])
    }

    /// Returns the draw tag stream.
    pub fn draw_tags<'a>(&self, data: &'a [u8]) -> &'a [DrawTag] {
        let start = self.draw_tag_base as usize * 4;
        let end = self.draw_data_base as usize * 4;
        bytemuck::cast_slice(&data[start..end])
    }

    /// Returns the draw data stream.
    pub fn draw_data<'a>(&self, data: &'a [u8]) -> &'a [u32] {
        let start = self.draw_data_base as usize * 4;
        let end = self.transform_base as usize * 4;
        bytemuck::cast_slice(&data[start..end])
    }

    /// Returns the transform stream.
    pub fn transforms<'a>(&self, data: &'a [u8]) -> &'a [Transform] {
        let start = self.transform_base as usize * 4;
        let end = self.linewidth_base as usize * 4;
        bytemuck::cast_slice(&data[start..end])
    }

    /// Returns the linewidth stream.
    pub fn linewidths<'a>(&self, data: &'a [u8]) -> &'a [f32] {
        // The linewidth stream is the last one, so it runs to the end of the
        // buffer.
        let start = self.linewidth_base as usize * 4;
        bytemuck::cast_slice(&data[start..])
    }
}
/// Scene configuration.
///
/// This data structure must be kept in sync with the definition in
/// shaders/shared/config.wgsl.
#[derive(Clone, Copy, Debug, Default, Zeroable, Pod)]
#[repr(C)]
pub struct Config {
/// Width of the scene in tiles.
pub width_in_tiles: u32,
/// Height of the scene in tiles.
pub height_in_tiles: u32,
/// Width of the target in pixels.
pub target_width: u32,
/// Height of the target in pixels.
pub target_height: u32,
/// The base background color applied to the target before any blends.
pub base_color: u32,
/// Layout of packed scene data.
pub layout: Layout,
/// Size of binning buffer allocation (in u32s).
pub binning_size: u32,
/// Size of tile buffer allocation (in Tiles).
pub tiles_size: u32,
/// Size of segment buffer allocation (in PathSegments).
pub segments_size: u32,
/// Size of per-tile command list buffer allocation (in u32s).
pub ptcl_size: u32,
}
/// Resolver for late bound resources.
#[derive(Default)]
pub struct Resolver {
glyph_cache: GlyphCache,
glyph_ranges: Vec<CachedRange>,
glyph_cx: GlyphContext,
ramp_cache: RampCache,
patches: Vec<ResolvedPatch>,
}
impl Resolver {
/// Creates a new resource cache.
pub fn new() -> Self {
Self::default()
}
/// Resolves late bound resources and packs an encoding. Returns the packed
/// layout and computed ramp data.
pub fn resolve<'a>(
&'a mut self,
encoding: &Encoding,
packed: &mut Vec<u8>,
) -> (Layout, Ramps<'a>) {
let sizes = self.resolve_patches(encoding);
let data = packed;
data.clear();
let mut layout = Layout::default();
layout.n_paths = encoding.n_paths;
layout.n_clips = encoding.n_clips;
// Compute size of data buffer
let n_path_tags =
encoding.path_tags.len() + sizes.path_tags + encoding.n_open_clips as usize;
let path_tag_padded = align_up(n_path_tags, 4 * shaders::PATHTAG_REDUCE_WG);
let capacity = path_tag_padded
+ slice_size_in_bytes(&encoding.path_data, sizes.path_data)
+ slice_size_in_bytes(
&encoding.draw_tags,
sizes.draw_tags + encoding.n_open_clips as usize,
)
+ slice_size_in_bytes(&encoding.draw_data, sizes.draw_data)
+ slice_size_in_bytes(&encoding.transforms, sizes.transforms)
+ slice_size_in_bytes(&encoding.linewidths, sizes.linewidths);
data.reserve(capacity);
// Path tag stream
layout.path_tag_base = size_to_words(data.len());
{
let mut pos = 0;
let stream = &encoding.path_tags;
for patch in &self.patches {
if let ResolvedPatch::GlyphRun { index, glyphs, .. } = patch {
layout.n_paths += 1;
let stream_offset = encoding.glyph_runs[*index].stream_offsets.path_tags;
if pos < stream_offset {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
pos = stream_offset;
}
for glyph in &self.glyph_ranges[glyphs.clone()] {
data.extend_from_slice(bytemuck::bytes_of(&PathTag::TRANSFORM));
let glyph_data = &self.glyph_cache.encoding.path_tags
[glyph.start.path_tags..glyph.end.path_tags];
data.extend_from_slice(bytemuck::cast_slice(glyph_data));
}
data.extend_from_slice(bytemuck::bytes_of(&PathTag::PATH));
}
}
if pos < stream.len() {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..]));
}
for _ in 0..encoding.n_open_clips {
data.extend_from_slice(bytemuck::bytes_of(&PathTag::PATH));
}
data.resize(path_tag_padded, 0);
}
// Path data stream
layout.path_data_base = size_to_words(data.len());
{
let mut pos = 0;
let stream = &encoding.path_data;
for patch in &self.patches {
if let ResolvedPatch::GlyphRun { index, glyphs, .. } = patch {
let stream_offset = encoding.glyph_runs[*index].stream_offsets.path_data;
if pos < stream_offset {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
pos = stream_offset;
}
for glyph in &self.glyph_ranges[glyphs.clone()] {
let glyph_data = &self.glyph_cache.encoding.path_data
[glyph.start.path_data..glyph.end.path_data];
data.extend_from_slice(bytemuck::cast_slice(glyph_data));
}
}
}
if pos < stream.len() {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..]));
}
}
// Draw tag stream
layout.draw_tag_base = size_to_words(data.len());
// Bin data follows draw info
layout.bin_data_start = encoding.draw_tags.iter().map(|tag| tag.info_size()).sum();
{
data.extend_from_slice(bytemuck::cast_slice(&encoding.draw_tags));
for _ in 0..encoding.n_open_clips {
data.extend_from_slice(bytemuck::bytes_of(&DrawTag::END_CLIP));
}
}
// Draw data stream
layout.draw_data_base = size_to_words(data.len());
{
let mut pos = 0;
let stream = &encoding.draw_data;
for patch in &self.patches {
match patch {
ResolvedPatch::Ramp {
draw_data_offset,
ramp_id,
} => {
if pos < *draw_data_offset {
data.extend_from_slice(&encoding.draw_data[pos..*draw_data_offset]);
}
data.extend_from_slice(bytemuck::bytes_of(ramp_id));
pos = *draw_data_offset + 4;
}
ResolvedPatch::GlyphRun { .. } => {}
}
}
if pos < stream.len() {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..]));
}
}
// Transform stream
layout.transform_base = size_to_words(data.len());
{
let mut pos = 0;
let stream = &encoding.transforms;
for patch in &self.patches {
if let ResolvedPatch::GlyphRun {
index,
glyphs: _,
transform,
} = patch
{
let run = &encoding.glyph_runs[*index];
let stream_offset = encoding.glyph_runs[*index].stream_offsets.transforms;
if pos < stream_offset {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
pos = stream_offset;
}
if let Some(glyph_transform) = run.glyph_transform {
for glyph in &encoding.glyphs[run.glyphs.clone()] {
let xform = *transform
* Transform {
matrix: [1.0, 0.0, 0.0, -1.0],
translation: [glyph.x, glyph.y],
}
* glyph_transform;
data.extend_from_slice(bytemuck::bytes_of(&xform));
}
} else {
for glyph in &encoding.glyphs[run.glyphs.clone()] {
let xform = *transform
* Transform {
matrix: [1.0, 0.0, 0.0, -1.0],
translation: [glyph.x, glyph.y],
};
data.extend_from_slice(bytemuck::bytes_of(&xform));
}
}
}
}
if pos < stream.len() {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..]));
}
}
// Linewidth stream
layout.linewidth_base = size_to_words(data.len());
{
let mut pos = 0;
let stream = &encoding.linewidths;
for patch in &self.patches {
if let ResolvedPatch::GlyphRun { index, glyphs, .. } = patch {
let stream_offset = encoding.glyph_runs[*index].stream_offsets.linewidths;
if pos < stream_offset {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..stream_offset]));
pos = stream_offset;
}
for glyph in &self.glyph_ranges[glyphs.clone()] {
let glyph_data = &self.glyph_cache.encoding.linewidths
[glyph.start.linewidths..glyph.end.linewidths];
data.extend_from_slice(bytemuck::cast_slice(glyph_data));
}
}
}
if pos < stream.len() {
data.extend_from_slice(bytemuck::cast_slice(&stream[pos..]));
}
}
layout.n_draw_objects = layout.n_paths;
assert_eq!(capacity, data.len());
(layout, self.ramp_cache.ramps())
}
/// Resolves the late-bound patches recorded in `encoding`, encoding glyph
/// outlines into the glyph cache as needed, and returns the total extra
/// stream sizes the patches will contribute to the packed scene.
fn resolve_patches(&mut self, encoding: &Encoding) -> StreamOffsets {
    self.ramp_cache.advance();
    self.glyph_cache.clear();
    self.glyph_ranges.clear();
    self.patches.clear();
    let mut sizes = StreamOffsets::default();
    for patch in &encoding.patches {
        match patch {
            Patch::Ramp { offset, stops } => {
                let ramp_id = self.ramp_cache.add(&encoding.color_stops[stops.clone()]);
                self.patches.push(ResolvedPatch::Ramp {
                    // Offset is relative to the original draw data stream;
                    // shift it by the glyph data inserted before it.
                    draw_data_offset: *offset + sizes.draw_data,
                    ramp_id,
                });
            }
            Patch::GlyphRun { index } => {
                let mut run_sizes = StreamOffsets::default();
                let run = &encoding.glyph_runs[*index];
                let font_id = run.font.data.id();
                // Skip runs whose font data can't be parsed; no resolved
                // patch is pushed for them.
                let Some(font) = FontRef::from_index(run.font.data.as_ref(), run.font.index) else { continue };
                let glyphs = &encoding.glyphs[run.glyphs.clone()];
                let _coords = &encoding.normalized_coords[run.normalized_coords.clone()];
                let vars: [(moscato::pinot::types::Tag, f32); 0] = [];
                // NOTE(review): `< 0xFF` excludes index 0xFF even though it
                // would fit in the low byte — presumably a sentinel; confirm.
                let hint_id = if run.font.index < 0xFF {
                    Some(font_id << 8 | run.font.index as u64)
                } else {
                    None
                };
                let mut hint = run.hint;
                let mut font_size = run.font_size;
                let mut transform = run.transform;
                if hint {
                    // If hinting was requested and our transform matrix is just a uniform
                    // scale, then adjust our font size and cancel out the matrix. Otherwise,
                    // disable hinting entirely.
                    if transform.matrix[0] == transform.matrix[3]
                        && transform.matrix[1] == 0.0
                        && transform.matrix[2] == 0.0
                    {
                        font_size *= transform.matrix[0];
                        transform.matrix = [1.0, 0.0, 0.0, 1.0];
                    } else {
                        hint = false;
                    }
                }
                // Key the glyph cache on the *effective* size and hinting
                // state (after the adjustment above), not the requested ones.
                // Otherwise two runs with the same nominal font size but
                // different uniform scales would collide in the cache and
                // reuse outlines scaled for the wrong size.
                let font_size_u32 = font_size.to_bits();
                let mut scaler = self
                    .glyph_cx
                    .new_provider(&font, hint_id, font_size, hint, vars);
                let glyph_start = self.glyph_ranges.len();
                for glyph in glyphs {
                    let key = GlyphKey {
                        font_id,
                        font_index: run.font.index,
                        font_size: font_size_u32,
                        glyph_id: glyph.id,
                        hint,
                    };
                    let encoding_range = self
                        .glyph_cache
                        .get_or_insert(key, &run.style, &mut scaler)
                        .unwrap_or_default();
                    run_sizes.add(&encoding_range.len());
                    self.glyph_ranges.push(encoding_range);
                }
                let glyph_end = self.glyph_ranges.len();
                // One transform tag per glyph plus a terminating PATH tag;
                // one transform entry per glyph.
                run_sizes.path_tags += glyphs.len() + 1;
                run_sizes.transforms += glyphs.len();
                sizes.add(&run_sizes);
                self.patches.push(ResolvedPatch::GlyphRun {
                    index: *index,
                    glyphs: glyph_start..glyph_end,
                    transform,
                });
            }
        }
    }
    sizes
}
}
#[derive(Clone)]
/// Patch for a late bound resource.
///
/// Patches are recorded while an encoding is being built and are resolved
/// into concrete resource references (ramp ids, encoded glyph outlines)
/// just before the scene data is packed.
pub enum Patch {
    /// Gradient ramp resource.
    Ramp {
        /// Byte offset to the ramp id in the draw data stream.
        offset: usize,
        /// Range of the gradient stops in the resource set.
        stops: Range<usize>,
    },
    /// Glyph run resource.
    GlyphRun {
        /// Index in the glyph run buffer.
        index: usize,
    },
}
#[derive(Clone, Debug)]
/// A [`Patch`] after resolution: all late-bound resources have been replaced
/// with concrete indices/ranges usable while packing the scene.
enum ResolvedPatch {
    Ramp {
        /// Offset to the ramp id in draw data stream.
        draw_data_offset: usize,
        /// Resolved ramp index.
        ramp_id: u32,
    },
    GlyphRun {
        /// Index of the original glyph run in the encoding.
        index: usize,
        /// Range into the glyphs encoding range buffer.
        glyphs: Range<usize>,
        /// Global transform.
        transform: Transform,
    },
}
/// Returns the size in bytes of `slice` as if it held `extra` additional
/// elements of the same type.
///
/// Used to compute buffer capacities before extending streams with
/// late-bound data.
fn slice_size_in_bytes<T>(slice: &[T], extra: usize) -> usize {
    // `T: Sized` is implied for type parameters, so the explicit bound the
    // original carried was redundant.
    (slice.len() + extra) * std::mem::size_of::<T>()
}
/// Converts a byte size to a count of 32-bit words, truncating any
/// trailing partial word.
fn size_to_words(byte_size: usize) -> u32 {
    let word_size = std::mem::size_of::<u32>();
    (byte_size / word_size) as u32
}
/// Rounds `len` up to the next multiple of `alignment`.
///
/// NOTE(review): assumes `alignment` is a power of two — TODO confirm at
/// call sites (the mask trick below is only valid in that case).
fn align_up(len: usize, alignment: u32) -> usize {
    let mask = alignment as usize - 1;
    // Add exactly the padding needed to reach the next boundary; already
    // aligned lengths are unchanged.
    len + (len.wrapping_neg() & mask)
}

View file

@ -18,9 +18,10 @@
pub use moscato::pinot;
use crate::encoding::Encoding;
use crate::scene::{SceneBuilder, SceneFragment};
use peniko::kurbo::{Affine, Rect};
use peniko::{Brush, Color, Fill, Mix};
use peniko::{Brush, Color, Fill, Mix, Style};
use moscato::{Context, Scaler};
use pinot::{types::Tag, FontRef};
@ -32,6 +33,12 @@ pub struct GlyphContext {
ctx: Context,
}
impl Default for GlyphContext {
    /// Equivalent to [`GlyphContext::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl GlyphContext {
/// Creates a new context.
pub fn new() -> Self {
@ -94,10 +101,35 @@ impl<'a> GlyphProvider<'a> {
None,
&convert_path(path.elements()),
);
builder.finish();
Some(fragment)
}
/// Encodes the outline of the glyph `gid` into `encoding`, using `style`
/// to select the fill rule or stroke width.
///
/// Returns `None` if the glyph or its path is unavailable, or if the
/// encoded path ends up empty.
pub fn encode_glyph(&mut self, gid: u16, style: &Style, encoding: &mut Encoding) -> Option<()> {
    let glyph = self.scaler.glyph(gid)?;
    let path = glyph.path(0)?;
    // Negative linewidth values are sentinels that select the fill rule;
    // a positive value is an actual stroke width.
    match style {
        Style::Fill(Fill::NonZero) => encoding.encode_linewidth(-1.0),
        Style::Fill(Fill::EvenOdd) => encoding.encode_linewidth(-2.0),
        Style::Stroke(stroke) => encoding.encode_linewidth(stroke.width),
    }
    let mut path_encoder = encoding.encode_path(matches!(style, Style::Fill(_)));
    // Replay the scaler's path elements into the path encoder verbatim.
    for el in path.elements() {
        use moscato::Element::*;
        match el {
            MoveTo(p) => path_encoder.move_to(p.x, p.y),
            LineTo(p) => path_encoder.line_to(p.x, p.y),
            QuadTo(c, p) => path_encoder.quad_to(c.x, c.y, p.x, p.y),
            CurveTo(c0, c1, p) => path_encoder.cubic_to(c0.x, c0.y, c1.x, c1.y, p.x, p.y),
            Close => path_encoder.close(),
        }
    }
    // A zero return from `finish` means nothing was encoded; surface an
    // empty glyph as `None` so callers can skip it.
    if path_encoder.finish(false) != 0 {
        Some(())
    } else {
        None
    }
}
/// Returns a scene fragment containing the commands and resources to
/// render the specified color glyph.
pub fn get_color(&mut self, palette_index: u16, gid: u16) -> Option<SceneFragment> {
@ -188,7 +220,6 @@ impl<'a> GlyphProvider<'a> {
}
}
}
builder.finish();
Some(fragment)
}
}

View file

@ -30,7 +30,7 @@ pub mod glyph;
pub mod util;
use render::Render;
pub use scene::{Scene, SceneBuilder, SceneFragment};
pub use scene::{DrawGlyphs, Scene, SceneBuilder, SceneFragment};
pub use util::block_on_wgpu;
use engine::{Engine, ExternalResource, Recording};

View file

@ -3,7 +3,7 @@
use bytemuck::{Pod, Zeroable};
use crate::{
encoding::Encoding,
encoding::{Config, Encoding, Layout},
engine::{BufProxy, ImageFormat, ImageProxy, Recording, ResourceProxy},
shaders::{self, FullShaders, Shaders},
RenderParams, Scene,
@ -42,7 +42,6 @@ const TAG_MONOID_FULL_SIZE: u64 = 20;
const PATH_BBOX_SIZE: u64 = 24;
const CUBIC_SIZE: u64 = 48;
const DRAWMONOID_SIZE: u64 = 16;
const MAX_DRAWINFO_SIZE: u64 = 44;
const CLIP_BIC_SIZE: u64 = 8;
const CLIP_EL_SIZE: u64 = 32;
const CLIP_INP_SIZE: u64 = 8;
@ -54,28 +53,6 @@ const BIN_HEADER_SIZE: u64 = 8;
const TILE_SIZE: u64 = 8;
const SEGMENT_SIZE: u64 = 24;
// This data structure must be kept in sync with encoding::Config and the definition in
// shaders/shared/config.wgsl.
#[repr(C)]
#[derive(Clone, Copy, Debug, Default, Zeroable, Pod)]
struct Config {
    /// Width of the render target in tiles.
    width_in_tiles: u32,
    /// Height of the render target in tiles.
    height_in_tiles: u32,
    /// Width of the render target in pixels.
    target_width: u32,
    /// Height of the render target in pixels.
    target_height: u32,
    /// Packed base (background) color.
    base_color: u32,
    /// Number of draw objects.
    n_drawobj: u32,
    /// Number of paths.
    n_path: u32,
    /// Number of clips.
    n_clip: u32,
    /// Offset at which binning data begins.
    bin_data_start: u32,
    // The following are stream base offsets into the scene buffer —
    // presumably in u32 words (see `size_to_words`); confirm against
    // shaders/shared/config.wgsl.
    pathtag_base: u32,
    pathdata_base: u32,
    drawtag_base: u32,
    drawdata_base: u32,
    transform_base: u32,
    linewidth_base: u32,
}
/// Converts a byte size to a count of 32-bit words, truncating any
/// trailing partial word.
fn size_to_words(byte_size: usize) -> u32 {
    (byte_size / std::mem::size_of::<u32>()) as u32
}
@ -120,8 +97,11 @@ fn render(scene: &Scene, shaders: &Shaders) -> (Recording, BufProxy) {
height_in_tiles: 64,
target_width: 64 * 16,
target_height: 64 * 16,
pathtag_base,
pathdata_base,
layout: Layout {
path_tag_base: pathtag_base,
path_data_base: pathdata_base,
..Default::default()
},
..Default::default()
};
let scene_buf = recording.upload("scene", scene);
@ -232,34 +212,33 @@ impl Render {
params: &RenderParams,
robust: bool,
) -> Recording {
use crate::encoding::{resource::ResourceCache, PackedEncoding};
use crate::encoding::Resolver;
let mut recording = Recording::default();
let mut resources = ResourceCache::new();
let mut packed = PackedEncoding::default();
packed.pack(encoding, &mut resources);
let (ramp_data, ramps_width, ramps_height) = resources.ramps(packed.resources).unwrap();
let gradient_image = if encoding.patches.is_empty() {
let mut resolver = Resolver::new();
let mut packed = vec![];
let (layout, ramps) = resolver.resolve(encoding, &mut packed);
let gradient_image = if ramps.height == 0 {
ResourceProxy::new_image(1, 1, ImageFormat::Rgba8)
} else {
let data: &[u8] = bytemuck::cast_slice(ramp_data);
let data: &[u8] = bytemuck::cast_slice(ramps.data);
ResourceProxy::Image(recording.upload_image(
ramps_width,
ramps_height,
ramps.width,
ramps.height,
ImageFormat::Rgba8,
data,
))
};
// TODO: calculate for real when we do rectangles
let n_pathtag = encoding.path_tags.len();
let pathtag_padded = align_up(encoding.path_tags.len(), 4 * shaders::PATHTAG_REDUCE_WG);
let n_paths = encoding.n_paths;
let n_drawobj = n_paths;
let n_clip = encoding.n_clips;
let n_pathtag = layout.path_tags(&packed).len();
let pathtag_padded = align_up(n_pathtag, 4 * shaders::PATHTAG_REDUCE_WG);
let n_paths = layout.n_paths;
let n_drawobj = layout.n_paths;
let n_clip = layout.n_clips;
let new_width = next_multiple_of(params.width, 16);
let new_height = next_multiple_of(params.height, 16);
let info_size = packed.layout.bin_data_start;
let info_size = layout.bin_data_start;
let config = crate::encoding::Config {
width_in_tiles: new_width / 16,
height_in_tiles: new_height / 16,
@ -270,10 +249,10 @@ impl Render {
tiles_size: self.tiles_size,
segments_size: self.segments_size,
ptcl_size: self.ptcl_size,
layout: packed.layout,
layout: layout,
};
// println!("{:?}", config);
let scene_buf = ResourceProxy::Buf(recording.upload("scene", packed.data));
let scene_buf = ResourceProxy::Buf(recording.upload("scene", packed));
let config_buf =
ResourceProxy::Buf(recording.upload_uniform("config", bytemuck::bytes_of(&config)));
let info_bin_data_buf = ResourceProxy::new_buf(
@ -374,8 +353,7 @@ impl Render {
);
let draw_monoid_buf =
ResourceProxy::new_buf(n_drawobj as u64 * DRAWMONOID_SIZE, "draw_monoid_buf");
let clip_inp_buf =
ResourceProxy::new_buf(encoding.n_clips as u64 * CLIP_INP_SIZE, "clip_inp_buf");
let clip_inp_buf = ResourceProxy::new_buf(n_clip as u64 * CLIP_INP_SIZE, "clip_inp_buf");
recording.dispatch(
shaders.draw_leaf,
(drawobj_wgs, 1, 1),
@ -390,8 +368,7 @@ impl Render {
],
);
recording.free_resource(draw_reduced_buf);
let clip_el_buf =
ResourceProxy::new_buf(encoding.n_clips as u64 * CLIP_EL_SIZE, "clip_el_buf");
let clip_el_buf = ResourceProxy::new_buf(n_clip as u64 * CLIP_EL_SIZE, "clip_el_buf");
let clip_bic_buf = ResourceProxy::new_buf(
(n_clip / shaders::CLIP_REDUCE_WG) as u64 * CLIP_BIC_SIZE,
"clip_bic_buf",

View file

@ -15,9 +15,9 @@
// Also licensed under MIT license, at your choice.
use peniko::kurbo::{Affine, Rect, Shape};
use peniko::{BlendMode, BrushRef, Fill, Stroke};
use peniko::{BlendMode, BrushRef, Color, Fill, Font, Stroke, StyleRef};
use crate::encoding::{Encoding, Transform};
use crate::encoding::{Encoding, Glyph, GlyphRun, Patch, Transform};
/// Encoded definition of a scene and associated resources.
#[derive(Default)]
@ -67,7 +67,6 @@ impl SceneFragment {
/// Builder for constructing a scene or scene fragment.
pub struct SceneBuilder<'a> {
scene: &'a mut Encoding,
layer_depth: u32,
}
impl<'a> SceneBuilder<'a> {
@ -86,10 +85,7 @@ impl<'a> SceneBuilder<'a> {
/// Creates a new builder for constructing a scene.
fn new(scene: &'a mut Encoding, is_fragment: bool) -> Self {
scene.reset(is_fragment);
Self {
scene,
layer_depth: 0,
}
Self { scene }
}
/// Pushes a new layer bound by the specifed shape and composed with
@ -112,15 +108,11 @@ impl<'a> SceneBuilder<'a> {
.encode_shape(&Rect::new(0.0, 0.0, 0.0, 0.0), true);
}
self.scene.encode_begin_clip(blend, alpha.clamp(0.0, 1.0));
self.layer_depth += 1;
}
/// Pops the current layer.
pub fn pop_layer(&mut self) {
if self.layer_depth > 0 {
self.scene.encode_end_clip();
self.layer_depth -= 1;
}
self.scene.encode_end_clip();
}
/// Fills a shape using the specified style and brush.
@ -176,6 +168,11 @@ impl<'a> SceneBuilder<'a> {
}
}
/// Returns a builder for encoding a glyph run.
pub fn draw_glyphs(&mut self, font: &Font) -> DrawGlyphs {
DrawGlyphs::new(self.scene, font)
}
/// Appends a fragment to the scene.
pub fn append(&mut self, fragment: &SceneFragment, transform: Option<Affine>) {
self.scene.append(
@ -183,11 +180,119 @@ impl<'a> SceneBuilder<'a> {
&transform.map(|xform| Transform::from_kurbo(&xform)),
);
}
}
/// Completes construction and finalizes the underlying scene.
pub fn finish(self) {
for _ in 0..self.layer_depth {
self.scene.encode_end_clip();
/// Builder for encoding a glyph run.
pub struct DrawGlyphs<'a> {
    /// Destination encoding that receives the run.
    encoding: &'a mut Encoding,
    /// Glyph run being accumulated; committed on `draw`.
    run: GlyphRun,
    /// Brush used to paint the glyphs (default: solid black).
    brush: BrushRef<'a>,
    /// Additional alpha multiplier applied to the brush (default: 1.0).
    brush_alpha: f32,
}
impl<'a> DrawGlyphs<'a> {
    /// Creates a new builder for encoding a glyph run for the specified
    /// encoding with the given font.
    pub fn new(encoding: &'a mut Encoding, font: &Font) -> Self {
        // Record current stream positions so the run's ranges start empty
        // and can be extended by the builder methods below.
        let coords_start = encoding.normalized_coords.len();
        let glyphs_start = encoding.glyphs.len();
        let stream_offsets = encoding.stream_offsets();
        Self {
            encoding,
            run: GlyphRun {
                font: font.clone(),
                transform: Transform::IDENTITY,
                glyph_transform: None,
                font_size: 16.0,
                hint: false,
                normalized_coords: coords_start..coords_start,
                style: Fill::NonZero.into(),
                glyphs: glyphs_start..glyphs_start,
                stream_offsets,
            },
            brush: Color::BLACK.into(),
            brush_alpha: 1.0,
        }
    }
    /// Sets the global transform. This is applied to all glyphs after the offset
    /// translation.
    ///
    /// The default value is the identity matrix.
    pub fn transform(mut self, transform: Affine) -> Self {
        self.run.transform = Transform::from_kurbo(&transform);
        self
    }
    /// Sets the per-glyph transform. This is applied to all glyphs prior to
    /// offset translation. This is commonly used for applying a shear to simulate
    /// an oblique font.
    ///
    /// The default value is `None`.
    pub fn glyph_transform(mut self, transform: Option<Affine>) -> Self {
        self.run.glyph_transform = transform.map(|xform| Transform::from_kurbo(&xform));
        self
    }
    /// Sets the font size in pixels per em units.
    ///
    /// The default value is 16.0.
    pub fn font_size(mut self, size: f32) -> Self {
        self.run.font_size = size;
        self
    }
    /// Sets whether to enable hinting.
    ///
    /// The default value is `false`.
    pub fn hint(mut self, hint: bool) -> Self {
        self.run.hint = hint;
        self
    }
    /// Sets the normalized design space coordinates for a variable font instance.
    pub fn normalized_coords(mut self, coords: &[i16]) -> Self {
        // Discard coords set by any previous call before appending the new
        // ones, so repeated calls replace rather than accumulate.
        self.encoding
            .normalized_coords
            .truncate(self.run.normalized_coords.start);
        self.encoding.normalized_coords.extend_from_slice(coords);
        self.run.normalized_coords.end = self.encoding.normalized_coords.len();
        self
    }
    /// Sets the brush.
    ///
    /// The default value is solid black.
    pub fn brush(mut self, brush: impl Into<BrushRef<'a>>) -> Self {
        self.brush = brush.into();
        self
    }
    /// Sets an additional alpha multiplier for the brush.
    ///
    /// The default value is 1.0.
    pub fn brush_alpha(mut self, alpha: f32) -> Self {
        self.brush_alpha = alpha;
        self
    }
    /// Encodes a fill or stroke for the given sequence of glyphs and consumes
    /// the builder.
    ///
    /// The `style` parameter accepts either `Fill` or `&Stroke` types.
    pub fn draw(mut self, style: impl Into<StyleRef<'a>>, glyphs: impl Iterator<Item = Glyph>) {
        self.run.style = style.into().to_owned();
        self.encoding.glyphs.extend(glyphs);
        self.run.glyphs.end = self.encoding.glyphs.len();
        if self.run.glyphs.is_empty() {
            // Empty run: nothing to encode; roll back any normalized coords
            // appended by this builder.
            self.encoding
                .normalized_coords
                .truncate(self.run.normalized_coords.start);
            return;
        }
        // Commit the run and record a patch so the resolver can encode the
        // glyph outlines late, then encode the brush that paints them.
        let index = self.encoding.glyph_runs.len();
        self.encoding.glyph_runs.push(self.run);
        self.encoding.patches.push(Patch::GlyphRun { index });
        self.encoding.encode_brush(self.brush, self.brush_alpha);
    }
}