Merge pull request #293 from linebender/images

Let's add images
Chad Brokaw 2023-03-15 08:37:36 -04:00, committed by GitHub
commit f3d45fc01d
22 changed files with 515 additions and 41 deletions

View file

@ -46,6 +46,7 @@ bytemuck = { version = "1.12.1", features = ["derive"] }
smallvec = "1.8.0"
moscato = { git = "https://github.com/dfrg/pinot", rev = "59db153" }
peniko = { git = "https://github.com/linebender/peniko", rev = "cafdac9a211a0fb2fec5656bd663d1ac770bcc81" }
guillotiere = "0.6.2"
[workspace.dependencies]
wgpu = "0.15"

Binary file added (new image asset, 12 KiB); contents not shown.

View file

@ -6,7 +6,7 @@ use std::{
use anyhow::{anyhow, bail, Context, Result};
use clap::{CommandFactory, Parser};
use scenes::{SceneParams, SceneSet, SimpleText};
use scenes::{ImageCache, SceneParams, SceneSet, SimpleText};
use vello::{
block_on_wgpu,
kurbo::{Affine, Vec2},
@ -97,9 +97,11 @@ async fn render(mut scenes: SceneSet, index: usize, args: &Args) -> Result<()> {
let mut builder = SceneBuilder::for_fragment(&mut fragment);
let example_scene = &mut scenes.scenes[index];
let mut text = SimpleText::new();
let mut images = ImageCache::new();
let mut scene_params = SceneParams {
time: args.time.unwrap_or(0.),
text: &mut text,
images: &mut images,
resolution: None,
base_color: None,
};

View file

@ -15,6 +15,7 @@ vello = { path = "../../" }
vello_svg = { path = "../../integrations/vello_svg" }
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
image = "0.24.5"
# Used for the `download` command
byte-unit = "4.0"

View file

@ -0,0 +1,51 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use vello::peniko::{Blob, Format, Image};
/// Simple hack to support loading images for examples.
#[derive(Default)]
pub struct ImageCache {
files: HashMap<PathBuf, Image>,
bytes: HashMap<usize, Image>,
}
impl ImageCache {
pub fn new() -> Self {
Self::default()
}
pub fn from_file(&mut self, path: impl AsRef<Path>) -> anyhow::Result<Image> {
let path = path.as_ref();
if let Some(image) = self.files.get(path) {
Ok(image.clone())
} else {
let data = std::fs::read(path)?;
let image = decode_image(&data)?;
self.files.insert(path.to_owned(), image.clone());
Ok(image)
}
}
pub fn from_bytes(&mut self, key: usize, bytes: &[u8]) -> anyhow::Result<Image> {
if let Some(image) = self.bytes.get(&key) {
Ok(image.clone())
} else {
let image = decode_image(bytes)?;
self.bytes.insert(key, image.clone());
Ok(image)
}
}
}
fn decode_image(data: &[u8]) -> anyhow::Result<Image> {
let image = image::io::Reader::new(std::io::Cursor::new(data))
.with_guessed_format()?
.decode()?;
let width = image.width();
let height = image.height();
let data = Arc::new(image.into_rgba8().into_vec());
let blob = Blob::new(data);
Ok(Image::new(blob, Format::Rgba8, width, height))
}
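A note on usage: the cache above returns clones of the decoded Image, which is cheap because the pixel data sits behind a shared, Arc-backed Blob. A minimal caller sketch (the path and function name are illustrative, not part of this change):
use scenes::ImageCache;
use vello::peniko::Image;

fn load_background(images: &mut ImageCache) -> anyhow::Result<Image> {
    // The first call reads and decodes the file; later calls with the same
    // path hit the HashMap and return a cheap clone of the cached Image.
    images.from_file("examples/assets/background.png")
}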

View file

@ -1,4 +1,5 @@
pub mod download;
mod images;
mod simple_text;
mod svg;
mod test_scenes;
@ -7,6 +8,7 @@ use std::path::PathBuf;
use anyhow::{anyhow, Result};
use clap::{Args, Subcommand};
use download::Download;
pub use images::ImageCache;
pub use simple_text::SimpleText;
pub use svg::{default_scene, scene_from_files};
pub use test_scenes::test_scenes;
@ -16,6 +18,7 @@ use vello::{kurbo::Vec2, peniko::Color, SceneBuilder};
pub struct SceneParams<'a> {
pub time: f64,
pub text: &'a mut SimpleText,
pub images: &'a mut ImageCache,
pub resolution: Option<Vec2>,
pub base_color: Option<vello::peniko::Color>,
}

View file

@ -3,6 +3,8 @@ use vello::kurbo::{Affine, BezPath, Ellipse, PathEl, Point, Rect};
use vello::peniko::*;
use vello::*;
const PIET_LOGO_IMAGE: &[u8] = include_bytes!("../../assets/piet-logo.png");
macro_rules! scene {
($name: ident) => {
scene!($name: false)
@ -97,6 +99,13 @@ fn cardioid_and_friends(sb: &mut SceneBuilder, _: &mut SceneParams) {
}
fn animated_text(sb: &mut SceneBuilder, params: &mut SceneParams) {
// Uses the static array address as a cache key for expedience. Real code
// should use a better strategy.
let piet_logo = params
.images
.from_bytes(PIET_LOGO_IMAGE.as_ptr() as usize, PIET_LOGO_IMAGE)
.unwrap();
use PathEl::*;
let rect = Rect::from_origin_size(Point::new(0.0, 0.0), (1000.0, 1000.0));
let star = [
@ -184,6 +193,10 @@ fn animated_text(sb: &mut SceneBuilder, params: &mut SceneParams) {
None,
&star,
);
sb.draw_image(
&piet_logo,
Affine::translate((550.0, 250.0)) * Affine::skew(-20f64.to_radians().tan(), 0.0),
);
}
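The cache-key comment in animated_text above flags the pointer-as-key approach as a shortcut. One possible alternative (a sketch, not part of this change) is to derive the key from the image bytes themselves:
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hash the image bytes so the key depends on content rather than on where
// the static data happens to live in memory.
fn content_key(bytes: &[u8]) -> usize {
    let mut hasher = DefaultHasher::new();
    bytes.hash(&mut hasher);
    hasher.finish() as usize
}

// e.g. params.images.from_bytes(content_key(PIET_LOGO_IMAGE), PIET_LOGO_IMAGE)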
fn brush_transform(sb: &mut SceneBuilder, params: &mut SceneParams) {

View file

@ -19,7 +19,7 @@ use std::time::Instant;
use anyhow::Result;
use clap::{CommandFactory, Parser};
use scenes::{SceneParams, SceneSet, SimpleText};
use scenes::{ImageCache, SceneParams, SceneSet, SimpleText};
use vello::peniko::Color;
use vello::util::RenderSurface;
use vello::{
@ -96,6 +96,7 @@ fn run(
let mut scene = Scene::new();
let mut fragment = SceneFragment::new();
let mut simple_text = SimpleText::new();
let mut images = ImageCache::new();
let start = Instant::now();
let mut touch_state = multi_touch::TouchState::new();
@ -264,6 +265,7 @@ fn run(
let mut scene_params = SceneParams {
time: start.elapsed().as_secs_f64(),
text: &mut simple_text,
images: &mut images,
resolution: None,
base_color: None,
};

View file

@ -126,6 +126,13 @@ fn write_grad(ty: u32, index: u32, info_offset: u32) {
cmd_offset += 3u;
}
fn write_image(info_offset: u32) {
alloc_cmd(2u);
ptcl[cmd_offset] = CMD_IMAGE;
ptcl[cmd_offset + 1u] = info_offset;
cmd_offset += 2u;
}
fn write_begin_clip() {
alloc_cmd(1u);
ptcl[cmd_offset] = CMD_BEGIN_CLIP;
@ -377,6 +384,13 @@ fn main(
write_grad(CMD_RAD_GRAD, index, info_offset);
}
}
// DRAWTAG_FILL_IMAGE
case 0x248u: {
let linewidth = bitcast<f32>(info_bin_data[di]);
if write_path(tile, linewidth) {
write_image(di + 1u);
}
}
// DRAWTAG_BEGIN_CLIP
case 0x9u: {
if tile.segments == 0u && tile.backdrop == 0 {

View file

@ -113,7 +113,9 @@ fn main(
var matrx: vec4<f32>;
var translate: vec2<f32>;
var linewidth = bbox.linewidth;
if linewidth >= 0.0 || tag_word == DRAWTAG_FILL_LIN_GRADIENT || tag_word == DRAWTAG_FILL_RAD_GRADIENT {
if linewidth >= 0.0 || tag_word == DRAWTAG_FILL_LIN_GRADIENT || tag_word == DRAWTAG_FILL_RAD_GRADIENT ||
tag_word == DRAWTAG_FILL_IMAGE
{
let transform = read_transform(config.transform_base, bbox.trans_ix);
matrx = transform.matrx;
translate = transform.translate;
@ -123,8 +125,8 @@ fn main(
linewidth *= sqrt(abs(matrx.x * matrx.w - matrx.y * matrx.z));
}
switch tag_word {
// DRAWTAG_FILL_COLOR, DRAWTAG_FILL_IMAGE
case 0x44u, 0x48u: {
// DRAWTAG_FILL_COLOR
case 0x44u: {
info[di] = bitcast<u32>(linewidth);
}
// DRAWTAG_FILL_LIN_GRADIENT
@ -169,6 +171,21 @@ fn main(
info[di + 9u] = bitcast<u32>(ra);
info[di + 10u] = bitcast<u32>(roff);
}
// DRAWTAG_FILL_IMAGE
case 0x248u: {
info[di] = bitcast<u32>(linewidth);
let inv_det = 1.0 / (matrx.x * matrx.w - matrx.y * matrx.z);
let inv_mat = inv_det * vec4(matrx.w, -matrx.y, -matrx.z, matrx.x);
let inv_tr = mat2x2(inv_mat.xy, inv_mat.zw) * -translate;
info[di + 1u] = bitcast<u32>(inv_mat.x);
info[di + 2u] = bitcast<u32>(inv_mat.y);
info[di + 3u] = bitcast<u32>(inv_mat.z);
info[di + 4u] = bitcast<u32>(inv_mat.w);
info[di + 5u] = bitcast<u32>(inv_tr.x);
info[di + 6u] = bitcast<u32>(inv_tr.y);
info[di + 7u] = scene[dd];
info[di + 8u] = scene[dd + 1u];
}
default: {}
}
}
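For reference, the DRAWTAG_FILL_IMAGE case above inverts the 2x2 part of the transform and pre-applies it to the negated translation, so the fine stage can map a device pixel straight back into image space. The same math written out in Rust (a sketch for illustration, not part of the diff):
// The matrix is stored column-major as (m.x, m.y, m.z, m.w), matching how the
// shader builds mat2x2(inv_mat.xy, inv_mat.zw). Returns the inverse matrix and
// the inverse-transformed translation.
fn invert_transform(m: [f32; 4], t: [f32; 2]) -> ([f32; 4], [f32; 2]) {
    let inv_det = 1.0 / (m[0] * m[3] - m[1] * m[2]);
    let inv = [m[3] * inv_det, -m[1] * inv_det, -m[2] * inv_det, m[0] * inv_det];
    // Equivalent of mat2x2(inv_mat.xy, inv_mat.zw) * -translate in the shader.
    let inv_tr = [
        inv[0] * -t[0] + inv[2] * -t[1],
        inv[1] * -t[0] + inv[3] * -t[1],
    ];
    (inv, inv_tr)
}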

View file

@ -40,6 +40,9 @@ var gradients: texture_2d<f32>;
@group(0) @binding(6)
var<storage> info: array<u32>;
@group(0) @binding(7)
var image_atlas: texture_2d<f32>;
fn read_fill(cmd_ix: u32) -> CmdFill {
let tile = ptcl[cmd_ix + 1u];
let backdrop = i32(ptcl[cmd_ix + 2u]);
@ -81,6 +84,24 @@ fn read_rad_grad(cmd_ix: u32) -> CmdRadGrad {
return CmdRadGrad(index, matrx, xlat, c1, ra, roff);
}
fn read_image(cmd_ix: u32) -> CmdImage {
let info_offset = ptcl[cmd_ix + 1u];
let m0 = bitcast<f32>(info[info_offset]);
let m1 = bitcast<f32>(info[info_offset + 1u]);
let m2 = bitcast<f32>(info[info_offset + 2u]);
let m3 = bitcast<f32>(info[info_offset + 3u]);
let matrx = vec4(m0, m1, m2, m3);
let xlat = vec2(bitcast<f32>(info[info_offset + 4u]), bitcast<f32>(info[info_offset + 5u]));
let xy = info[info_offset + 6u];
let width_height = info[info_offset + 7u];
// The following are not intended to be bitcasts
let x = f32(xy >> 16u);
let y = f32(xy & 0xffffu);
let width = f32(width_height >> 16u);
let height = f32(width_height & 0xffffu);
return CmdImage(matrx, xlat, vec2(x, y), vec2(width, height));
}
fn read_end_clip(cmd_ix: u32) -> CmdEndClip {
let blend = ptcl[cmd_ix + 1u];
let alpha = bitcast<f32>(ptcl[cmd_ix + 2u]);
@ -265,6 +286,28 @@ fn main(
}
cmd_ix += 3u;
}
// CMD_IMAGE
case 8u: {
let image = read_image(cmd_ix);
let atlas_extents = image.atlas_offset + image.extents;
for (var i = 0u; i < PIXELS_PER_THREAD; i += 1u) {
let my_xy = vec2(xy.x + f32(i), xy.y);
let atlas_uv = image.matrx.xy * my_xy.x + image.matrx.zw * my_xy.y + image.xlat + image.atlas_offset;
// This currently clips to the image bounds. TODO: extend modes
if all(atlas_uv < atlas_extents) && area[i] != 0.0 {
let uv_quad = vec4(max(floor(atlas_uv), image.atlas_offset), min(ceil(atlas_uv), atlas_extents));
let uv_frac = fract(atlas_uv);
let a = premul_alpha(textureLoad(image_atlas, vec2<i32>(uv_quad.xy), 0));
let b = premul_alpha(textureLoad(image_atlas, vec2<i32>(uv_quad.xw), 0));
let c = premul_alpha(textureLoad(image_atlas, vec2<i32>(uv_quad.zy), 0));
let d = premul_alpha(textureLoad(image_atlas, vec2<i32>(uv_quad.zw), 0));
let fg_rgba = mix(mix(a, b, uv_frac.y), mix(c, d, uv_frac.y), uv_frac.x);
let fg_i = fg_rgba * area[i];
rgba[i] = rgba[i] * (1.0 - fg_i.a) + fg_i;
}
}
cmd_ix += 2u;
}
// CMD_BEGIN_CLIP
case 9u: {
if clip_depth < BLEND_STACK_SPLIT {
@ -326,3 +369,7 @@ fn main(
}
#endif
}
fn premul_alpha(rgba: vec4<f32>) -> vec4<f32> {
return vec4(rgba.rgb * rgba.a, rgba.a);
}
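Two details in the CMD_IMAGE case above are worth spelling out: texels are converted to premultiplied alpha before bilinear filtering, and the filtered color is scaled by the pixel's coverage (area[i]) before a source-over blend. The blend itself, sketched in Rust (illustrative, not part of the diff):
/// Source-over compositing for premultiplied-alpha colors [r, g, b, a]:
/// out = bg * (1 - fg.a) + fg, matching `rgba[i] * (1.0 - fg_i.a) + fg_i`.
fn blend_over(bg: [f32; 4], fg: [f32; 4]) -> [f32; 4] {
    let k = 1.0 - fg[3];
    [bg[0] * k + fg[0], bg[1] * k + fg[1], bg[2] * k + fg[2], bg[3] * k + fg[3]]
}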

View file

@ -19,7 +19,7 @@ let DRAWTAG_NOP = 0u;
let DRAWTAG_FILL_COLOR = 0x44u;
let DRAWTAG_FILL_LIN_GRADIENT = 0x114u;
let DRAWTAG_FILL_RAD_GRADIENT = 0x2dcu;
let DRAWTAG_FILL_IMAGE = 0x48u;
let DRAWTAG_FILL_IMAGE = 0x248u;
let DRAWTAG_BEGIN_CLIP = 0x9u;
let DRAWTAG_END_CLIP = 0x21u;

View file

@ -16,6 +16,7 @@ let CMD_SOLID = 3u;
let CMD_COLOR = 5u;
let CMD_LIN_GRAD = 6u;
let CMD_RAD_GRAD = 7u;
let CMD_IMAGE = 8u;
let CMD_BEGIN_CLIP = 9u;
let CMD_END_CLIP = 10u;
let CMD_JUMP = 11u;
@ -57,6 +58,13 @@ struct CmdRadGrad {
roff: f32,
}
struct CmdImage {
matrx: vec4<f32>,
xlat: vec2<f32>,
atlas_offset: vec2<f32>,
extents: vec2<f32>,
}
struct CmdEndClip {
blend: u32,
alpha: f32,

View file

@ -20,6 +20,7 @@ mod draw;
mod encoding;
mod glyph;
mod glyph_cache;
mod image_cache;
mod math;
mod monoid;
mod path;

View file

@ -38,7 +38,7 @@ impl DrawTag {
pub const RADIAL_GRADIENT: Self = Self(0x2dc);
/// Image fill.
pub const IMAGE: Self = Self(0x48);
pub const IMAGE: Self = Self(0x248);
/// Begin layer/clip.
pub const BEGIN_CLIP: Self = Self(0x9);
@ -104,10 +104,10 @@ pub struct DrawRadialGradient {
#[derive(Clone, Copy, Debug, Default, Zeroable, Pod)]
#[repr(C)]
pub struct DrawImage {
/// Image index.
pub index: u32,
/// Packed image offset.
pub offset: u32,
/// Packed atlas coordinates.
pub xy: u32,
/// Packed image dimensions.
pub width_height: u32,
}
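Both packed fields follow the same convention: two u16 values in one u32, high half first (atlas x / image width in the top 16 bits, atlas y / image height in the bottom 16). A pack/unpack sketch (the helpers are illustrative, not part of the encoding crate):
fn pack_u16_pair(hi: u32, lo: u32) -> u32 {
    (hi << 16) | (lo & 0xFFFF)
}

fn unpack_u16_pair(packed: u32) -> (u32, u32) {
    (packed >> 16, packed & 0xFFFF)
}

// xy = pack_u16_pair(x, y); width_height = pack_u16_pair(width, height).
// The fine shader recovers the values with the same shifts (see read_image).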
/// Draw data for a clip or layer.

View file

@ -14,12 +14,14 @@
//
// Also licensed under MIT license, at your choice.
use crate::encoding::DrawImage;
use super::{
resolve::Patch, DrawColor, DrawLinearGradient, DrawRadialGradient, DrawTag, Glyph, GlyphRun,
PathEncoder, PathTag, Transform,
};
use peniko::{kurbo::Shape, BlendMode, BrushRef, ColorStop, Extend, GradientKind};
use peniko::{kurbo::Shape, BlendMode, BrushRef, ColorStop, Extend, GradientKind, Image};
/// Encoded data streams for a scene.
#[derive(Clone, Default)]
@ -122,16 +124,26 @@ impl Encoding {
self.n_open_clips += other.n_open_clips;
self.patches
.extend(other.patches.iter().map(|patch| match patch {
Patch::Ramp { offset, stops } => {
Patch::Ramp {
draw_data_offset: offset,
stops,
} => {
let stops = stops.start + stops_base..stops.end + stops_base;
Patch::Ramp {
offset: offset + offsets.draw_data,
draw_data_offset: offset + offsets.draw_data,
stops,
}
}
Patch::GlyphRun { index } => Patch::GlyphRun {
index: index + glyph_runs_base,
},
Patch::Image {
image,
draw_data_offset,
} => Patch::Image {
image: image.clone(),
draw_data_offset: *draw_data_offset + offsets.draw_data,
},
}));
self.color_stops.extend_from_slice(&other.color_stops);
if let Some(transform) = *transform {
@ -250,8 +262,8 @@ impl Encoding {
todo!("sweep gradients aren't supported yet!")
}
},
BrushRef::Image(_) => {
todo!("images aren't supported yet!")
BrushRef::Image(image) => {
self.encode_image(image, alpha);
}
}
}
@ -290,6 +302,22 @@ impl Encoding {
.extend_from_slice(bytemuck::bytes_of(&gradient));
}
/// Encodes an image brush.
pub fn encode_image(&mut self, image: &Image, _alpha: f32) {
// TODO: feed the alpha multiplier through the full pipeline for consistency
// with other brushes?
self.patches.push(Patch::Image {
image: image.clone(),
draw_data_offset: self.draw_data.len(),
});
self.draw_tags.push(DrawTag::IMAGE);
self.draw_data
.extend_from_slice(bytemuck::bytes_of(&DrawImage {
xy: 0,
width_height: (image.width << 16) | (image.height & 0xFFFF),
}));
}
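Note that `xy` is encoded as zero here: the atlas position isn't known until the resolver has packed the atlas, so Patch::Image records where the placeholder lives in the draw data and the resolver splices in the real coordinates later (see the resolver changes further down in this diff). Conceptually (a sketch, not the actual resolver code):
// Overwrite the 4-byte xy placeholder at `draw_data_offset` with the packed
// atlas position once the image has been allocated at (x, y).
fn patch_image_xy(draw_data: &mut [u8], draw_data_offset: usize, x: u32, y: u32) {
    let xy: u32 = (x << 16) | y;
    // Native-endian bytes, matching how bytemuck::bytes_of lays out the u32.
    draw_data[draw_data_offset..draw_data_offset + 4].copy_from_slice(&xy.to_ne_bytes());
}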
/// Encodes a begin clip command.
pub fn encode_begin_clip(&mut self, blend_mode: BlendMode, alpha: f32) {
use super::DrawBeginClip;
@ -329,7 +357,7 @@ impl Encoding {
self.color_stops.extend(color_stops);
}
self.patches.push(Patch::Ramp {
offset,
draw_data_offset: offset,
stops: stops_start..self.color_stops.len(),
});
}

View file

@ -0,0 +1,92 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Also licensed under MIT license, at your choice.
use guillotiere::{size2, AtlasAllocator};
use peniko::Image;
use std::collections::{hash_map::Entry, HashMap};
const DEFAULT_ATLAS_SIZE: i32 = 1024;
const MAX_ATLAS_SIZE: i32 = 8192;
pub struct Images<'a> {
pub width: u32,
pub height: u32,
pub images: &'a [(Image, u32, u32)],
}
pub struct ImageCache {
atlas: AtlasAllocator,
/// Map from image blob id to atlas location.
map: HashMap<u64, (u32, u32)>,
/// List of all allocated images with associated atlas location.
images: Vec<(Image, u32, u32)>,
}
impl Default for ImageCache {
fn default() -> Self {
Self::new()
}
}
impl ImageCache {
pub fn new() -> Self {
Self {
atlas: AtlasAllocator::new(size2(DEFAULT_ATLAS_SIZE, DEFAULT_ATLAS_SIZE)),
map: Default::default(),
images: Default::default(),
}
}
pub fn images(&self) -> Images {
Images {
width: self.atlas.size().width as u32,
height: self.atlas.size().height as u32,
images: &self.images,
}
}
pub fn bump_size(&mut self) -> bool {
let new_size = self.atlas.size().width * 2;
if new_size > MAX_ATLAS_SIZE {
return false;
}
self.atlas = AtlasAllocator::new(size2(new_size, new_size));
self.map.clear();
self.images.clear();
true
}
pub fn clear(&mut self) {
self.atlas.clear();
self.map.clear();
self.images.clear();
}
pub fn get_or_insert(&mut self, image: &Image) -> Option<(u32, u32)> {
match self.map.entry(image.data.id()) {
Entry::Occupied(occupied) => Some(*occupied.get()),
Entry::Vacant(vacant) => {
let alloc = self
.atlas
.allocate(size2(image.width as _, image.height as _))?;
let x = alloc.rectangle.min.x as u32;
let y = alloc.rectangle.min.y as u32;
self.images.push((image.clone(), x, y));
Some(*vacant.insert((x, y)))
}
}
}
}
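A sketch of how a caller inside the crate can drive this cache (illustrative only; the resolver later in this diff does the real work, and it also re-inserts every pending image after a resize, since bump_size clears the cache):
fn place_in_atlas(cache: &mut ImageCache, image: &peniko::Image) -> Option<(u32, u32)> {
    loop {
        // Success: (x, y) is the top-left corner assigned to this image.
        if let Some(xy) = cache.get_or_insert(image) {
            return Some(xy);
        }
        // Allocation failed: try doubling the atlas, up to MAX_ATLAS_SIZE.
        // bump_size() clears the cache, so earlier placements are lost here.
        if !cache.bump_size() {
            return None;
        }
    }
}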

View file

@ -18,9 +18,11 @@ use std::ops::Range;
use bytemuck::{Pod, Zeroable};
use moscato::pinot::FontRef;
use peniko::Image;
use super::{
glyph_cache::{CachedRange, GlyphCache, GlyphKey},
image_cache::{ImageCache, Images},
ramp_cache::{RampCache, Ramps},
DrawTag, Encoding, PathTag, StreamOffsets, Transform,
};
@ -144,6 +146,8 @@ pub struct Resolver {
glyph_ranges: Vec<CachedRange>,
glyph_cx: GlyphContext,
ramp_cache: RampCache,
image_cache: ImageCache,
pending_images: Vec<PendingImage>,
patches: Vec<ResolvedPatch>,
}
@ -159,8 +163,9 @@ impl Resolver {
&'a mut self,
encoding: &Encoding,
packed: &mut Vec<u8>,
) -> (Layout, Ramps<'a>) {
) -> (Layout, Ramps<'a>, Images<'a>) {
let sizes = self.resolve_patches(encoding);
self.resolve_pending_images();
let data = packed;
data.clear();
let mut layout = Layout::default();
@ -261,6 +266,26 @@ impl Resolver {
pos = *draw_data_offset + 4;
}
ResolvedPatch::GlyphRun { .. } => {}
ResolvedPatch::Image {
index,
draw_data_offset,
} => {
if pos < *draw_data_offset {
data.extend_from_slice(&encoding.draw_data[pos..*draw_data_offset]);
}
if let Some((x, y)) = self.pending_images[*index].xy {
let xy = (x << 16) | y;
data.extend_from_slice(bytemuck::bytes_of(&xy));
pos = *draw_data_offset + 4;
} else {
// If we get here, we failed to allocate a slot for this image in the atlas.
// In this case, let's zero out the dimensions so we don't attempt to render
// anything.
// TODO: a better strategy: texture array? downsample large images?
data.extend_from_slice(&[0u8; 8]);
pos = *draw_data_offset + 8;
}
}
}
}
if pos < stream.len() {
@ -336,21 +361,26 @@ impl Resolver {
}
layout.n_draw_objects = layout.n_paths;
assert_eq!(capacity, data.len());
(layout, self.ramp_cache.ramps())
(layout, self.ramp_cache.ramps(), self.image_cache.images())
}
fn resolve_patches(&mut self, encoding: &Encoding) -> StreamOffsets {
self.ramp_cache.advance();
self.glyph_cache.clear();
self.glyph_ranges.clear();
self.image_cache.clear();
self.pending_images.clear();
self.patches.clear();
let mut sizes = StreamOffsets::default();
for patch in &encoding.patches {
match patch {
Patch::Ramp { offset, stops } => {
Patch::Ramp {
draw_data_offset,
stops,
} => {
let ramp_id = self.ramp_cache.add(&encoding.color_stops[stops.clone()]);
self.patches.push(ResolvedPatch::Ramp {
draw_data_offset: *offset + sizes.draw_data,
draw_data_offset: *draw_data_offset + sizes.draw_data,
ramp_id,
});
}
@ -414,10 +444,50 @@ impl Resolver {
transform,
});
}
Patch::Image {
draw_data_offset,
image,
} => {
let index = self.pending_images.len();
self.pending_images.push(PendingImage {
image: image.clone(),
xy: None,
});
self.patches.push(ResolvedPatch::Image {
index,
draw_data_offset: *draw_data_offset + sizes.draw_data,
});
}
}
}
sizes
}
fn resolve_pending_images(&mut self) {
self.image_cache.clear();
'outer: loop {
// Loop over the images, attempting to allocate them all into the atlas.
for pending_image in &mut self.pending_images {
if let Some(xy) = self.image_cache.get_or_insert(&pending_image.image) {
pending_image.xy = Some(xy);
} else {
// We failed to allocate. Try to bump the atlas size.
if self.image_cache.bump_size() {
// We were able to increase the atlas size. Restart the outer loop.
continue 'outer;
} else {
// If the atlas is already maximum size, there's nothing we can do. Set
// the xy field to None so this image isn't rendered, then carry on;
// other images might still fit.
pending_image.xy = None;
}
}
}
// If we made it here, we've either successfully allocated all images or we reached
// the maximum atlas size.
break;
}
}
}
#[derive(Clone)]
@ -426,7 +496,7 @@ pub enum Patch {
/// Gradient ramp resource.
Ramp {
/// Byte offset to the ramp id in the draw data stream.
offset: usize,
draw_data_offset: usize,
/// Range of the gradient stops in the resource set.
stops: Range<usize>,
},
@ -435,6 +505,20 @@ pub enum Patch {
/// Index in the glyph run buffer.
index: usize,
},
/// Image resource.
Image {
/// Offset to the atlas coordinates in the draw data stream.
draw_data_offset: usize,
/// Underlying image data.
image: Image,
},
}
/// Image to be allocated in the atlas.
#[derive(Clone, Debug)]
struct PendingImage {
image: Image,
xy: Option<(u32, u32)>,
}
#[derive(Clone, Debug)]
@ -453,6 +537,12 @@ enum ResolvedPatch {
/// Global transform.
transform: Transform,
},
Image {
/// Index of pending image element.
index: usize,
/// Offset to the atlas location in the draw data stream.
draw_data_offset: usize,
},
}
fn slice_size_in_bytes<T: Sized>(slice: &[T], extra: usize) -> usize {

View file

@ -22,8 +22,8 @@ use std::{
};
use wgpu::{
util::DeviceExt, BindGroup, BindGroupLayout, Buffer, BufferUsages, ComputePipeline, Device,
Queue, Texture, TextureAspect, TextureFormat, TextureUsages, TextureView, TextureViewDimension,
BindGroup, BindGroupLayout, Buffer, BufferUsages, ComputePipeline, Device, Queue, Texture,
TextureAspect, TextureUsages, TextureView, TextureViewDimension,
};
pub type Error = Box<dyn std::error::Error>;
@ -89,6 +89,7 @@ pub enum Command {
Upload(BufProxy, Vec<u8>),
UploadUniform(BufProxy, Vec<u8>),
UploadImage(ImageProxy, Vec<u8>),
WriteImage(ImageProxy, [u32; 4], Vec<u8>),
// Discussion question: third argument is vec of resources?
// Maybe use tricks to make more ergonomic?
// Alternative: provide bufs & images as separate sequences
@ -275,11 +276,6 @@ impl Engine {
self.bind_map.insert_buf(buf_proxy, buf);
}
Command::UploadImage(image_proxy, bytes) => {
let buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: bytes,
usage: wgpu::BufferUsages::COPY_SRC,
});
let format = image_proxy.format.to_wgpu();
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: None,
@ -303,23 +299,23 @@ impl Engine {
base_mip_level: 0,
base_array_layer: 0,
array_layer_count: None,
format: Some(TextureFormat::Rgba8Unorm),
format: Some(format),
});
encoder.copy_buffer_to_texture(
wgpu::ImageCopyBuffer {
buffer: &buf,
layout: wgpu::ImageDataLayout {
offset: 0,
bytes_per_row: NonZeroU32::new(image_proxy.width * 4),
rows_per_image: None,
},
},
queue.write_texture(
wgpu::ImageCopyTexture {
texture: &texture,
mip_level: 0,
origin: wgpu::Origin3d { x: 0, y: 0, z: 0 },
aspect: TextureAspect::All,
},
bytes,
wgpu::ImageDataLayout {
offset: 0,
bytes_per_row: NonZeroU32::new(
image_proxy.width * format.describe().block_size as u32,
),
rows_per_image: None,
},
wgpu::Extent3d {
width: image_proxy.width,
height: image_proxy.height,
@ -329,6 +325,32 @@ impl Engine {
self.bind_map
.insert_image(image_proxy.id, texture, texture_view)
}
Command::WriteImage(proxy, [x, y, width, height], data) => {
if let Ok((texture, _)) = self.bind_map.get_or_create_image(*proxy, device) {
let format = proxy.format.to_wgpu();
queue.write_texture(
wgpu::ImageCopyTexture {
texture: &texture,
mip_level: 0,
origin: wgpu::Origin3d { x: *x, y: *y, z: 0 },
aspect: TextureAspect::All,
},
&data[..],
wgpu::ImageDataLayout {
offset: 0,
bytes_per_row: NonZeroU32::new(
*width * format.describe().block_size as u32,
),
rows_per_image: None,
},
wgpu::Extent3d {
width: *width,
height: *height,
depth_or_array_layers: 1,
},
);
}
}
Command::Dispatch(shader_id, wg_size, bindings) => {
// println!("dispatching {:?} with {} bindings", wg_size, bindings.len());
let shader = &self.shaders[shader_id.0];
@ -444,6 +466,19 @@ impl Recording {
image_proxy
}
pub fn write_image(
&mut self,
image: ImageProxy,
x: u32,
y: u32,
width: u32,
height: u32,
data: impl Into<Vec<u8>>,
) {
let data = data.into();
self.push(Command::WriteImage(image, [x, y, width, height], data));
}
pub fn dispatch<R>(&mut self, shader: ShaderId, wg_size: (u32, u32, u32), resources: R)
where
R: IntoIterator,
@ -716,6 +751,44 @@ impl BindMap {
}
}
}
fn get_or_create_image(
&mut self,
proxy: ImageProxy,
device: &Device,
) -> Result<&(Texture, TextureView), Error> {
match self.image_map.entry(proxy.id) {
Entry::Occupied(occupied) => Ok(occupied.into_mut()),
Entry::Vacant(vacant) => {
let format = proxy.format.to_wgpu();
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: None,
size: wgpu::Extent3d {
width: proxy.width,
height: proxy.height,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
usage: TextureUsages::TEXTURE_BINDING | TextureUsages::COPY_DST,
format,
view_formats: &[],
});
let texture_view = texture.create_view(&wgpu::TextureViewDescriptor {
label: None,
dimension: Some(TextureViewDimension::D2),
aspect: TextureAspect::All,
mip_level_count: None,
base_mip_level: 0,
base_array_layer: 0,
array_layer_count: None,
format: Some(proxy.format.to_wgpu()),
});
Ok(vacant.insert((texture, texture_view)))
}
}
}
}
const SIZE_CLASS_BITS: u32 = 1;

View file

@ -33,6 +33,7 @@ struct FineResources {
ptcl_buf: ResourceProxy,
gradient_image: ResourceProxy,
info_bin_data_buf: ResourceProxy,
image_atlas: ResourceProxy,
out_image: ImageProxy,
}
@ -216,7 +217,7 @@ impl Render {
let mut recording = Recording::default();
let mut resolver = Resolver::new();
let mut packed = vec![];
let (layout, ramps) = resolver.resolve(encoding, &mut packed);
let (layout, ramps, images) = resolver.resolve(encoding, &mut packed);
let gradient_image = if ramps.height == 0 {
ResourceProxy::new_image(1, 1, ImageFormat::Rgba8)
} else {
@ -228,6 +229,11 @@ impl Render {
data,
))
};
let image_atlas = if images.images.is_empty() {
ImageProxy::new(1, 1, ImageFormat::Rgba8)
} else {
ImageProxy::new(images.width, images.height, ImageFormat::Rgba8)
};
// TODO: calculate for real when we do rectangles
let n_pathtag = layout.path_tags(&packed).len();
let pathtag_padded = align_up(n_pathtag, 4 * shaders::PATHTAG_REDUCE_WG);
@ -251,6 +257,16 @@ impl Render {
ptcl_size: self.ptcl_size,
layout: layout,
};
for image in images.images {
recording.write_image(
image_atlas,
image.1,
image.2,
image.0.width,
image.0.height,
image.0.data.data(),
);
}
// println!("{:?}", config);
let scene_buf = ResourceProxy::Buf(recording.upload("scene", packed));
let config_buf =
@ -504,6 +520,7 @@ impl Render {
ptcl_buf,
gradient_image,
info_bin_data_buf,
image_atlas: ResourceProxy::Image(image_atlas),
out_image,
});
if robust {
@ -527,6 +544,7 @@ impl Render {
fine.ptcl_buf,
fine.gradient_image,
fine.info_bin_data_buf,
fine.image_atlas,
],
);
recording.free_resource(fine.config_buf);
@ -534,6 +552,7 @@ impl Render {
recording.free_resource(fine.segments_buf);
recording.free_resource(fine.ptcl_buf);
recording.free_resource(fine.gradient_image);
recording.free_resource(fine.image_atlas);
recording.free_resource(fine.info_bin_data_buf);
}

View file

@ -15,7 +15,7 @@
// Also licensed under MIT license, at your choice.
use peniko::kurbo::{Affine, Rect, Shape};
use peniko::{BlendMode, BrushRef, Color, Fill, Font, Stroke, StyleRef};
use peniko::{BlendMode, BrushRef, Color, Fill, Font, Image, Stroke, StyleRef};
use crate::encoding::{Encoding, Glyph, GlyphRun, Patch, Transform};
@ -168,6 +168,17 @@ impl<'a> SceneBuilder<'a> {
}
}
/// Draws an image at its natural size with the given transform.
pub fn draw_image(&mut self, image: &Image, transform: Affine) {
self.fill(
Fill::NonZero,
transform,
image,
None,
&Rect::new(0.0, 0.0, image.width as f64, image.height as f64),
);
}
/// Returns a builder for encoding a glyph run.
pub fn draw_glyphs(&mut self, font: &Font) -> DrawGlyphs {
DrawGlyphs::new(self.scene, font)

View file

@ -351,6 +351,7 @@ pub fn full_shaders(device: &Device, engine: &mut Engine) -> Result<FullShaders,
BindType::BufReadOnly,
BindType::ImageRead(ImageFormat::Rgba8),
BindType::BufReadOnly,
BindType::ImageRead(ImageFormat::Rgba8),
],
)?;
Ok(FullShaders {