Skip to content

Commit

Permalink
Update cosmic-text and glyphon
Browse files Browse the repository at this point in the history
  • Loading branch information
hecrj committed Jul 7, 2023
1 parent 7f805bc commit 8db0df4
Show file tree
Hide file tree
Showing 4 changed files with 21 additions and 61 deletions.
5 changes: 1 addition & 4 deletions tiny_skia/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ geometry = ["iced_graphics/geometry"]
raw-window-handle = "0.5"
softbuffer = "0.2"
tiny-skia = "0.9"
cosmic-text = "0.9"
bytemuck = "1"
rustc-hash = "1.1"
kurbo = "0.9"
Expand All @@ -21,10 +22,6 @@ log = "0.4"
version = "0.8"
path = "../graphics"

[dependencies.cosmic-text]
git = "https://github.com/hecrj/cosmic-text.git"
rev = "c3cd24dc972bb8fd55d016c81ac9fa637e0a4ada"

[dependencies.twox-hash]
version = "1.6"
default-features = false
Expand Down
2 changes: 1 addition & 1 deletion wgpu/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ path = "../graphics"
[dependencies.glyphon]
version = "0.2"
git = "https://github.com/hecrj/glyphon.git"
rev = "8324f20158a62f8520bad4ed09f6aa5552f8f2a6"
rev = "886f47c0a9905af340b07a488c953ac00c4bf370"

[dependencies.glam]
version = "0.24"
Expand Down
53 changes: 13 additions & 40 deletions wgpu/src/backend.rs
Original file line number Diff line number Diff line change
Expand Up @@ -94,18 +94,11 @@ impl Backend {
queue,
encoder,
scale_factor,
target_size,
transformation,
&layers,
);

while !self.prepare_text(
device,
queue,
scale_factor,
target_size,
&layers,
) {}

self.render(
device,
encoder,
Expand All @@ -124,44 +117,13 @@ impl Backend {
self.image_pipeline.end_frame();
}

// Uploads each layer's text sections into the glyph pipeline ahead of the
// render pass.
//
// Returns `false` when a layer could not be prepared — per the
// `AtlasFull` handling in `text::Pipeline::prepare`, this presumably means
// the glyph atlas filled up and was grown mid-frame — so the caller retries
// the whole pass (see the `while !self.prepare_text(...) {}` loop above).
// Returns `true` once every layer was prepared successfully.
fn prepare_text(
&mut self,
device: &wgpu::Device,
queue: &wgpu::Queue,
scale_factor: f32,
target_size: Size<u32>,
layers: &[Layer<'_>],
) -> bool {
for layer in layers {
// Scale logical layer bounds to physical pixels and snap to the
// integer pixel grid.
let bounds = (layer.bounds * scale_factor).snap();

// Degenerate (sub-pixel) layers have nothing visible to shape or
// rasterize — skip them.
if bounds.width < 1 || bounds.height < 1 {
continue;
}

// Note: the pipeline receives the *unscaled* `layer.bounds` along
// with `scale_factor`; only the early-out above uses the snapped
// physical bounds.
if !layer.text.is_empty()
&& !self.text_pipeline.prepare(
device,
queue,
&layer.text,
layer.bounds,
scale_factor,
target_size,
)
{
// Preparation failed; bail out so the caller restarts
// preparation from the first layer.
return false;
}
}

true
}

fn prepare(
&mut self,
device: &wgpu::Device,
queue: &wgpu::Queue,
_encoder: &mut wgpu::CommandEncoder,
scale_factor: f32,
target_size: Size<u32>,
transformation: Transformation,
layers: &[Layer<'_>],
) {
Expand Down Expand Up @@ -210,6 +172,17 @@ impl Backend {
);
}
}

if !layer.text.is_empty() {
self.text_pipeline.prepare(
device,
queue,
&layer.text,
layer.bounds,
scale_factor,
target_size,
);
}
}
}

Expand Down
22 changes: 6 additions & 16 deletions wgpu/src/text.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ impl Pipeline {
.into_iter(),
)),
renderers: Vec::new(),
atlas: glyphon::TextAtlas::new(
atlas: glyphon::TextAtlas::with_color_mode(
device,
queue,
format,
Expand Down Expand Up @@ -66,7 +66,7 @@ impl Pipeline {
bounds: Rectangle,
scale_factor: f32,
target_size: Size<u32>,
) -> bool {
) {
if self.renderers.len() <= self.prepare_layer {
self.renderers.push(glyphon::TextRenderer::new(
&mut self.atlas,
Expand Down Expand Up @@ -188,21 +188,11 @@ impl Pipeline {
match result {
Ok(()) => {
self.prepare_layer += 1;

true
}
Err(glyphon::PrepareError::AtlasFull(content_type)) => {
self.prepare_layer = 0;

#[allow(clippy::needless_bool)]
if self.atlas.grow(device, content_type) {
false
} else {
// If the atlas cannot grow, then all bets are off.
// Instead of panicking, we will just pray that the result
// will be somewhat readable...
true
}
Err(glyphon::PrepareError::AtlasFull) => {
// If the atlas cannot grow, then all bets are off.
// Instead of panicking, we will just pray that the result
// will be somewhat readable...
}
}
}
Expand Down

0 comments on commit 8db0df4

Please sign in to comment.