This commit is contained in:
janis 2025-09-20 15:27:21 +02:00
parent cdb18da722
commit fd4e92c69e
33 changed files with 1163 additions and 218 deletions

View file

@ -1,3 +1,9 @@
[target.x86_64-unknown-linux-gnu] [target.x86_64-unknown-linux-gnu]
linker = "clang" linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=/usr/bin/mold"] rustflags = [
"-Clink-arg=-fuse-ld=mold",
# Nightly
"-Zshare-generics=y",
"-Zthreads=0",
]

View file

@ -1,7 +1,7 @@
[package] [package]
name = "game" name = "game"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2024"
[dependencies] [dependencies]
winit = { workspace = true } winit = { workspace = true }
@ -15,4 +15,4 @@ renderer = { path = "../renderer" }
egui = { workspace = true } egui = { workspace = true }
egui_winit_platform = { workspace = true } egui_winit_platform = { workspace = true }
egui_demo_lib = "0.30.0" egui_demo_lib = "0.32"

View file

@ -1,8 +1,7 @@
#![feature(result_flattening)]
use std::collections::BTreeMap; use std::collections::BTreeMap;
use rand::{Rng, SeedableRng}; use rand::{Rng, SeedableRng};
use renderer::{render_graph, swapchain::WindowSurface, Renderer2}; use renderer::{Renderer2, render_graph, swapchain::WindowSurface};
use tracing::info; use tracing::info;
use tracing_subscriber::EnvFilter; use tracing_subscriber::EnvFilter;
use winit::{ use winit::{
@ -95,7 +94,7 @@ impl WinitState {
use renderer::device::DeviceOwned; use renderer::device::DeviceOwned;
let [r, g, b]: [f32; 3] = let [r, g, b]: [f32; 3] =
rand::prelude::StdRng::seed_from_u64(window.surface.surface.handle().as_raw()) rand::prelude::StdRng::seed_from_u64(window.surface.surface.handle().as_raw())
.gen(); .random();
render_graph::clear_pass(rg, renderer::util::Rgba([r, g, b, 1.0]), framebuffer); render_graph::clear_pass(rg, renderer::util::Rgba([r, g, b, 1.0]), framebuffer);
egui_pre_pass( egui_pre_pass(
&dev, &dev,

View file

@ -1,7 +1,7 @@
[package] [package]
name = "renderer" name = "renderer"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2024"
[dependencies] [dependencies]
tinyvec = { workspace = true } tinyvec = { workspace = true }

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.6 KiB

View file

@ -6,3 +6,7 @@ SLANGC="/opt/shader-slang-bin/bin/slangc"
$SLANGC egui.slang -profile glsl_450 -target spirv -o egui_vert.spv -entry vertex $SLANGC egui.slang -profile glsl_450 -target spirv -o egui_vert.spv -entry vertex
$SLANGC egui.slang -profile glsl_450 -target spirv -o egui_frag.spv -entry fragment $SLANGC egui.slang -profile glsl_450 -target spirv -o egui_frag.spv -entry fragment
$SLANGC egui.slang -profile glsl_450 -target spirv -entry vertex -entry fragment -o egui.spv
$SLANGC wireframe.slang -profile glsl_450 -target spirv -entry vertex -entry fragment -o wireframe.spv
$SLANGC font.slang -profile glsl_450 -target spirv -entry vertex -entry fragment -o font.spv
$SLANGC font.slang -profile glsl_450 -target spirv -entry mesh -entry task -entry fragment_barycentric -o font_mesh.spv

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -0,0 +1,134 @@
// Input vertex for the plain glyph pipeline: a 2D position in the glyph's
// local space (scaled and offset by the push constant in the vertex stage).
struct VertexIn {
[[vk::layout(0)]] float2 pos;
}
// Vertex-stage output / fragment-stage input.
struct VertexOut {
[[vk::layout(0)]] float4 color;
float4 position : SV_Position;
}
// Per-draw parameters: render-target size in pixels, plus the glyph's
// pixel-space position and uniform scale factor.
struct PushConstant {
float2 screen_size;
float2 position;
float size;
}
[[vk::push_constant]]
ConstantBuffer<PushConstant> push_constant;
// Vertex stage: scales and translates the incoming vertex by the push-constant
// size/position into pixel space, then maps that pixel coordinate to NDC
// ([-1, 1] on both axes). Color is a constant yellow for now.
[shader("vertex")]
VertexOut vertex(VertexIn vertex) {
    // Pixel-space position of this vertex.
    const float2 pixel = push_constant.position + vertex.pos * push_constant.size;
    // Map [0, screen_size] -> [-1, 1], component-wise.
    const float2 ndc = 2.0 * pixel / push_constant.screen_size - 1.0;

    VertexOut result;
    result.position = float4(ndc.x, ndc.y, 0.0, 1.0);
    result.color = float4(1.0, 1.0, 0.0, 1.0);
    return result;
}
// Single render-target color output.
struct FragmentOut {
float4 color : SV_Target;
}
// Pass-through fragment stage: forwards the interpolated vertex color.
// NOTE(review): the `bary` parameter is unused in this entry point; only
// `fragment_barycentric` below consumes barycentrics — confirm it is needed here.
[shader("fragment")]
FragmentOut fragment(VertexOut input, float3 bary: SV_BARYCENTRICS) {
FragmentOut output;
output.color = input.color;
return output;
}
// Offsets and counts locating one glyph's geometry inside the shared
// vertex/index buffers.
struct GlyphDesc {
uint vertex_offset;
uint index_offset;
uint vertex_count;
uint index_count;
}
// Triangle classification for curve rendering (Loop/Blinn-style):
// SOLID = fully-covered interior triangle, CONVEX/CONCAVE = triangle carrying
// a quadratic Bezier edge.
// NOTE(review): all three constants are 0, so `fragment_barycentric` cannot
// distinguish the kinds — both discard branches compare against the same
// value. Presumably these should be distinct (e.g. 0/1/2); confirm against
// the CPU-side encoding before relying on the discard logic.
static const uint SOLID = 0;
static const uint CONVEX = 0;
static const uint CONCAVE = 0;
// Per-vertex output of the mesh shader.
struct MeshOut {
float4 pos : SV_Position;
}
// Work assignment for one mesh-shader thread.
struct MeshInvocation {
uint glyph_id;
uint vertex_triangle_offsets;
}
// One entry per thread of the 32-wide mesh workgroup.
struct MeshIn {
MeshInvocation glyph_id[32];
}
// Per-primitive attribute: the triangle's curve classification (see the
// SOLID/CONVEX/CONCAVE constants above).
struct PrimOut {
uint kind : BLENDINDICES0;
}
[[vk::binding(0)]]
StructuredBuffer<GlyphDesc> glyphs;
[[vk::binding(1)]]
StructuredBuffer<uint> glyphs_ids;
// Mesh-shader entry point (stub): declares the output limits (126 triangles /
// 64 vertices per workgroup) but currently emits no geometry.
[shader("mesh")]
[numthreads(32, 1, 1)]
[outputtopology("triangle")]
void mesh(uint3 gid: SV_GroupID, // dispatched group id
uint3 tid: SV_GroupThreadID, // global thread id
uint ti: SV_GroupIndex, // local group thread index
out indices uint3 triangles[126],
out vertices MeshOut vertices[64],
out primitives PrimOut attr[126],
in MeshIn mesh,
) {
// Zero output counts make this workgroup a no-op for now.
SetMeshOutputCounts(0, 0);
// we have a limited number of verts/triangles we can output
// if a glyph exceeds the 126 triangles/64 verts limit, then we need more than
// one thread building geometry.
// all threads sharing one glyph should be in the same subgroup/wave.
}
// Fragment input for the barycentric curve-evaluation path.
struct FragmentIn2 {
float4 pos : SV_Position;
// `sample`-interpolated so the curve is evaluated per sample under MSAA.
sample float3 bary : SV_BARYCENTRICS;
uint kind : BLENDINDICES0;
}
// Maps a barycentric coordinate to quadratic-Bezier UV space by blending the
// per-control-point uv values (0,0), (0.5,0), (1,1) with the barycentrics.
float2 computeUV(const float3 bary)
{
    const float u = bary.y * 0.5f + bary.z;
    const float v = bary.z;
    return float2(u, v);
}
// Implicit function of the canonical quadratic Bezier in uv space: u^2 - v.
// Zero on the curve; the sign tells which side of the curve a sample is on.
float computeQuadraticBezierFunction(const float2 uv)
{
    const float u = uv.x;
    return u * u - uv.y;
}
// Fragment stage for curve triangles: evaluates the quadratic-Bezier implicit
// function at this sample's barycentrics and discards samples on the wrong
// side of the curve for the triangle's kind. (Local renamed from `sign`,
// which shadowed the `sign` intrinsic.)
[shader("fragment")]
FragmentOut fragment_barycentric(FragmentIn2 input) {
    const float2 uv = computeUV(input.bary);
    const float f = computeQuadraticBezierFunction(uv);

    const bool outside_convex = (input.kind == CONVEX) && f > 0.0f;
    const bool inside_concave = (input.kind == CONCAVE) && f < 0.0f;
    if (outside_convex || inside_concave) {
        discard;
    }

    FragmentOut result;
    result.color = float4(1.0, 0.0, 0.0, 1.0);
    return result;
}

Binary file not shown.

Binary file not shown.

View file

@ -0,0 +1,115 @@
const std = @import("std");
// Although this function looks imperative, note that its job is to
// declaratively construct a build graph that will be executed by an external
// runner.
pub fn build(b: *std.Build) void {
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options
// for restricting supported target set are available.
const target = b.standardTargetOptions(.{});
// Standard optimization options allow the person running `zig build` to select
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. Here we do not
// set a preferred release mode, allowing the user to decide how to optimize.
const optimize = b.standardOptimizeOption(.{});
// This creates a "module", which represents a collection of source files alongside
// some compilation options, such as optimization mode and linked system libraries.
// Every executable or library we compile will be based on one or more modules.
const lib_mod = b.createModule(.{
// `root_source_file` is the Zig "entry point" of the module. If a module
// only contains e.g. external object files, you can make this `null`.
// In this case the main source file is merely a path, however, in more
// complicated build scripts, this could be a generated file.
.root_source_file = b.path("src/root.zig"),
.target = target,
.optimize = optimize,
});
// We will also create a module for our other entry point, 'main.zig'.
const exe_mod = b.createModule(.{
// `root_source_file` is the Zig "entry point" of the module. If a module
// only contains e.g. external object files, you can make this `null`.
// In this case the main source file is merely a path, however, in more
// complicated build scripts, this could be a generated file.
.root_source_file = b.path("src/main.zig"),
.target = target,
.optimize = optimize,
});
// Modules can depend on one another using the `std.Build.Module.addImport` function.
// This is what allows Zig source code to use `@import("foo")` where 'foo' is not a
// file path. In this case, we set up `exe_mod` to import `lib_mod`.
exe_mod.addImport("shader_builder_lib", lib_mod);
// Now, we will create a static library based on the module we created above.
// This creates a `std.Build.Step.Compile`, which is the build step responsible
// for actually invoking the compiler.
// NOTE(review): `addStaticLibrary`/`addExecutable` taking a `.root_module`
// field is the Zig 0.14-era API — confirm this matches the pinned toolchain.
const lib = b.addStaticLibrary(.{
.name = "shader_builder",
.root_module = lib_mod,
});
// This declares intent for the library to be installed into the standard
// location when the user invokes the "install" step (the default step when
// running `zig build`).
b.installArtifact(lib);
// This creates another `std.Build.Step.Compile`, but this one builds an executable
// rather than a static library.
const exe = b.addExecutable(.{
.name = "shader_builder",
.root_module = exe_mod,
});
// This declares intent for the executable to be installed into the
// standard location when the user invokes the "install" step (the default
// step when running `zig build`).
b.installArtifact(exe);
// This *creates* a Run step in the build graph, to be executed when another
// step is evaluated that depends on it. The next line below will establish
// such a dependency.
const run_cmd = b.addRunArtifact(exe);
// By making the run step depend on the install step, it will be run from the
// installation directory rather than directly from within the cache directory.
// This is not necessary, however, if the application depends on other installed
// files, this ensures they will be present and in the expected location.
run_cmd.step.dependOn(b.getInstallStep());
// This allows the user to pass arguments to the application in the build
// command itself, like this: `zig build run -- arg1 arg2 etc`
if (b.args) |args| {
run_cmd.addArgs(args);
}
// This creates a build step. It will be visible in the `zig build --help` menu,
// and can be selected like this: `zig build run`
// This will evaluate the `run` step rather than the default, which is "install".
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const lib_unit_tests = b.addTest(.{
.root_module = lib_mod,
});
const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);
const exe_unit_tests = b.addTest(.{
.root_module = exe_mod,
});
const run_exe_unit_tests = b.addRunArtifact(exe_unit_tests);
// Similar to creating the run step earlier, this exposes a `test` step to
// the `zig build --help` menu, providing a way for the user to request
// running the unit tests.
const test_step = b.step("test", "Run unit tests");
test_step.dependOn(&run_lib_unit_tests.step);
test_step.dependOn(&run_exe_unit_tests.step);
}

View file

@ -0,0 +1,73 @@
.{
// This is the default name used by packages depending on this one. For
// example, when a user runs `zig fetch --save <url>`, this field is used
// as the key in the `dependencies` table. Although the user can choose a
// different name, most users will stick with this provided value.
//
// It is redundant to include "zig" in this name because it is already
// within the Zig package namespace.
.name = "shader_builder",
// This is a [Semantic Version](https://semver.org/).
// In a future version of Zig it will be used for package deduplication.
.version = "0.0.0",
// This field is optional.
// This is currently advisory only; Zig does not yet do anything
// with this value.
//.minimum_zig_version = "0.11.0",
// This field is optional.
// Each dependency must either provide a `url` and `hash`, or a `path`.
// `zig build --fetch` can be used to fetch all dependencies of a package, recursively.
// Once all dependencies are fetched, `zig build` no longer requires
// internet connectivity.
.dependencies = .{
// See `zig fetch --save <url>` for a command-line interface for adding dependencies.
//.example = .{
// // When updating this field to a new URL, be sure to delete the corresponding
// // `hash`, otherwise you are communicating that you expect to find the old hash at
// // the new URL. If the contents of a URL change this will result in a hash mismatch
// // which will prevent zig from using it.
// .url = "https://example.com/foo.tar.gz",
//
// // This is computed from the file contents of the directory of files that is
// // obtained after fetching `url` and applying the inclusion rules given by
// // `paths`.
// //
// // This field is the source of truth; packages do not come from a `url`; they
// // come from a `hash`. `url` is just one of many possible mirrors for how to
// // obtain a package matching this `hash`.
// //
// // Uses the [multihash](https://multiformats.io/multihash/) format.
// .hash = "...",
//
// // When this is provided, the package is found in a directory relative to the
// // build root. In this case the package's hash is irrelevant and therefore not
// // computed. This field and `url` are mutually exclusive.
// .path = "foo",
//
// // When this is set to `true`, a package is declared to be lazily
// // fetched. This makes the dependency only get fetched if it is
// // actually used.
// .lazy = false,
//},
},
// Specifies the set of files and directories that are included in this package.
// Only files and directories listed here are included in the `hash` that
// is computed for this package. Only files listed here will remain on disk
// when using the zig package manager. As a rule of thumb, one should list
// files required for compilation plus any license(s).
// Paths are relative to the build root. Use the empty string (`""`) to refer to
// the build root itself.
// A directory listed here means that all files within, recursively, are included.
.paths = .{
"build.zig",
"build.zig.zon",
"src",
// For example...
//"LICENSE",
//"README.md",
},
}

View file

@ -0,0 +1,45 @@
//! By convention, main.zig is where your main function lives in the case that
//! you are building an executable. If you are making a library, the convention
//! is to delete this file and start with root.zig instead.
pub fn main() !void {
// Prints to stderr (it's a shortcut based on `std.io.getStdErr()`)
std.debug.print("All your {s} are belong to us.\n", .{"codebase"});
// stdout is for the actual output of your application, for example if you
// are implementing gzip, then only the compressed bytes should be sent to
// stdout, not any debugging messages.
const stdout_file = std.io.getStdOut().writer();
var bw = std.io.bufferedWriter(stdout_file);
const stdout = bw.writer();
try stdout.print("Run `zig build test` to run the tests.\n", .{});
try bw.flush(); // Don't forget to flush!
}
test "simple test" {
var list = std.ArrayList(i32).init(std.testing.allocator);
defer list.deinit(); // Try commenting this out and see if zig detects the memory leak!
try list.append(42);
try std.testing.expectEqual(@as(i32, 42), list.pop());
}
// Exercises the library module wired up as "shader_builder_lib" in build.zig.
test "use other module" {
try std.testing.expectEqual(@as(i32, 150), lib.add(100, 50));
}
test "fuzz example" {
const global = struct {
fn testOne(input: []const u8) anyerror!void {
// Try passing `--fuzz` to `zig build test` and see if it manages to fail this test case!
try std.testing.expect(!std.mem.eql(u8, "canyoufindme", input));
}
};
// NOTE(review): `std.testing.fuzz`'s signature differs between Zig releases
// (newer versions take a context argument before the function) — confirm this
// two-argument form compiles with the toolchain this project pins.
try std.testing.fuzz(global.testOne, .{});
}
// Zig allows declarations after their uses; imports live at the bottom here.
const std = @import("std");
/// This imports the separate module containing `root.zig`. Take a look in `build.zig` for details.
const lib = @import("shader_builder_lib");

View file

@ -0,0 +1,13 @@
//! By convention, root.zig is the root source file when making a library. If
//! you are making an executable, the convention is to delete this file and
//! start with main.zig instead.
const std = @import("std");
const testing = std.testing;
/// Adds two 32-bit signed integers. Declared `export` (C linkage) so the
/// symbol is visible from the static library artifact.
pub export fn add(a: i32, b: i32) i32 {
    return b + a;
}

test "basic add functionality" {
    try testing.expectEqual(@as(i32, 10), add(3, 7));
}

Binary file not shown.

View file

@ -1,4 +1,4 @@
use std::sync::{atomic::AtomicU8, Arc}; use std::sync::{Arc, atomic::AtomicU8};
use crate::{ use crate::{
define_device_owned_handle, define_device_owned_handle,
@ -63,11 +63,11 @@ impl SingleUseCommandPool {
&self.queue &self.queue
} }
/// get the underlying pool, bypassing the mutex // /// get the underlying pool, bypassing the mutex
#[allow(dead_code)] // #[allow(dead_code)]
pub unsafe fn pool(&self) -> vk::CommandPool { // pub unsafe fn pool(&self) -> vk::CommandPool {
self.pool.data_ptr().read() // self.pool.data_ptr().read()
} // }
} }
pub trait HasQueue: DeviceOwned<vk::CommandBuffer> { pub trait HasQueue: DeviceOwned<vk::CommandBuffer> {
@ -643,11 +643,10 @@ mod command_pools {
use thread_local::ThreadLocal; use thread_local::ThreadLocal;
use crate::{ use crate::{
define_device_owned_handle, Queue, define_device_owned_handle,
device::{Device, DeviceOwned}, device::{Device, DeviceOwned},
sync, sync,
util::MutexExt, util::MutexExt,
Queue,
}; };
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]

View file

@ -1,12 +1,14 @@
use ash::vk; use ash::vk;
use tracing::{event, Level}; use tracing::{Level, event};
/// # Safety
/// `str` must be a valid null-terminated C string or null.
unsafe fn str_from_raw_parts<'a>(str: *const i8) -> std::borrow::Cow<'a, str> { unsafe fn str_from_raw_parts<'a>(str: *const i8) -> std::borrow::Cow<'a, str> {
use std::{borrow::Cow, ffi}; use std::{borrow::Cow, ffi};
if str.is_null() { if str.is_null() {
Cow::from("") Cow::from("")
} else { } else {
ffi::CStr::from_ptr(str).to_string_lossy() unsafe { ffi::CStr::from_ptr(str).to_string_lossy() }
} }
} }
@ -17,11 +19,19 @@ pub(super) unsafe extern "system" fn debug_callback(
user_data: *mut core::ffi::c_void, user_data: *mut core::ffi::c_void,
) -> vk::Bool32 { ) -> vk::Bool32 {
_ = user_data; _ = user_data;
let callback_data = *callback_data; // SAFETY:
// - `callback_data` is a valid pointer to `DebugUtilsMessengerCallbackDataEXT`.
// - `p_message_id_name` is a valid null-terminated C string or NULL.
// - `p_message` is a valid null-terminated C string or NULL.
let (message_id_number, message_id_name, message) = unsafe {
let callback_data = &*callback_data;
let message_id_number = callback_data.message_id_number; let message_id_number = callback_data.message_id_number;
let message_id_name = str_from_raw_parts(callback_data.p_message_id_name); let message_id_name = str_from_raw_parts(callback_data.p_message_id_name);
let message = str_from_raw_parts(callback_data.p_message); let message = str_from_raw_parts(callback_data.p_message);
(message_id_number, message_id_name, message)
};
match message_severity { match message_severity {
vk::DebugUtilsMessageSeverityFlagsEXT::ERROR => { vk::DebugUtilsMessageSeverityFlagsEXT::ERROR => {

View file

@ -12,11 +12,11 @@ use ash::{
vk::{self, Handle}, vk::{self, Handle},
}; };
use raw_window_handle::RawDisplayHandle; use raw_window_handle::RawDisplayHandle;
use tinyvec::{array_vec, ArrayVec}; use tinyvec::{ArrayVec, array_vec};
use crate::{ use crate::{
make_extention_properties, sync, Error, ExtendsDeviceProperties2Debug, Instance, Error, ExtendsDeviceProperties2Debug, Instance, PhysicalDevice, PhysicalDeviceFeatures,
PhysicalDevice, PhysicalDeviceFeatures, PhysicalDeviceProperties, Queue, Result, VkNameList, PhysicalDeviceProperties, Queue, Result, VkNameList, make_extention_properties, sync,
}; };
#[derive(Debug, Default)] #[derive(Debug, Default)]
@ -26,6 +26,7 @@ pub struct DeviceQueueFamilies {
pub(crate) present: (u32, u32), pub(crate) present: (u32, u32),
pub(crate) async_compute: (u32, u32), pub(crate) async_compute: (u32, u32),
pub(crate) transfer: (u32, u32), pub(crate) transfer: (u32, u32),
#[expect(dead_code)]
pub(crate) properties: Box<[vk::QueueFamilyProperties]>, pub(crate) properties: Box<[vk::QueueFamilyProperties]>,
} }

View file

@ -4,17 +4,17 @@ use ash::{prelude::VkResult, vk};
use indexmap::IndexMap; use indexmap::IndexMap;
use crate::{ use crate::{
EguiState,
buffers::{Buffer, BufferDesc}, buffers::{Buffer, BufferDesc},
commands::traits::CommandBufferExt, commands::traits::CommandBufferExt,
device::{self, DeviceOwned}, device::{self, DeviceOwned},
images::{Image, ImageDesc, ImageViewDesc}, images::{Image, ImageDesc, ImageViewDesc},
render_graph::{ render_graph::{
buffer_barrier, image_barrier, Access, Barrier, GraphResourceDesc, GraphResourceId, Access, Barrier, GraphResourceDesc, GraphResourceId, PassDesc, RecordFn, RenderContext,
PassDesc, RecordFn, RenderContext, RenderGraph, RenderGraph, buffer_barrier, image_barrier,
}, },
texture, texture,
util::Rect2D, util::Rect2D,
EguiState,
}; };
pub fn egui_pre_pass( pub fn egui_pre_pass(
@ -77,7 +77,7 @@ pub fn egui_pre_pass(
delta.image.width() as u32, delta.image.width() as u32,
delta.image.height() as u32, delta.image.height() as u32,
)); ));
buffer = buffer + bytes; buffer += bytes;
(buffer, image) (buffer, image)
}, },
@ -125,7 +125,7 @@ pub fn egui_pre_pass(
let mut staging_map = staging_buffer.map()?; let mut staging_map = staging_buffer.map()?;
let mut offset = 0; let mut offset = 0;
let aliased_images = output output
.textures_delta .textures_delta
.set .set
.iter() .iter()
@ -144,12 +144,6 @@ pub fn egui_pre_pass(
}; };
mem[..slice.len()].copy_from_slice(slice); mem[..slice.len()].copy_from_slice(slice);
} }
egui::ImageData::Font(font_image) => {
for (i, c) in font_image.srgba_pixels(None).enumerate() {
let bytes = c.to_array();
mem[i * 4..(i + 1) * 4].copy_from_slice(&bytes);
}
}
} }
let old_offset = offset; let old_offset = offset;
@ -162,11 +156,9 @@ pub fn egui_pre_pass(
); );
(*id, (old_offset, bytes, rect)) (*id, (old_offset, bytes, rect))
}) })
.collect::<BTreeMap<_, _>>(); .collect::<BTreeMap<_, _>>()
// let tessellated = egui.tessellate(output.shapes, output.pixels_per_point); // let tessellated = egui.tessellate(output.shapes, output.pixels_per_point);
aliased_images
}; };
let textures = output let textures = output
@ -206,8 +198,7 @@ pub fn egui_pre_pass(
tracing::trace!( tracing::trace!(
"record-prepass: fetching alias of prepass staging image id={id:?}" "record-prepass: fetching alias of prepass staging image id={id:?}"
); );
let alias = unsafe { let alias = staging_image.get_alias(ImageDesc {
staging_image.get_alias(ImageDesc {
name: Some(format!("egui-prepass-staging-aliased-{id:?}v").into()), name: Some(format!("egui-prepass-staging-aliased-{id:?}v").into()),
format: vk::Format::R8G8B8A8_UNORM, format: vk::Format::R8G8B8A8_UNORM,
extent: vk::Extent3D { extent: vk::Extent3D {
@ -215,12 +206,10 @@ pub fn egui_pre_pass(
height: rect.height() as u32, height: rect.height() as u32,
depth: 1, depth: 1,
}, },
usage: vk::ImageUsageFlags::TRANSFER_SRC usage: vk::ImageUsageFlags::TRANSFER_SRC | vk::ImageUsageFlags::TRANSFER_DST,
| vk::ImageUsageFlags::TRANSFER_DST,
queue_families: device::QueueFlags::empty(), queue_families: device::QueueFlags::empty(),
..Default::default() ..Default::default()
})? })?;
};
let texture = textures.get(&id).and_then(|id| ctx.get_image(*id)).unwrap(); let texture = textures.get(&id).and_then(|id| ctx.get_image(*id)).unwrap();
@ -381,8 +370,8 @@ pub fn egui_pre_pass(
] ]
.to_vec(), .to_vec(),
writes: textures writes: textures
.iter() .values()
.map(|(_, id)| { .map(|id| {
( (
*id, *id,
Access { Access {
@ -398,6 +387,7 @@ pub fn egui_pre_pass(
} }
// fn egui_pass() // fn egui_pass()
#[allow(clippy::too_many_arguments)]
pub fn egui_pass( pub fn egui_pass(
dev: &device::Device, dev: &device::Device,
rg: &mut RenderGraph, rg: &mut RenderGraph,
@ -535,8 +525,8 @@ pub fn egui_pass(
.values() .values()
.map(|entry| { .map(|entry| {
let texture = texture_handler.get_texture(entry.id).unwrap(); let texture = texture_handler.get_texture(entry.id).unwrap();
let info = vk::DescriptorImageInfo { vk::DescriptorImageInfo {
sampler: samplers.get_sampler(entry.into_sampler_desc()).unwrap(), sampler: samplers.get_sampler(entry.as_sampler_desc()).unwrap(),
image_view: texture image_view: texture
.get_view(ImageViewDesc { .get_view(ImageViewDesc {
kind: vk::ImageViewType::TYPE_2D, kind: vk::ImageViewType::TYPE_2D,
@ -548,9 +538,7 @@ pub fn egui_pass(
}) })
.unwrap(), .unwrap(),
image_layout: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL, image_layout: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,
}; }
info
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();

View file

@ -276,10 +276,12 @@ impl Image {
.unwrap_or_else(|| self.clone()) .unwrap_or_else(|| self.clone())
} }
pub unsafe fn get_alias(self: &Arc<Self>, desc: ImageDesc) -> VkResult<Arc<Self>> { // TODO: figure out how to make this safer
self.get_parent_or_self().get_alias_inner(desc) pub fn get_alias(self: &Arc<Self>, desc: ImageDesc) -> VkResult<Arc<Self>> {
unsafe { self.get_parent_or_self().get_alias_inner(desc) }
} }
/// # Safety
/// must only be called on the primogenitor of an image. /// must only be called on the primogenitor of an image.
/// get the primogenitor with [`Self::get_parent_or_self()`] /// get the primogenitor with [`Self::get_parent_or_self()`]
unsafe fn get_alias_inner(self: Arc<Self>, desc: ImageDesc) -> VkResult<Arc<Image>> { unsafe fn get_alias_inner(self: Arc<Self>, desc: ImageDesc) -> VkResult<Arc<Image>> {

View file

@ -1,7 +1,5 @@
#![feature( #![feature(
c_str_module,
closure_lifetime_binder, closure_lifetime_binder,
let_chains,
negative_impls, negative_impls,
map_try_insert, map_try_insert,
debug_closure_helpers, debug_closure_helpers,
@ -15,9 +13,9 @@ use raw_window_handle::{RawDisplayHandle, RawWindowHandle};
use parking_lot::{Mutex, MutexGuard}; use parking_lot::{Mutex, MutexGuard};
use ash::{ use ash::{
Entry,
prelude::VkResult, prelude::VkResult,
vk::{self}, vk::{self},
Entry,
}; };
use dyn_clone::DynClone; use dyn_clone::DynClone;
@ -44,7 +42,7 @@ use device::{Device, DeviceOwned, DeviceQueueFamilies};
mod texture { mod texture {
use std::{collections::BTreeMap, sync::Arc}; use std::{collections::BTreeMap, sync::Arc};
use crate::{def_monotonic_id, images::Image, Device}; use crate::{Device, def_monotonic_id, images::Image};
def_monotonic_id!(pub TextureId); def_monotonic_id!(pub TextureId);
@ -318,13 +316,10 @@ impl PhysicalDeviceFeatures {
} }
fn supports_extension(&self, e: &vk::ExtensionProperties) -> bool { fn supports_extension(&self, e: &vk::ExtensionProperties) -> bool {
self.device_extensions self.device_extensions.iter().any(|ext| {
.iter()
.find(|ext| {
ext.extension_name_as_c_str() == e.extension_name_as_c_str() ext.extension_name_as_c_str() == e.extension_name_as_c_str()
&& ext.spec_version >= e.spec_version && ext.spec_version >= e.spec_version
}) })
.is_some()
} }
fn compatible_with(&self, device: &Self) -> bool { fn compatible_with(&self, device: &Self) -> bool {
@ -339,9 +334,9 @@ impl PhysicalDeviceFeatures {
.device_extensions .device_extensions
.iter() .iter()
.filter(|ext| { .filter(|ext| {
!device_extensions device_extensions
.binary_search_by(|t| sort_exts(t, ext)) .binary_search_by(|t| sort_exts(t, ext))
.is_ok() .is_err()
}) })
.cloned() .cloned()
.collect::<Vec<_>>(); .collect::<Vec<_>>();
@ -513,7 +508,7 @@ pub struct EguiTextureInfo {
} }
impl EguiTextureInfo { impl EguiTextureInfo {
fn into_sampler_desc(&self) -> pipeline::SamplerDesc { fn as_sampler_desc(&self) -> pipeline::SamplerDesc {
let address_mode = match self.options.wrap_mode { let address_mode = match self.options.wrap_mode {
egui::TextureWrapMode::ClampToEdge => vk::SamplerAddressMode::CLAMP_TO_EDGE, egui::TextureWrapMode::ClampToEdge => vk::SamplerAddressMode::CLAMP_TO_EDGE,
egui::TextureWrapMode::Repeat => vk::SamplerAddressMode::REPEAT, egui::TextureWrapMode::Repeat => vk::SamplerAddressMode::REPEAT,

View file

@ -0,0 +1,27 @@
#![allow(dead_code)]
use crate::device::Device;
//#[derive(Debug)]
pub struct DeviceMemoryDesc {
pub flags: vk_mem::AllocationCreateFlags,
pub size: u64,
pub align: u64,
pub type_bits: u32,
pub usage: vk_mem::MemoryUsage,
}
#[derive(Debug)]
pub struct DeviceMemory {
device: Device,
alloc: vk_mem::Allocation,
}
impl DeviceMemory {}
impl Drop for DeviceMemory {
fn drop(&mut self) {
unsafe {
self.device.alloc().free_memory(&mut self.alloc);
}
}
}

View file

@ -491,7 +491,7 @@ impl Drop for Pipeline {
} }
impl ShaderStageDesc<'_> { impl ShaderStageDesc<'_> {
fn into_create_info(&self) -> vk::PipelineShaderStageCreateInfo { fn as_create_info(&'_ self) -> vk::PipelineShaderStageCreateInfo<'_> {
vk::PipelineShaderStageCreateInfo::default() vk::PipelineShaderStageCreateInfo::default()
.module(self.module.handle()) .module(self.module.handle())
.flags(self.flags) .flags(self.flags)
@ -516,7 +516,7 @@ impl Pipeline {
.map(|p| p.handle()) .map(|p| p.handle())
.unwrap_or(vk::Pipeline::null()), .unwrap_or(vk::Pipeline::null()),
) )
.stage(desc.shader_stage.into_create_info()); .stage(desc.shader_stage.as_create_info());
unsafe { unsafe {
device.dev().create_compute_pipelines( device.dev().create_compute_pipelines(
@ -533,7 +533,7 @@ impl Pipeline {
let stages = desc let stages = desc
.shader_stages .shader_stages
.iter() .iter()
.map(|stage| stage.into_create_info()) .map(|stage| stage.as_create_info())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let vertex_input = desc.vertex_input.map(|vertex| { let vertex_input = desc.vertex_input.map(|vertex| {

View file

@ -508,10 +508,11 @@ impl RenderGraph {
let cmd = pool.alloc()?; let cmd = pool.alloc()?;
// transitions // transitions
for (&id, &(from, to)) in accesses.iter() { for (&id, &(from, to)) in accesses.iter() {
let buffer = unsafe { cmd.buffer() };
Self::transition_resource( Self::transition_resource(
&resources[id.0 as usize], &resources[id.0 as usize],
device.dev(), device.dev(),
unsafe { &cmd.buffer() }, &buffer,
from, from,
to, to,
); );

View file

@ -1,7 +1,7 @@
use std::{ use std::{
future::Future, future::Future,
marker::PhantomData, marker::PhantomData,
sync::{atomic::AtomicU32, Arc}, sync::{Arc, atomic::AtomicU32},
time::Duration, time::Duration,
}; };
@ -19,6 +19,12 @@ pub struct SyncThreadpool {
num_threads: Arc<AtomicU32>, num_threads: Arc<AtomicU32>,
} }
impl Default for SyncThreadpool {
fn default() -> Self {
Self::new()
}
}
#[derive(Debug)] #[derive(Debug)]
enum SyncPrimitive { enum SyncPrimitive {
Fence(Arc<Fence>), Fence(Arc<Fence>),
@ -259,11 +265,12 @@ pub struct FenceFuture<'a> {
} }
impl FenceFuture<'_> { impl FenceFuture<'_> {
/// Unsafe because `fence` must not be destroyed while this future is live. /// # Safety
/// `fence` must not be destroyed while this future is live.
#[allow(dead_code)] #[allow(dead_code)]
pub unsafe fn from_fence(device: Device, fence: vk::Fence) -> Self { pub unsafe fn from_fence(device: Device, fence: vk::Fence) -> Self {
Self { Self {
fence: Arc::new(Fence::new(device, fence)), fence: Arc::new(unsafe { Fence::new(device, fence) }),
_pd: PhantomData, _pd: PhantomData,
} }
} }

View file

@ -1,20 +1,19 @@
#![allow(dead_code)]
use std::{ use std::{
collections::HashMap, collections::HashMap,
sync::{Arc, Weak}, sync::{Arc, Weak},
}; };
use ash::vk::Extent2D; use ash::vk::Extent2D;
use cosmic_text::{ use cosmic_text::{CacheKey, FontSystem, PhysicalGlyph, SwashCache};
Attrs, Buffer, CacheKey, Family, FontSystem, Metrics, PhysicalGlyph, SwashCache, use glam::IVec2;
};
use glam::{IVec2, Vec2};
use guillotiere::size2; use guillotiere::size2;
#[cfg(test)] #[cfg(test)]
use image::{GenericImage, GenericImageView}; use image::{GenericImage, GenericImageView};
use crate::{ use crate::{
def_monotonic_id, def_monotonic_id,
util::{self, Rect2D, F32}, util::{self, F32, Rect2D},
}; };
const ROBOTO_BYTES: &[u8] = const ROBOTO_BYTES: &[u8] =
@ -214,18 +213,15 @@ impl FontAtlas {
fn add_glyph( fn add_glyph(
&mut self, &mut self,
key: CacheKey, key: CacheKey,
data: &Vec<u8>, data: &[u8],
size: Extent2D, size: Extent2D,
offset: IVec2, offset: IVec2,
) -> Option<AtlasGlyphInfo> { ) -> Option<AtlasGlyphInfo> {
let allocation = self.allocator.allocate(guillotiere::size2( let allocation = self.allocator.allocate(guillotiere::size2(
(size.width + 1).try_into().unwrap(), (size.width + 1).try_into().unwrap(),
(size.height + 1).try_into().unwrap(), (size.height + 1).try_into().unwrap(),
)); ))?;
let Some(allocation) = allocation else {
return None;
};
let rect = allocation.rectangle; let rect = allocation.rectangle;
let x = rect.min.x; let x = rect.min.x;
let y = rect.min.y; let y = rect.min.y;
@ -294,8 +290,14 @@ fn get_outlined_glyph_texture(
Ok((data, extent, IVec2::new(left, top))) Ok((data, extent, IVec2::new(left, top)))
} }
#[test] #[cfg(test)]
fn test() { mod tests {
use super::*;
use cosmic_text::{Attrs, Buffer, Family, Metrics};
use glam::Vec2;
#[test]
fn test() {
let mut font_store = FontStore::new(); let mut font_store = FontStore::new();
let mut db = cosmic_text::fontdb::Database::new(); let mut db = cosmic_text::fontdb::Database::new();
let mut font_id_map = HashMap::<FontId, cosmic_text::fontdb::ID>::new(); let mut font_id_map = HashMap::<FontId, cosmic_text::fontdb::ID>::new();
@ -332,7 +334,7 @@ fn test() {
let attrs = Attrs::new() let attrs = Attrs::new()
.family(Family::SansSerif) .family(Family::SansSerif)
.metrics(Metrics::new(48.0, 56.0)); .metrics(Metrics::new(48.0, 56.0));
let text = std::fs::read_to_string(path).expect("hello.txt"); let _text = std::fs::read_to_string(path).expect("hello.txt");
buf.set_text( buf.set_text(
"Hello, World! 你好! 안녕하세요", "Hello, World! 你好! 안녕하세요",
@ -353,7 +355,7 @@ fn test() {
let mut glyphs = Vec::new(); let mut glyphs = Vec::new();
let mut size = Vec2::new(0.0, 0.0); let mut size = Vec2::new(0.0, 0.0);
let result = buffer _ = buffer
.layout_runs() .layout_runs()
.flat_map(|run| { .flat_map(|run| {
size.x = size.x.max(run.line_w); size.x = size.x.max(run.line_w);
@ -369,7 +371,9 @@ fn test() {
let glyph_info = set let glyph_info = set
.get_glyph_info(physical.cache_key) .get_glyph_info(physical.cache_key)
.map(Ok) .map(Ok)
.unwrap_or_else(|| set.add_glyph(&mut font_system, &mut swash, physical.clone()))?; .unwrap_or_else(|| {
set.add_glyph(&mut font_system, &mut swash, physical.clone())
})?;
let pos = { let pos = {
let x = glyph_info.offset.x as f32 + physical.x as f32; let x = glyph_info.offset.x as f32 + physical.x as f32;
@ -394,7 +398,7 @@ fn test() {
eprintln!("glyphs: {glyphs:#?}"); eprintln!("glyphs: {glyphs:#?}");
eprintln!("image: {width}x{height}"); eprintln!("image: {width}x{height}");
for (info, pos, size) in glyphs { for (info, pos, _size) in glyphs {
let atlas_image = info.image.upgrade().unwrap(); let atlas_image = info.image.upgrade().unwrap();
let glyph = atlas_image.0.read(); let glyph = atlas_image.0.read();
@ -421,4 +425,5 @@ fn test() {
} }
image.save("rendered.png").unwrap(); image.save("rendered.png").unwrap();
}
} }

View file

@ -9,7 +9,14 @@ macro_rules! def_monotonic_id {
#[derive(Copy, Clone, Hash, Eq, PartialEq, PartialOrd, Ord, Debug)] #[derive(Copy, Clone, Hash, Eq, PartialEq, PartialOrd, Ord, Debug)]
$vis struct $ty(::core::num::NonZero<u32>); $vis struct $ty(::core::num::NonZero<u32>);
impl ::core::default::Default for $ty {
fn default() -> Self {
Self::new()
}
}
impl $ty { impl $ty {
#[allow(dead_code, reason = "This method is macro-generated")]
pub fn new() -> Self { pub fn new() -> Self {
use ::core::sync::atomic::{AtomicU32, Ordering}; use ::core::sync::atomic::{AtomicU32, Ordering};
static COUNTER: AtomicU32 = AtomicU32::new(1); static COUNTER: AtomicU32 = AtomicU32::new(1);
@ -21,6 +28,7 @@ macro_rules! def_monotonic_id {
) )
} }
#[allow(dead_code, reason = "This method is macro-generated")]
pub fn as_u32(&self) -> u32 { pub fn as_u32(&self) -> u32 {
self.0.get() self.0.get()
} }
@ -437,7 +445,7 @@ pub struct Rgba8(pub [u8; 4]);
impl std::hash::Hash for Rgba { impl std::hash::Hash for Rgba {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.map(|f| hash_f32(state, f)); self.0.iter().for_each(|&f| hash_f32(state, f));
} }
} }

29
crates/text/Cargo.toml Normal file
View file

@ -0,0 +1,29 @@
[package]
name = "text"
version = "0.1.0"
edition = "2024"
[dependencies]
thiserror = { workspace = true }
anyhow = { workspace = true }
tracing = { workspace = true }
glam = { workspace = true }
derive_more = { workspace = true }
ahash = { workspace = true }
parking_lot = { workspace = true }
bevy_ecs = { workspace = true }
bevy_asset = { workspace = true }
bevy_reflect = { workspace = true }
bevy_utils = { workspace = true }
cosmic-text = "0.12.1"
sys-locale = "0.3.2"
guillotiere = "0.6.2"
renderer = {path = "../renderer"}
[dev-dependencies]
tracing-test = "0.2.5"
image = "0.25.5"

484
crates/text/src/lib.rs Normal file
View file

@ -0,0 +1,484 @@
#![feature(debug_closure_helpers)]
#![allow(dead_code)]
use bevy_asset::{Asset, AssetLoader};
use bevy_ecs::prelude::*;
use bevy_reflect::TypePath;
use glam::IVec2;
use guillotiere::size2;
use renderer::{
Extent2D, def_monotonic_id,
util::{F32, Rect2D},
};
use std::sync::{Arc, Weak};
use ahash::{HashMap, HashMapExt};
use cosmic_text::{CacheKey, FontSystem, PhysicalGlyph, SwashCache};
mod components {
    use bevy_ecs::component::Component;
    use bevy_reflect::Reflect;
    use derive_more::{Deref, DerefMut};

    /// ECS component holding an entity's text content.
    ///
    /// `Deref`/`DerefMut` expose the inner [`String`] directly.
    #[derive(Debug, Clone, Component, Reflect, Deref, DerefMut)]
    pub struct Text(pub String);
}
/// Roboto variable font, bundled into the binary as the default Latin face.
const ROBOTO_BYTES: &[u8] =
    include_bytes!("../../../assets/fonts/Roboto/Roboto-VariableFont_wdth,wght.ttf");
/// Noto Sans SC variable font, bundled for CJK coverage.
const NOTO_SANS_HAN_BYTES: &[u8] =
    include_bytes!("../../../assets/fonts/Noto_Sans_SC/NotoSansSC-VariableFont_wght.ttf");
def_monotonic_id!(pub FontId);
/// Raw font-file bytes usable as a Bevy asset; cheap to clone (shared `Arc`).
#[derive(Clone, TypePath, Asset)]
pub struct FontData(Arc<dyn AsRef<[u8]> + Send + Sync>);
impl std::fmt::Debug for FontData {
    /// Formats as `FontData(..)` with the debug output of the underlying byte slice.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("FontData")
            // `field_with` (nightly `debug_closure_helpers` feature, enabled at
            // the top of this file) formats the type-erased byte slice without
            // building an intermediate value.
            .field_with(|f| write!(f, "{:?}", <dyn AsRef::<[u8]>>::as_ref(self.0.as_ref())))
            .finish()
    }
}
/// Bevy [`AssetLoader`] that reads font files into [`FontData`].
#[derive(Debug, Default)]
struct FontLoader;
/// Errors produced while loading a font asset.
#[derive(thiserror::Error, Debug)]
enum FontLoaderError {
    /// The bytes could not be parsed as a font face.
    /// NOTE(review): no code path in this file constructs this variant yet —
    /// presumably intended for validation during load; confirm before relying on it.
    #[error(transparent)]
    Parser(#[from] cosmic_text::ttf_parser::FaceParsingError),
    /// Reading the underlying asset bytes failed.
    #[error(transparent)]
    Io(#[from] std::io::Error),
}
impl AssetLoader for FontLoader {
    type Asset = FontData;
    type Settings = ();
    type Error = FontLoaderError;

    /// Reads the entire font file into memory and wraps it as [`FontData`].
    ///
    /// # Errors
    /// Returns [`FontLoaderError::Io`] if reading from `reader` fails.
    async fn load(
        &self,
        reader: &mut dyn bevy_asset::io::Reader,
        _settings: &Self::Settings,
        _load_context: &mut bevy_asset::LoadContext<'_>,
    ) -> Result<Self::Asset, Self::Error> {
        let mut bytes = Vec::new();
        reader.read_to_end(&mut bytes).await?;
        // Boxed slice sheds any spare capacity from the read buffer.
        let font = FontData(Arc::new(bytes.into_boxed_slice()));
        Ok(font)
    }
}
/// Owns the raw bytes of every registered font, keyed by [`FontId`].
struct FontStore {
    fonts: HashMap<FontId, FontData>,
}
impl FontStore {
fn new() -> Self {
Self {
fonts: HashMap::new(),
}
}
fn add_font_bytes(&mut self, bytes: FontData) -> FontId {
let id = FontId::new();
self.fonts.insert(id, bytes);
id
}
fn as_source(&self, id: FontId) -> Option<cosmic_text::fontdb::Source> {
self.fonts
.get(&id)
.map(|bytes| cosmic_text::fontdb::Source::Binary(bytes.0.clone()))
}
}
/// ECS resource mapping each font to its set of glyph atlases.
#[derive(Debug, Default, Resource)]
struct FontAtlasSets {
    sets: HashMap<FontId, FontAtlasSet>,
}
impl FontAtlasSets {
fn new() -> Self {
Self {
sets: HashMap::new(),
}
}
fn get(&self, key: &FontId) -> Option<&FontAtlasSet> {
self.sets.get(key)
}
fn get_mut(&mut self, key: &FontId) -> Option<&mut FontAtlasSet> {
self.sets.get_mut(key)
}
}
/// Per-Font Font Atlas Set
#[derive(Debug, Default)]
struct FontAtlasSet {
    // TODO: add proper plural of atlas to english language.
    /// Atlases grouped by font size (bit-exact `f32` key); each size may grow
    /// additional atlases once the existing ones fill up.
    atlantes: HashMap<F32, Vec<FontAtlas>>,
}
impl FontAtlasSet {
    /// Creates a set with no atlases; they are allocated lazily per font size.
    fn new() -> Self {
        Self {
            atlantes: HashMap::new(),
        }
    }

    /// Looks up an already-rasterized glyph across all atlases for the glyph's
    /// font size, returning its atlas image, sub-rect and placement offset.
    fn get_glyph_info(&self, key: CacheKey) -> Option<AtlasGlyphInfo> {
        self.atlantes
            .get(&F32::from_bits(key.font_size_bits))?
            .iter()
            .find_map(|atlas| {
                atlas
                    .glyphs
                    .get(&key)
                    .map(|&(rect, offset)| (Arc::downgrade(&atlas.image), rect, offset))
            })
            .map(|(image, rect, offset)| AtlasGlyphInfo {
                image,
                rect,
                offset,
            })
    }

    /// Rasterizes `physical` and inserts it into an atlas for its font size,
    /// appending a larger atlas when none of the existing ones have room.
    ///
    /// # Errors
    /// Returns [`Error::FailedToRasterizeGlyph`] when swash cannot render the
    /// glyph, or when it does not fit even into a freshly allocated atlas.
    fn add_glyph(
        &mut self,
        font_system: &mut FontSystem,
        swash_cache: &mut SwashCache,
        physical: PhysicalGlyph,
    ) -> Result<AtlasGlyphInfo, Error> {
        let key = physical.cache_key;
        let atlantes = self
            .atlantes
            .entry(F32::from_bits(physical.cache_key.font_size_bits))
            .or_insert_with(|| vec![FontAtlas::new(512)]);

        let (data, size, offset) = get_outlined_glyph_texture(font_system, swash_cache, physical)?;

        if !atlantes
            .iter_mut()
            .any(|atlas| atlas.add_glyph(key, &data, size, offset).is_some())
        {
            // BUG FIX: this previously computed `size.height.max(size.height)`,
            // ignoring the width, so a wide glyph could get a replacement atlas
            // still too small to hold it. Use the larger of both dimensions.
            let max_size = size.width.max(size.height);
            // Next power of two strictly greater than `max_size` (the atlas
            // needs `max_size + 1` to fit the one-pixel gutter), at least 512.
            let x2_or_512 = (1u32 << (32 - max_size.leading_zeros())).max(512);
            atlantes.push(FontAtlas::new(x2_or_512));
            atlantes
                .last_mut()
                .unwrap()
                .add_glyph(key, &data, size, offset)
                .ok_or(Error::FailedToRasterizeGlyph(key))?;
        }

        Ok(self.get_glyph_info(key).unwrap())
    }
}
/// Shared, lock-protected CPU-side atlas image.
#[derive(Debug)]
struct Image(parking_lot::RwLock<ImageInner>);
/// Backing pixel storage for an atlas image.
#[derive(Debug, Clone)]
struct ImageInner {
    // Raw pixel bytes; sized as `4 * width * height` by `FontAtlas::new`,
    // i.e. four bytes per pixel.
    image: Vec<u8>,
    // Pixel dimensions of `image`.
    image_size: Extent2D,
}
impl ImageInner {
    /// Read-only view of the raw pixel bytes.
    fn bytes(&self) -> &[u8] {
        self.image.as_slice()
    }

    /// Mutable view of the raw pixel bytes.
    fn bytes_mut(&mut self) -> &mut [u8] {
        self.image.as_mut_slice()
    }

    /// Image width in pixels.
    fn width(&self) -> u32 {
        self.image_size.width
    }

    /// Image height in pixels.
    fn height(&self) -> u32 {
        self.image_size.height
    }
}
/// Location of a rasterized glyph inside an atlas image.
#[derive(Debug)]
struct AtlasGlyphInfo {
    /// Atlas image containing the glyph; weak, so a dropped atlas invalidates
    /// this entry instead of keeping the image alive.
    image: Weak<Image>,
    /// Sub-rectangle of the atlas occupied by the glyph.
    rect: Rect2D,
    /// Placement offset (left/top) reported by the rasterizer.
    offset: IVec2,
}
/// A single fixed-size glyph atlas: a CPU image plus a rectangle allocator.
struct FontAtlas {
    // TODO: this image will be host-coherent and host-visible, so that it can
    // be updated with new glyphs.
    // that begs the question:
    // - should it be staged to a device-local image when it's used?
    // - does it make sense to use VK_EXT_external_memory_host here? It's
    //   supported on virtually all Windows and Linux drivers.
    // - how to sync this? I need to make sure that when this image is written
    //   to, it isn't also being read from. Usually, these images will be
    //   write-only, except when rendering. Since currently my rendering may
    //   happen on any thread at any time, that's problematic.
    //
    // In fact, this is an awful type to use here because of the unique access
    // requirement for mapping.
    image: Arc<Image>,
    // stores sub-rect of image and placement offset of glyph
    glyphs: HashMap<CacheKey, (Rect2D, IVec2)>,
    /// Guillotine allocator handing out free rectangles within `image`.
    allocator: guillotiere::AtlasAllocator,
}
impl std::fmt::Debug for FontAtlas {
    /// Manual `Debug`: the `allocator` field is omitted from the output
    /// (hence `finish_non_exhaustive`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("FontAtlas")
            .field("image", &self.image)
            .field("glyphs", &self.glyphs)
            .finish_non_exhaustive()
    }
}
impl FontAtlas {
    /// Creates a `size`×`size` atlas backed by a zero-initialized image
    /// (4 bytes per pixel).
    fn new(size: u32) -> Self {
        let num_bytes = size * size * 4;
        Self {
            image: Arc::new(Image(parking_lot::RwLock::new(ImageInner {
                image: vec![0; num_bytes as usize],
                image_size: Extent2D {
                    width: size,
                    height: size,
                },
            }))),
            glyphs: HashMap::new(),
            allocator: guillotiere::AtlasAllocator::new(size2(
                size.try_into().unwrap(),
                size.try_into().unwrap(),
            )),
        }
    }

    /// Whether this atlas already contains a glyph for `key`.
    fn has_glyph(&self, key: CacheKey) -> bool {
        self.glyphs.contains_key(&key)
    }

    /// Copies the glyph bitmap `data` (dimensions `size`, 4 bytes per pixel)
    /// into a free region of the atlas and records it under `key`.
    ///
    /// Returns `None` when the allocator has no room for the glyph plus its
    /// one-pixel gutter.
    fn add_glyph(
        &mut self,
        key: CacheKey,
        // Take a slice, not `&Vec<u8>` (clippy `ptr_arg`); `&data` callers
        // still work via deref coercion.
        data: &[u8],
        size: Extent2D,
        offset: IVec2,
    ) -> Option<AtlasGlyphInfo> {
        // Reserve one extra pixel per axis as a gutter between glyphs.
        let allocation = self.allocator.allocate(guillotiere::size2(
            (size.width + 1).try_into().unwrap(),
            (size.height + 1).try_into().unwrap(),
        ))?;

        let rect = allocation.rectangle;
        let x = rect.min.x;
        let y = rect.min.y;
        // Strip the gutter again to get the glyph's actual extent.
        let width = rect.width() - 1;
        let height = rect.height() - 1;
        let rect = Rect2D::new_from_size(IVec2::new(x, y), IVec2::new(width, height));
        self.glyphs.insert(key, (rect, offset));

        // Blit the glyph into the atlas image, one row at a time.
        let mut image = self.image.0.write();
        for line_y in 0..height {
            let y = y + line_y;
            let image_offset = 4 * y as u32 * image.width() + 4 * x as u32;
            let glyph_offset = 4 * line_y * width;
            let len = 4 * width as usize;
            let dst = &mut image.bytes_mut()[image_offset as usize..][..len];
            let src = &data[glyph_offset as usize..][..len];
            dst.copy_from_slice(src);
        }

        Some(AtlasGlyphInfo {
            image: Arc::downgrade(&self.image),
            rect,
            offset,
        })
    }
}
/// Errors produced by glyph rasterization and atlas insertion.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// Swash could not produce an image for the glyph, or the rasterized
    /// glyph could not be placed into any atlas.
    #[error("Failed to rasterize glyph {0:?}.")]
    FailedToRasterizeGlyph(CacheKey),
}
/// Rasterizes `glyph` via the swash cache and returns its pixels as 4-byte
/// RGBA, together with the bitmap extent and the (left, top) placement offset.
///
/// # Errors
/// Returns [`Error::FailedToRasterizeGlyph`] when swash cannot render the glyph.
fn get_outlined_glyph_texture(
    font_system: &mut FontSystem,
    swash_cache: &mut SwashCache,
    glyph: PhysicalGlyph,
) -> Result<(Vec<u8>, Extent2D, IVec2), Error> {
    let rendered = swash_cache
        .get_image_uncached(font_system, glyph.cache_key)
        .ok_or(Error::FailedToRasterizeGlyph(glyph.cache_key))?;
    let placement = rendered.placement;

    let rgba = match rendered.content {
        // Expand the single-channel coverage mask into white RGBA pixels,
        // putting the mask value into the alpha channel.
        cosmic_text::SwashContent::Mask => {
            let mut out = Vec::with_capacity(rendered.data.len() * 4);
            for &alpha in &rendered.data {
                out.extend_from_slice(&[255, 255, 255, alpha]);
            }
            out
        }
        // Color glyphs (e.g. emoji) are already RGBA.
        cosmic_text::SwashContent::Color => rendered.data,
        cosmic_text::SwashContent::SubpixelMask => {
            // TODO: implement
            todo!()
        }
    };

    Ok((
        rgba,
        Extent2D {
            width: placement.width,
            height: placement.height,
        },
        IVec2::new(placement.left, placement.top),
    ))
}
#[cfg(test)]
mod tests {
    use cosmic_text::{Attrs, Buffer, Family, Metrics};
    use glam::Vec2;
    use image::{GenericImage, GenericImageView};

    use super::*;

    /// End-to-end smoke test: registers the bundled fonts, shapes a
    /// mixed-script string, rasterizes every glyph into atlases and composites
    /// the result into `rendered.png`.
    #[test]
    fn test() {
        let mut font_store = FontStore::new();
        let mut db = cosmic_text::fontdb::Database::new();
        let mut font_id_map = HashMap::<FontId, cosmic_text::fontdb::ID>::new();
        let mut reverse_font_id_map = HashMap::<cosmic_text::fontdb::ID, FontId>::new();

        let roboto = font_store.add_font_bytes(FontData(Arc::new(ROBOTO_BYTES)));
        let noto_han = font_store.add_font_bytes(FontData(Arc::new(NOTO_SANS_HAN_BYTES)));

        let id = *db
            .load_font_source(font_store.as_source(roboto).unwrap())
            .last()
            .unwrap();
        font_id_map.insert(roboto, id);
        reverse_font_id_map.insert(id, roboto);
        let id = *db
            .load_font_source(font_store.as_source(noto_han).unwrap())
            .last()
            .unwrap();
        font_id_map.insert(noto_han, id);
        reverse_font_id_map.insert(id, noto_han);

        db.set_sans_serif_family("Roboto");
        // db.load_system_fonts();
        let locale = sys_locale::get_locale().unwrap_or("en-DK".to_string());
        let mut font_system = FontSystem::new_with_locale_and_db(locale, db);
        let mut swash = SwashCache::new();

        let mut buffer = Buffer::new_empty(Metrics::new(14.0, 20.0));
        let mut buf = buffer.borrow_with(&mut font_system);

        // Plain literal: the previous `format!` had nothing to interpolate.
        let path = "../../assets/testing/hello.txt";
        let attrs = Attrs::new()
            .family(Family::SansSerif)
            .metrics(Metrics::new(48.0, 56.0));
        // Read the fixture to verify the asset tree is present; the shaped
        // text itself comes from the literal below.
        let _text = std::fs::read_to_string(path).expect("hello.txt");

        buf.set_text(
            "Hello, World! 你好! 안녕하세요",
            attrs,
            cosmic_text::Shaping::Advanced,
        );
        //buf.set_size(Some(400.0), Some(400.0));
        buf.set_wrap(cosmic_text::Wrap::Word);
        for line in buf.lines.iter_mut() {
            line.set_align(Some(cosmic_text::Align::Left));
        }
        buf.shape_until_scroll(false);

        let mut font_atlantes = FontAtlasSets::new();
        let mut glyphs = Vec::new();
        let mut size = Vec2::new(0.0, 0.0);
        buffer
            .layout_runs()
            .flat_map(|run| {
                // Track the overall bounding box while iterating the runs.
                size.x = size.x.max(run.line_w);
                size.y = size.y + run.line_height;
                run.glyphs.iter().map(move |glyph| (glyph, run.line_y))
            })
            .try_for_each(|(glyph, line_y)| -> Result<(), Error> {
                let font_id = *reverse_font_id_map.get(&glyph.font_id).unwrap();
                let set = font_atlantes.sets.entry(font_id).or_default();
                let physical = glyph.physical((0.0, 0.0), 1.0);

                let glyph_info = set
                    .get_glyph_info(physical.cache_key)
                    .map(Ok)
                    .unwrap_or_else(|| {
                        set.add_glyph(&mut font_system, &mut swash, physical.clone())
                    })?;

                let pos = {
                    let x = glyph_info.offset.x as f32 + physical.x as f32;
                    let y = line_y.round() + physical.y as f32 - glyph_info.offset.y as f32;
                    Vec2::new(x, y)
                };
                let size = glyph_info.rect.size();

                glyphs.push((glyph_info, pos, size));
                Ok(())
            })
            // The result was previously dropped silently; fail the test on
            // rasterization errors instead.
            .expect("failed to rasterize glyphs");

        let (width, height) = {
            let tmp = size.ceil().as_uvec2();
            (tmp.x, tmp.y)
        };
        let mut image = image::RgbaImage::from_pixel(width, height, image::Rgba([0, 0, 0, 255]));
        eprintln!("glyphs: {glyphs:#?}");
        eprintln!("image: {width}x{height}");

        for (info, pos, _size) in glyphs {
            let atlas_image = info.image.upgrade().unwrap();
            let glyph = atlas_image.0.read();
            let atlas_image = image::ImageBuffer::<image::Rgba<u8>, _>::from_raw(
                glyph.width(),
                glyph.height(),
                glyph.bytes(),
            )
            .unwrap();
            let glyph = atlas_image.view(
                info.rect.top_left().x as u32,
                info.rect.top_left().y as u32,
                info.rect.width() as u32,
                info.rect.height() as u32,
            );

            eprintln!("rect: {:?}", info.rect);
            eprintln!("image_size: {:?}", image.dimensions());
            image
                .copy_from(&*glyph, pos.x as u32, pos.y as u32)
                .unwrap();
        }
        image.save("rendered.png").unwrap();
    }
}

View file

@ -1,7 +1,7 @@
[package] [package]
name = "window" name = "window"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2024"
[dependencies] [dependencies]
winit = { workspace = true } winit = { workspace = true }