
Commit

WIP TODO squash this commit
richard-uk1 committed Feb 21, 2025
1 parent 0004bbb commit 5131957
Showing 8 changed files with 374 additions and 17 deletions.
24 changes: 21 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

9 changes: 4 additions & 5 deletions node-graph/gcore/Cargo.toml
@@ -28,10 +28,7 @@ std = [
"image",
"reflections",
]
reflections = [
"alloc",
"ctor",
]
reflections = ["alloc", "ctor"]
serde = [
"dep:serde",
"glam/serde",
@@ -67,7 +64,7 @@ rand_chacha = { workspace = true, optional = true }
bezier-rs = { workspace = true, optional = true }
kurbo = { workspace = true, optional = true }
base64 = { workspace = true, optional = true }
vello = { workspace = true, optional = true }
vello = { workspace = true, optional = true }
wgpu = { workspace = true, optional = true }
specta = { workspace = true, optional = true }
rustybuzz = { workspace = true, optional = true }
@@ -80,6 +77,8 @@ image = { workspace = true, optional = true, default-features = false, features
"png",
] }
math-parser = { path = "../../libraries/math-parser" }
rustc-hash = { workspace = true }
petgraph = "0.7.1"

[dev-dependencies]
# Workspace dependencies
94 changes: 94 additions & 0 deletions node-graph/gcore/src/vector/merge_by_distance.rs
@@ -0,0 +1,94 @@
//! Collapse groups of points connected by segments shorter than a given distance.
use core::mem;

use glam::DVec2;
use petgraph::prelude::UnGraphMap;
use rustc_hash::FxHashSet;

use super::PointId;
use super::VectorData;

use super::IndexedVectorData;

impl VectorData {
/// Collapse each group of points connected by segments shorter than `distance` into a single point.
pub(crate) fn merge_by_distance(&mut self, distance: f64) {
// Treat `self` as an undirected graph, with points as nodes and segments as edges.
let mut data = IndexedVectorData::from_data(&*self);

// Graph that will contain only short edges. References data graph
let mut short_edges = UnGraphMap::new();
for seg_id in data.segments() {
let length = data.segment_chord_length(seg_id);
if length < distance {
let [start, end] = data.segment_ends(seg_id);
let start = data.point_graph.node_weight(start).unwrap().id;
let end = data.point_graph.node_weight(end).unwrap().id;

short_edges.add_node(start);
short_edges.add_node(end);
short_edges.add_edge(start, end, seg_id);
}
}

// Now group connected segments - all will be collapsed to a single point.
// Note: there are a few algorithms for this; perhaps test empirically to find the fastest.
let collapse: Vec<FxHashSet<PointId>> = petgraph::algo::tarjan_scc(&short_edges).into_iter().map(|connected| connected.into_iter().collect()).collect();
let average_position = collapse
.iter()
.map(|collapse_set| {
let sum: DVec2 = collapse_set.iter().map(|&id| data.point_position(id)).sum();
sum / collapse_set.len() as f64
})
.collect::<Vec<_>>();

// Take the point->offset mapping before we drop the indexes.
let point_to_offset = mem::take(&mut data.point_to_offset);
drop(data);

// Collect the points and segments to delete and remove them at the end, so that our indices aren't invalidated.
let mut points_to_delete = FxHashSet::default();
let mut segments_to_delete = FxHashSet::default();
for (mut collapse_set, average_pos) in collapse.into_iter().zip(average_position.into_iter()) {
// remove any segments where both endpoints are in the collapse set
segments_to_delete.extend(self.segment_domain.iter().filter_map(|(id, start_offset, end_offset, _)| {
let start = self.point_domain.ids()[start_offset];
let end = self.point_domain.ids()[end_offset];
if collapse_set.contains(&start) && collapse_set.contains(&end) {
Some(id)
} else {
None
}
}));

// Delete all points but the first (chosen arbitrarily). Set that point's position to the
// average of the collapsed points, and update segments to replace all collapsed points
// with the remaining point.

// Unwrap: a set produced by the connected-components algorithm is never empty.
let first_id = collapse_set.iter().copied().next().unwrap();
// `first_id` is the point we will collapse to.
collapse_set.remove(&first_id);
let first_offset = point_to_offset[&first_id];
self.point_domain.positions[first_offset] = average_pos;

// Look for segment endpoints in `collapse_set` and replace them with the point we are collapsing to.
for (_, start_offset, end_offset, ..) in self.segment_domain.iter_mut() {
let start_id = self.point_domain.ids()[*start_offset];
let end_id = self.point_domain.ids()[*end_offset];
if collapse_set.contains(&start_id) {
*start_offset = first_offset;
} else if collapse_set.contains(&end_id) {
*end_offset = first_offset;
}
}

points_to_delete.extend(collapse_set)
}
self.segment_domain.retain(|id| !segments_to_delete.contains(id), usize::MAX);
self.point_domain.retain(&mut self.segment_domain, |id| !points_to_delete.contains(id));

// TODO: don't forget about faces
}
}
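The grouping step in `merge_by_distance` leans on the fact that, on an undirected graph, Tarjan's strongly-connected-components algorithm simply returns connected components. A minimal standalone sketch of that idea (not part of this commit; the numeric node IDs and unit edge weights are placeholders):

use petgraph::prelude::UnGraphMap;

fn main() {
    // Undirected graph holding only the "short" edges; `()` stands in for the segment ID weight.
    let mut short_edges: UnGraphMap<u32, ()> = UnGraphMap::new();
    // Two clusters of nearby points: {1, 2, 3} and {7, 8}.
    short_edges.add_edge(1, 2, ());
    short_edges.add_edge(2, 3, ());
    short_edges.add_edge(7, 8, ());

    // On an undirected graph, strongly connected components are just connected
    // components, so each returned group is one set of points to collapse.
    for group in petgraph::algo::tarjan_scc(&short_edges) {
        println!("{group:?}"); // E.g. [3, 2, 1] then [8, 7] (order may vary).
    }
}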
2 changes: 2 additions & 0 deletions node-graph/gcore/src/vector/mod.rs
Expand Up @@ -12,3 +12,5 @@ mod vector_nodes;
pub use vector_nodes::*;

pub use bezier_rs;

mod merge_by_distance;
24 changes: 24 additions & 0 deletions node-graph/gcore/src/vector/vector_data.rs
@@ -1,6 +1,9 @@
mod attributes;
mod indexed;
mod modification;

pub use attributes::*;
pub use indexed::IndexedVectorData;
pub use modification::*;

use super::style::{PathStyle, Stroke};
@@ -12,6 +15,7 @@ use dyn_any::DynAny;

use core::borrow::Borrow;
use glam::{DAffine2, DVec2};
use std::borrow::Cow;

// TODO: Eventually remove this migration document upgrade code
pub fn migrate_vector_data<'de, D: serde::Deserializer<'de>>(deserializer: D) -> Result<VectorDataTable, D::Error> {
@@ -35,6 +39,8 @@ pub type VectorDataTable = Instances<VectorData>;

/// [VectorData] is passed between nodes.
/// It contains a list of subpaths (that may be open or closed), a transform, and some style information.
///
/// Segments are connected if they share end points.
#[derive(Clone, Debug, PartialEq, DynAny)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct VectorData {
@@ -65,6 +71,24 @@ impl core::hash::Hash for VectorData {
}
}

impl<'a> From<&'a VectorData> for Cow<'a, VectorData> {
fn from(value: &'a VectorData) -> Self {
Self::Borrowed(value)
}
}

impl<'a> From<&'a mut VectorData> for Cow<'a, VectorData> {
fn from(value: &'a mut VectorData) -> Self {
Self::Borrowed(value)
}
}

impl From<VectorData> for Cow<'static, VectorData> {
fn from(value: VectorData) -> Self {
Self::Owned(value)
}
}
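A hypothetical illustration (not part of this commit) of what these conversions enable: a function can take either a borrowed or an owned VectorData through `Cow` and defer any clone until ownership is actually needed. The function name is invented, and the sketch assumes it sits in this module, which already imports `Cow`.

fn describe<'a>(data: impl Into<Cow<'a, VectorData>>) -> String {
    let data = data.into();
    // Works for Cow::Borrowed and Cow::Owned alike; VectorData derives Debug.
    format!("{data:?}")
}

Callers can then pass `&vector_data`, `&mut vector_data`, or `vector_data` by value without changing the call site.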

impl VectorData {
/// An empty subpath with no data, an identity transform, and a black fill.
pub const fn empty() -> Self {
65 changes: 58 additions & 7 deletions node-graph/gcore/src/vector/vector_data/attributes.rs
@@ -1,8 +1,10 @@
use crate::vector::vector_data::{HandleId, VectorData, VectorDataTable};
use crate::vector::ConcatElement;

use bezier_rs::BezierHandles;
use dyn_any::DynAny;

use core::iter::zip;
use glam::{DAffine2, DVec2};
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
@@ -82,7 +84,7 @@ impl core::hash::BuildHasher for NoHashBuilder {
/// Stores data which is per-point. Each point is merely a position and can be used in a point cloud or to form a bézier path. In future this will be extendable at runtime with custom attributes.
pub struct PointDomain {
id: Vec<PointId>,
positions: Vec<DVec2>,
pub(crate) positions: Vec<DVec2>,
}

impl core::hash::Hash for PointDomain {
@@ -117,7 +119,8 @@ impl PointDomain {
id_map.push(new_index);
new_index += 1;
} else {
id_map.push(usize::MAX); // A placeholder for invalid ids. This is checked after the segment domain is modified.
// A placeholder for invalid ids. This is checked after the segment domain is modified.
id_map.push(usize::MAX);
}
}

@@ -149,6 +152,15 @@ impl PointDomain {
self.positions[index] = position;
}

pub fn set_position_by_id(&mut self, id: PointId, position: DVec2) {
let Some(idx) = self.resolve_id(id) else {
// If id not found do nothing.
debug_assert!(false, "tried to find Point ID that was not present");
return;
};
self.set_position(idx, position)
}

pub fn ids(&self) -> &[PointId] {
&self.id
}
@@ -184,6 +196,11 @@ impl PointDomain {
*pos = transform.transform_point2(*pos);
}
}

/// Iterate over point IDs and positions
pub fn iter(&self) -> impl Iterator<Item = (PointId, DVec2)> + '_ {
self.ids().iter().copied().zip(self.positions().iter().copied())
}
}
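A hypothetical helper (not part of this commit) using the new `PointDomain::iter`; it mirrors the position-averaging step in `merge_by_distance` and assumes a non-empty domain.

fn centroid(points: &PointDomain) -> DVec2 {
    // Sum every position, then divide by the point count.
    let sum: DVec2 = points.iter().map(|(_id, position)| position).sum();
    sum / points.ids().len() as f64
}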

#[derive(Clone, Debug, Default, PartialEq, Hash, DynAny)]
@@ -359,6 +376,7 @@ impl SegmentDomain {
})
}

/// Get the index for an ID. Runs in `O(n)`.
fn id_to_index(&self, id: SegmentId) -> Option<usize> {
debug_assert_eq!(self.ids.len(), self.handles.len());
debug_assert_eq!(self.ids.len(), self.start_point.len());
@@ -413,11 +431,35 @@ impl SegmentDomain {
pub(crate) fn connected_count(&self, point: usize) -> usize {
self.all_connected(point).count()
}

/// Iterate over segments in the domain.
///
/// Each item is `(id, start point offset, end point offset, handles)`.
pub(crate) fn iter(&self) -> impl Iterator<Item = (SegmentId, usize, usize, BezierHandles)> + '_ {
let ids = self.ids.iter().copied();
let start_point = self.start_point.iter().copied();
let end_point = self.end_point.iter().copied();
let handles = self.handles.iter().copied();
zip(ids, zip(start_point, zip(end_point, handles))).map(|(id, (start_point, (end_point, handles)))| (id, start_point, end_point, handles))
}

/// Iterate over segments in the domain, yielding mutable references.
///
/// Each item is `(id, start point offset, end point offset, handles)`.
pub(crate) fn iter_mut(&mut self) -> impl Iterator<Item = (&mut SegmentId, &mut usize, &mut usize, &mut BezierHandles)> + '_ {
let ids = self.ids.iter_mut();
let start_point = self.start_point.iter_mut();
let end_point = self.end_point.iter_mut();
let handles = self.handles.iter_mut();
zip(ids, zip(start_point, zip(end_point, handles))).map(|(id, (start_point, (end_point, handles)))| (id, start_point, end_point, handles))
}
}
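A standalone sketch of the nested-`zip` pattern used by the two iterators above: `core::iter::zip` pairs only two iterators at a time, so zipping three produces nested tuples that the closure then flattens. The sample data is made up.

use core::iter::zip;

fn main() {
    let ids = [1u32, 2, 3];
    let starts = [10usize, 20, 30];
    let ends = [11usize, 21, 31];
    let flat: Vec<(u32, usize, usize)> = zip(ids, zip(starts, ends))
        .map(|(id, (start, end))| (id, start, end))
        .collect();
    assert_eq!(flat, vec![(1, 10, 11), (2, 20, 21), (3, 30, 31)]);
}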

#[derive(Clone, Debug, Default, PartialEq, Hash, DynAny)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
/// Stores data which is per-region. A region is an enclosed area composed of a range of segments from the [`SegmentDomain`] that can be given a fill. In future this will be extendable at runtime with custom attributes.
/// Stores data which is per-region. A region is an enclosed area composed of a range
/// of segments from the [`SegmentDomain`] that can be given a fill. In future this will
/// be extendable at runtime with custom attributes.
pub struct RegionDomain {
ids: Vec<RegionId>,
segment_range: Vec<core::ops::RangeInclusive<SegmentId>>,
@@ -457,10 +499,6 @@ impl RegionDomain {
self.fill.push(fill);
}

fn _resolve_id(&self, id: RegionId) -> Option<usize> {
self.ids.iter().position(|&check_id| check_id == id)
}

pub fn next_id(&self) -> RegionId {
self.ids.iter().copied().max_by(|a, b| a.0.cmp(&b.0)).map(|mut id| id.next_id()).unwrap_or(RegionId::ZERO)
}
@@ -504,6 +542,12 @@ impl RegionDomain {
}
}

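/// A single region yielded by [`VectorData::regions`]: its id, the inclusive range of
/// segments that composes it, and its fill.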
pub struct Region {
pub id: RegionId,
pub segment_range: core::ops::RangeInclusive<SegmentId>,
pub fill: FillId,
}

impl VectorData {
/// Construct a [`bezier_rs::Bezier`] curve spanning from the resolved position of the start and end points with the specified handles.
fn segment_to_bezier_with_index(&self, start: usize, end: usize, handles: bezier_rs::BezierHandles) -> bezier_rs::Bezier {
@@ -653,6 +697,13 @@ impl VectorData {
self.segment_domain.map_ids(&id_map);
self.region_domain.map_ids(&id_map);
}

pub fn regions(&self) -> impl Iterator<Item = Region> + '_ {
let ids = self.region_domain.ids.iter().copied();
let segment_range = self.region_domain.segment_range.iter().cloned();
let fill = self.region_domain.fill.iter().copied();
zip(ids, zip(segment_range, fill)).map(|(id, (segment_range, fill))| Region { id, segment_range, fill })
}
}
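A hypothetical use (not part of this commit) of the new `regions` iterator; `vector_data` is a placeholder binding, and the id types are assumed to derive `Debug` like the other vector ids.

for region in vector_data.regions() {
    // Each `Region` carries its id, the inclusive segment range, and the fill id.
    println!("region {:?} spans segments {:?} with fill {:?}", region.id, region.segment_range, region.fill);
}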

#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]

