add merge_by_distance node
richard-uk1 committed Feb 21, 2025
1 parent 0004bbb commit 23a5e56
Showing 8 changed files with 333 additions and 17 deletions.
24 changes: 21 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

9 changes: 4 additions & 5 deletions node-graph/gcore/Cargo.toml
@@ -28,10 +28,7 @@ std = [
"image",
"reflections",
]
reflections = [
"alloc",
"ctor",
]
reflections = ["alloc", "ctor"]
serde = [
"dep:serde",
"glam/serde",
@@ -67,7 +64,7 @@ rand_chacha = { workspace = true, optional = true }
bezier-rs = { workspace = true, optional = true }
kurbo = { workspace = true, optional = true }
base64 = { workspace = true, optional = true }
vello = { workspace = true, optional = true }
vello = { workspace = true, optional = true }
wgpu = { workspace = true, optional = true }
specta = { workspace = true, optional = true }
rustybuzz = { workspace = true, optional = true }
@@ -80,6 +77,8 @@ image = { workspace = true, optional = true, default-features = false, features
"png",
] }
math-parser = { path = "../../libraries/math-parser" }
rustc-hash = { workspace = true }
petgraph = "0.7.1"

[dev-dependencies]
# Workspace dependencies
109 changes: 109 additions & 0 deletions node-graph/gcore/src/vector/merge_by_distance.rs
@@ -0,0 +1,109 @@
//! Collapse groups of points connected by segments shorter than `distance` into single points.
use glam::DVec2;
use petgraph::prelude::UnGraphMap;
use rustc_hash::FxHashSet;

use super::PointId;
use super::VectorData;

use super::VectorDataIndex;

impl VectorData {
/// Collapse groups of points connected by segments shorter than `distance` into single points.
pub(crate) fn merge_by_distance(&mut self, distance: f64) {
// Treat `self` as an undirected graph, with points as nodes and segments as edges.
let indices = VectorDataIndex::build_from(self);

// Graph that will contain only the short edges; its nodes reference points in the data graph.
let mut short_edges = UnGraphMap::new();
for seg_id in self.segment_ids().iter().copied() {
let length = indices.segment_chord_length(seg_id);
if length < distance {
let [start, end] = indices.segment_ends(seg_id);
let start = indices.point_graph.node_weight(start).unwrap().id;
let end = indices.point_graph.node_weight(end).unwrap().id;

short_edges.add_node(start);
short_edges.add_node(end);
short_edges.add_edge(start, end, seg_id);
}
}

// Now group the connected short segments; each group will be collapsed to a single point.
// Note: there are a few algorithms for this; perhaps test empirically to find the fastest.
let collapse: Vec<FxHashSet<PointId>> = petgraph::algo::tarjan_scc(&short_edges).into_iter().map(|connected| connected.into_iter().collect()).collect();
let average_position = collapse
.iter()
.map(|collapse_set| {
let sum: DVec2 = collapse_set.iter().map(|&id| indices.point_position(id, self)).sum();
sum / collapse_set.len() as f64
})
.collect::<Vec<_>>();

// Collect all points to delete and remove them at the end, so that our indices aren't invalidated.
let mut points_to_delete = FxHashSet::default();
let mut segments_to_delete = FxHashSet::default();
for (mut collapse_set, average_pos) in collapse.into_iter().zip(average_position.into_iter()) {
// Remove any segments where both endpoints are in the collapse set.
segments_to_delete.extend(self.segment_domain.iter().filter_map(|(id, start_offset, end_offset, _)| {
let start = self.point_domain.ids()[start_offset];
let end = self.point_domain.ids()[end_offset];
if collapse_set.contains(&start) && collapse_set.contains(&end) {
Some(id)
} else {
None
}
}));

// Delete all points but the first (chosen arbitrarily). Set that point's position to the
// average of the collapsed points, and update segments to replace all collapsed points
// with the remaining point.

// Unwrap: a set produced by the connected-components algorithm is never empty.
let first_id = collapse_set.iter().copied().next().unwrap();
// `first_id` is the point we will collapse to.
collapse_set.remove(&first_id);
let first_offset = indices.point_to_offset[&first_id];

// Look for segments with endpoints in `collapse_set` and reroute them to the point we are collapsing to.
for (_, start_offset, end_offset, handles) in self.segment_domain.iter_mut() {
let start_id = self.point_domain.ids()[*start_offset];
let end_id = self.point_domain.ids()[*end_offset];

// Moved point: only the Bézier handles need updating.
if start_id == first_id {
let point_position = self.point_domain.positions[*start_offset];
handles.move_start(average_pos - point_position);
}
if end_id == first_id {
let point_position = self.point_domain.positions[*end_offset];
handles.move_end(average_pos - point_position);
}

// Removed points: reroute the segment to the collapsed point and update its handles.
if collapse_set.contains(&start_id) {
let point_position = self.point_domain.positions[*start_offset];
*start_offset = first_offset;
handles.move_start(average_pos - point_position);
}
if collapse_set.contains(&end_id) {
let point_position = self.point_domain.positions[*end_offset];
*end_offset = first_offset;
handles.move_end(average_pos - point_position);
}
}
// This must come after iterating segments, so segments involving the point at
// `first_offset` have their handles updated correctly.
self.point_domain.positions[first_offset] = average_pos;

points_to_delete.extend(collapse_set);
}
self.segment_domain.retain(|id| !segments_to_delete.contains(id), usize::MAX);
self.point_domain.retain(&mut self.segment_domain, |id| !points_to_delete.contains(id));

log::debug!("{:?}", self.segment_domain.handles());

// TODO: don't forget about faces
}
}
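For intuition, here is a minimal, self-contained sketch of the grouping step above, using `petgraph` the same way: build an undirected graph containing only the short edges, then take its connected components with `tarjan_scc` (on an undirected graph the strongly connected components are exactly the connected components), and average each group's positions. The `u32` node IDs and the `position` closure are hypothetical stand-ins for `PointId` and `VectorDataIndex::point_position`, not the real types.

```rust
use petgraph::prelude::UnGraphMap;

fn main() {
    // Toy stand-in for `short_edges` above: two clusters, {1, 2, 3} and {7, 8}.
    let mut short_edges: UnGraphMap<u32, ()> = UnGraphMap::new();
    short_edges.add_edge(1, 2, ());
    short_edges.add_edge(2, 3, ());
    short_edges.add_edge(7, 8, ());

    // Each returned group is one collapse set.
    let collapse_sets = petgraph::algo::tarjan_scc(&short_edges);

    // Hypothetical position lookup; the real code queries the VectorDataIndex.
    let position = |id: u32| (id as f64, 2.0 * id as f64);

    for set in &collapse_sets {
        // Average the group's positions, as the node does before collapsing.
        let (sx, sy) = set.iter().map(|&id| position(id)).fold((0., 0.), |(ax, ay), (px, py)| (ax + px, ay + py));
        let n = set.len() as f64;
        println!("collapse {set:?} to ({}, {})", sx / n, sy / n);
    }
}
```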
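The collapse step itself then boils down to: drop segments that lie entirely inside a collapse set, reroute the remaining endpoints to one representative point, and move that point to the average position. A simplified, hypothetical model of that logic (it omits the Bézier-handle adjustment that the real code performs via `move_start`/`move_end`):

```rust
use std::collections::HashSet;

// Hypothetical, simplified segment: just start/end indices into a point list.
#[derive(Debug)]
struct Segment {
    start: usize,
    end: usize,
}

fn main() {
    let mut positions = vec![(0.0, 0.0), (0.1, 0.0), (5.0, 0.0)];
    let mut segments = vec![Segment { start: 0, end: 1 }, Segment { start: 1, end: 2 }];

    // Collapse set {0, 1}; keep index 0 as the representative point.
    let collapse: HashSet<usize> = [0, 1].into();
    let keep = 0;
    let average = (0.05, 0.0);

    // Drop segments fully inside the set, then reroute the rest to `keep`.
    segments.retain(|s| !(collapse.contains(&s.start) && collapse.contains(&s.end)));
    for s in &mut segments {
        if collapse.contains(&s.start) {
            s.start = keep;
        }
        if collapse.contains(&s.end) {
            s.end = keep;
        }
    }
    // Move the representative last, mirroring the ordering constraint above.
    positions[keep] = average;

    println!("{positions:?} {segments:?}");
}
```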
2 changes: 2 additions & 0 deletions node-graph/gcore/src/vector/mod.rs
@@ -12,3 +12,5 @@ mod vector_nodes;
pub use vector_nodes::*;

pub use bezier_rs;

mod merge_by_distance;
31 changes: 31 additions & 0 deletions node-graph/gcore/src/vector/vector_data.rs
@@ -1,6 +1,9 @@
mod attributes;
mod indexed;
mod modification;

pub use attributes::*;
pub use indexed::VectorDataIndex;
pub use modification::*;

use super::style::{PathStyle, Stroke};
@@ -12,6 +15,7 @@ use dyn_any::DynAny;

use core::borrow::Borrow;
use glam::{DAffine2, DVec2};
use std::borrow::Cow;

// TODO: Eventually remove this migration document upgrade code
pub fn migrate_vector_data<'de, D: serde::Deserializer<'de>>(deserializer: D) -> Result<VectorDataTable, D::Error> {
@@ -35,6 +39,8 @@ pub type VectorDataTable = Instances<VectorData>;

/// [VectorData] is passed between nodes.
/// It contains a list of subpaths (that may be open or closed), a transform, and some style information.
///
/// Segments are connected if they share end points.
#[derive(Clone, Debug, PartialEq, DynAny)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct VectorData {
@@ -65,6 +71,24 @@ impl core::hash::Hash for VectorData {
}
}

impl<'a> From<&'a VectorData> for Cow<'a, VectorData> {
fn from(value: &'a VectorData) -> Self {
Self::Borrowed(value)
}
}

impl<'a> From<&'a mut VectorData> for Cow<'a, VectorData> {
fn from(value: &'a mut VectorData) -> Self {
Self::Borrowed(value)
}
}

impl From<VectorData> for Cow<'static, VectorData> {
fn from(value: VectorData) -> Self {
Self::Owned(value)
}
}

impl VectorData {
/// An empty subpath with no data, an identity transform, and a black fill.
pub const fn empty() -> Self {
@@ -241,6 +265,13 @@ impl VectorData {
index.flat_map(|index| self.segment_domain.connected_points(index).map(|index| self.point_domain.ids()[index]))
}

/// A slice of all segment IDs.
///
/// Convenience function
pub fn segment_ids(&self) -> &[SegmentId] {
self.segment_domain.ids()
}

/// Enumerate all segments that start at the point.
pub fn start_connected(&self, point: PointId) -> impl Iterator<Item = SegmentId> + '_ {
let index = [self.point_domain.resolve_id(point)].into_iter().flatten();
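The three `From` conversions into `Cow<'_, VectorData>` added above let an API accept either borrowed or owned vector data and defer cloning until it actually mutates. A minimal sketch of the same pattern with a hypothetical `Data` type (not the real `VectorData`):

```rust
use std::borrow::Cow;

#[derive(Clone, Debug)]
struct Data(Vec<f64>);

impl<'a> From<&'a Data> for Cow<'a, Data> {
    fn from(value: &'a Data) -> Self {
        Cow::Borrowed(value)
    }
}

impl From<Data> for Cow<'static, Data> {
    fn from(value: Data) -> Self {
        Cow::Owned(value)
    }
}

// Accepts either a borrow or an owned value; clones only when it must mutate a borrow.
fn process<'a>(data: impl Into<Cow<'a, Data>>) -> Data {
    let mut data = data.into();
    data.to_mut().0.push(1.0); // Clones here only if the Cow is borrowed.
    data.into_owned()
}

fn main() {
    let d = Data(vec![0.5]);
    let from_ref = process(&d); // Borrowed: cloned inside `process`.
    let from_owned = process(d); // Owned: no clone.
    println!("{from_ref:?} {from_owned:?}");
}
```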
43 changes: 36 additions & 7 deletions node-graph/gcore/src/vector/vector_data/attributes.rs
@@ -1,8 +1,10 @@
use crate::vector::vector_data::{HandleId, VectorData, VectorDataTable};
use crate::vector::ConcatElement;

use bezier_rs::BezierHandles;
use dyn_any::DynAny;

use core::iter::zip;
use glam::{DAffine2, DVec2};
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
@@ -82,7 +84,7 @@ impl core::hash::BuildHasher for NoHashBuilder {
/// Stores data which is per-point. Each point is merely a position and can be used in a point cloud or to form a bézier path. In future this will be extendable at runtime with custom attributes.
pub struct PointDomain {
id: Vec<PointId>,
positions: Vec<DVec2>,
pub(crate) positions: Vec<DVec2>,
}

impl core::hash::Hash for PointDomain {
@@ -117,7 +119,8 @@ impl PointDomain {
id_map.push(new_index);
new_index += 1;
} else {
id_map.push(usize::MAX); // A placeholder for invalid ids. This is checked after the segment domain is modified.
// A placeholder for invalid ids. This is checked after the segment domain is modified.
id_map.push(usize::MAX);
}
}

@@ -184,6 +187,11 @@ impl PointDomain {
*pos = transform.transform_point2(*pos);
}
}

/// Iterate over point IDs and positions.
pub fn iter(&self) -> impl Iterator<Item = (PointId, DVec2)> + '_ {
self.ids().iter().copied().zip(self.positions().iter().copied())
}
}
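The `usize::MAX` placeholder written by `retain` above supports a simple index-remap scheme: keep a parallel `id_map` from old indices to new ones, with removed entries marked so stale references can be detected afterwards. A standalone sketch of that idea (the `build_id_map` helper is hypothetical, not the real API):

```rust
// Hypothetical helper mirroring the remap built inside `retain` above.
fn build_id_map(keep: &[bool]) -> Vec<usize> {
    let mut id_map = Vec::with_capacity(keep.len());
    let mut new_index = 0;
    for &kept in keep {
        if kept {
            id_map.push(new_index);
            new_index += 1;
        } else {
            // Placeholder for removed entries; any later lookup hitting this
            // value indicates a stale reference.
            id_map.push(usize::MAX);
        }
    }
    id_map
}

fn main() {
    let keep = [true, false, true, true, false];
    assert_eq!(build_id_map(&keep), [0, usize::MAX, 1, 2, usize::MAX]);
}
```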

#[derive(Clone, Debug, Default, PartialEq, Hash, DynAny)]
@@ -359,6 +367,7 @@ impl SegmentDomain {
})
}

/// Get the index from an ID. Runs in `O(n)`.
fn id_to_index(&self, id: SegmentId) -> Option<usize> {
debug_assert_eq!(self.ids.len(), self.handles.len());
debug_assert_eq!(self.ids.len(), self.start_point.len());
@@ -413,11 +422,35 @@
pub(crate) fn connected_count(&self, point: usize) -> usize {
self.all_connected(point).count()
}

/// Iterate over segments in the domain.
///
/// The yielded tuples are `(id, start point, end point, handles)`.
pub(crate) fn iter(&self) -> impl Iterator<Item = (SegmentId, usize, usize, BezierHandles)> + '_ {
let ids = self.ids.iter().copied();
let start_point = self.start_point.iter().copied();
let end_point = self.end_point.iter().copied();
let handles = self.handles.iter().copied();
zip(ids, zip(start_point, zip(end_point, handles))).map(|(id, (start_point, (end_point, handles)))| (id, start_point, end_point, handles))
}

/// Iterate over segments in the domain.
///
/// The yielded tuples are mutable references to `(id, start point, end point, handles)`.
pub(crate) fn iter_mut(&mut self) -> impl Iterator<Item = (&mut SegmentId, &mut usize, &mut usize, &mut BezierHandles)> + '_ {
let ids = self.ids.iter_mut();
let start_point = self.start_point.iter_mut();
let end_point = self.end_point.iter_mut();
let handles = self.handles.iter_mut();
zip(ids, zip(start_point, zip(end_point, handles))).map(|(id, (start_point, (end_point, handles)))| (id, start_point, end_point, handles))
}
}
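Since `std::iter::zip` only pairs two iterators, `iter` and `iter_mut` above nest the calls and then flatten the tuples back out in the `map`. The same pattern in isolation, with plain vectors standing in for the domain's parallel columns:

```rust
use std::iter::zip;

fn main() {
    // Four parallel columns, as in SegmentDomain: id, start, end, handles.
    let ids = vec![10_u64, 11, 12];
    let starts = vec![0_usize, 1, 2];
    let ends = vec![1_usize, 2, 0];
    let handles = vec!['a', 'b', 'c'];

    // zip only pairs two iterators, so nest it and flatten the nested
    // tuples back into a flat 4-tuple, exactly as the methods above do.
    let segments = zip(ids, zip(starts, zip(ends, handles))).map(|(id, (start, (end, handle)))| (id, start, end, handle));

    for (id, start, end, handle) in segments {
        println!("segment {id}: {start} -> {end} ({handle})");
    }
}
```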

#[derive(Clone, Debug, Default, PartialEq, Hash, DynAny)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
/// Stores data which is per-region. A region is an enclosed area composed of a range of segments from the [`SegmentDomain`] that can be given a fill. In future this will be extendable at runtime with custom attributes.
/// Stores data which is per-region. A region is an enclosed area composed of a range
/// of segments from the [`SegmentDomain`] that can be given a fill. In future this will
/// be extendable at runtime with custom attributes.
pub struct RegionDomain {
ids: Vec<RegionId>,
segment_range: Vec<core::ops::RangeInclusive<SegmentId>>,
@@ -457,10 +490,6 @@ impl RegionDomain {
self.fill.push(fill);
}

fn _resolve_id(&self, id: RegionId) -> Option<usize> {
self.ids.iter().position(|&check_id| check_id == id)
}

pub fn next_id(&self) -> RegionId {
self.ids.iter().copied().max_by(|a, b| a.0.cmp(&b.0)).map(|mut id| id.next_id()).unwrap_or(RegionId::ZERO)
}
