Skip to content

Commit

Permalink
WIP TODO squash this commit
Browse files Browse the repository at this point in the history
  • Loading branch information
richard-uk1 committed Feb 19, 2025
1 parent 90a8036 commit 7cd11f3
Show file tree
Hide file tree
Showing 8 changed files with 403 additions and 14 deletions.
24 changes: 21 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 4 additions & 5 deletions node-graph/gcore/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,7 @@ std = [
"image",
"reflections",
]
reflections = [
"alloc",
"ctor",
]
reflections = ["alloc", "ctor"]
serde = [
"dep:serde",
"glam/serde",
Expand Down Expand Up @@ -67,7 +64,7 @@ rand_chacha = { workspace = true, optional = true }
bezier-rs = { workspace = true, optional = true }
kurbo = { workspace = true, optional = true }
base64 = { workspace = true, optional = true }
vello = { workspace = true, optional = true }
vello = { workspace = true, optional = true }
wgpu = { workspace = true, optional = true }
specta = { workspace = true, optional = true }
rustybuzz = { workspace = true, optional = true }
Expand All @@ -80,6 +77,8 @@ image = { workspace = true, optional = true, default-features = false, features
"png",
] }
math-parser = { path = "../../libraries/math-parser" }
rustc-hash = { workspace = true }
petgraph = "0.7.1"

[dev-dependencies]
# Workspace dependencies
Expand Down
100 changes: 100 additions & 0 deletions node-graph/gcore/src/vector/merge_by_distance.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
//! Collapse all points whose connecting segments are all shorter than a given distance.
use core::mem;

use glam::DVec2;
use petgraph::graph::EdgeIndex;
use petgraph::graph::NodeIndex;
use petgraph::graph::UnGraph;
use petgraph::prelude::GraphMap;
use petgraph::prelude::UnGraphMap;
use rustc_hash::FxHashMap;
use rustc_hash::FxHashSet;

use super::PointId;
use super::SegmentId;
use super::VectorData;

use super::IndexedVectorData;

impl VectorData {
	/// Collapse all points whose connecting segments are all shorter than `distance`.
	///
	/// Each connected run of short segments is merged into a single surviving point,
	/// placed at the average position of the points it replaces. Segments that end up
	/// with both endpoints merged together are removed rather than left as self-loops.
	pub(crate) fn merge_by_distance(&mut self, distance: f64) {
		// Treat `self` as an undirected graph: points are nodes, segments are edges.
		let mut data = IndexedVectorData::from_data(&*self);

		// Build a graph containing only the short edges. It references the indexed data.
		let mut short_edges = UnGraphMap::new();
		for segment_id in data.segments() {
			if data.segment_chord_length(segment_id) < distance {
				let [start, end] = data.segment_ends(segment_id);
				let start = data.point_graph.node_weight(start).unwrap().id;
				let end = data.point_graph.node_weight(end).unwrap().id;

				short_edges.add_node(start);
				short_edges.add_node(end);
				short_edges.add_edge(start, end, segment_id);
			}
		}

		// Group connected runs of short segments - each group collapses to a single point.
		// On an undirected graph, strongly connected components are exactly the connected
		// components. Note: there are a few algorithms for this - perhaps test empirically
		// to find the fastest.
		let collapse: Vec<FxHashSet<PointId>> = petgraph::algo::tarjan_scc(&short_edges).into_iter().map(|connected| connected.into_iter().collect()).collect();
		let average_position = collapse
			.iter()
			.map(|collapse_set| {
				let sum: DVec2 = collapse_set.iter().map(|&id| data.point_position(id)).sum();
				sum / collapse_set.len() as f64
			})
			.collect::<Vec<_>>();

		// Steal the point->offset mapping before dropping the indexes. `data` borrows
		// `self`, and it must be dropped before we can mutate `self` below.
		let point_to_offset = mem::take(&mut data.point_to_offset);
		drop(data);

		// Collect everything to delete and remove it at the end, so that the stolen
		// offsets stay valid throughout the loop.
		let mut points_to_delete = FxHashSet::default();
		let mut segments_to_delete = FxHashSet::default();
		for (mut collapse_set, average_position) in collapse.into_iter().zip(average_position) {
			// Mark for deletion any segment whose endpoints are both inside the collapse
			// set - after collapsing it would be a degenerate self-loop.
			segments_to_delete.extend(self.segment_domain.iter().filter_map(|(id, start_offset, end_offset, _)| {
				let start = self.point_domain.ids()[start_offset];
				let end = self.point_domain.ids()[end_offset];
				(collapse_set.contains(&start) && collapse_set.contains(&end)).then_some(id)
			}));

			// Delete all points but one (the choice is arbitrary), move the surviving
			// point to the average position, and rewire segments that referenced a
			// deleted point so they reference the survivor instead.

			// Unwrap: connected components returned by the SCC algorithm are never empty.
			let first_id = collapse_set.iter().copied().next().unwrap();
			// `first_id` is the point we collapse to; the rest of the set gets deleted.
			collapse_set.remove(&first_id);
			let first_offset = point_to_offset[&first_id];
			self.point_domain.positions[first_offset] = average_position;

			// Rewire segments with an endpoint in the collapse set to the surviving point.
			for (_, start_offset, end_offset, ..) in self.segment_domain.iter_mut() {
				let start_id = self.point_domain.ids()[*start_offset];
				let end_id = self.point_domain.ids()[*end_offset];
				if collapse_set.contains(&start_id) {
					*start_offset = first_offset;
				} else if collapse_set.contains(&end_id) {
					// A segment with BOTH ends in the set was already marked for deletion
					// above, so skipping its end here (via `else if`) is harmless.
					*end_offset = first_offset;
				}
			}

			points_to_delete.extend(collapse_set)
		}
		self.segment_domain.retain(|id| !segments_to_delete.contains(id), usize::MAX);
		self.point_domain.retain(&mut self.segment_domain, |id| !points_to_delete.contains(id));

		// TODO: don't forget about faces
	}
}
2 changes: 2 additions & 0 deletions node-graph/gcore/src/vector/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,5 @@ mod vector_nodes;
pub use vector_nodes::*;

pub use bezier_rs;

mod merge_by_distance;
26 changes: 26 additions & 0 deletions node-graph/gcore/src/vector/vector_data.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
mod attributes;
mod indexed;
mod modification;

pub use attributes::*;
pub use indexed::IndexedVectorData;
pub use modification::*;

use rustc_hash::FxHashMap;

use super::style::{PathStyle, Stroke};
use crate::instances::Instances;
use crate::{AlphaBlending, Color, GraphicGroupTable};
Expand All @@ -12,6 +17,7 @@ use dyn_any::DynAny;

use core::borrow::Borrow;
use glam::{DAffine2, DVec2};
use std::borrow::Cow;

// TODO: Eventually remove this migration document upgrade code
pub fn migrate_vector_data<'de, D: serde::Deserializer<'de>>(deserializer: D) -> Result<VectorDataTable, D::Error> {
Expand All @@ -35,6 +41,8 @@ pub type VectorDataTable = Instances<VectorData>;

/// [VectorData] is passed between nodes.
/// It contains a list of subpaths (that may be open or closed), a transform, and some style information.
///
/// Segments are connected if they share end points.
#[derive(Clone, Debug, PartialEq, DynAny)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct VectorData {
Expand Down Expand Up @@ -65,6 +73,24 @@ impl core::hash::Hash for VectorData {
}
}

impl<'a> From<&'a VectorData> for Cow<'a, VectorData> {
	/// Wraps a shared reference as a borrowed [`Cow`] without cloning.
	fn from(value: &'a VectorData) -> Self {
		Cow::Borrowed(value)
	}
}

impl<'a> From<&'a mut VectorData> for Cow<'a, VectorData> {
	/// Reborrows a mutable reference as a borrowed (immutable) [`Cow`] without cloning.
	fn from(value: &'a mut VectorData) -> Self {
		Cow::Borrowed(value)
	}
}

impl From<VectorData> for Cow<'static, VectorData> {
	/// Takes ownership of the data, producing an owned [`Cow`].
	fn from(value: VectorData) -> Self {
		Cow::Owned(value)
	}
}

impl VectorData {
/// An empty subpath with no data, an identity transform, and a black fill.
pub const fn empty() -> Self {
Expand Down
Loading

0 comments on commit 7cd11f3

Please sign in to comment.