Skip to content

Pre-processing

Mesh preparation utilities before slicing.

pre_processing

Pre-processing utilities for mesh preparation before slicing.

GradientEvaluation

GradientEvaluation(mesh, DATA_PATH)

Evaluation of the gradient of the scalar function of the mesh. The scalar function should be stored as a vertex attribute on every vertex, with key='scalar_field'

Attributes:

Name Type Description
mesh :class: 'compas.datastructures.Mesh'
DATA_PATH str, path to the data folder
Source code in src/compas_slicer/pre_processing/gradient_evaluation.py
def __init__(self, mesh: Mesh, DATA_PATH: str | FilePath) -> None:
    for v_key, data in mesh.vertices(data=True):
        if 'scalar_field' not in data:
            raise ValueError(f"Vertex {v_key} does not have the attribute 'scalar_field'")

    logger.info('Gradient evaluation')
    self.mesh = mesh
    self.DATA_PATH = DATA_PATH
    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)

    self.minima: list[int] = []
    self.maxima: list[int] = []
    self.saddles: list[int] = []

    self.face_gradient: NDArray[np.floating] | list = []  # np.array (#F x 3) one gradient vector per face.
    self.vertex_gradient: NDArray[np.floating] | list = []  # np.array (#V x 3) one gradient vector per vertex.
    self.face_gradient_norm: list[float] = []  # list (#F x 1)
    self.vertex_gradient_norm: list[float] = []  # list (#V x 1)

compute_gradient

compute_gradient()

Computes the gradient on the faces and the vertices.

Source code in src/compas_slicer/pre_processing/gradient_evaluation.py
def compute_gradient(self) -> None:
    """ Computes the gradient on the faces and the vertices. """
    u_v = [self.mesh.vertex[vkey]['scalar_field'] for vkey in self.mesh.vertices()]
    self.face_gradient = get_face_gradient_from_scalar_field(self.mesh, u_v)
    self.vertex_gradient = get_vertex_gradient_from_face_gradient(self.mesh, self.face_gradient)

compute_gradient_norm

compute_gradient_norm()

Computes the norm of the gradient.

Source code in src/compas_slicer/pre_processing/gradient_evaluation.py
def compute_gradient_norm(self) -> None:
    """ Computes the norm of the gradient. """
    logger.info('Computing norm of gradient')
    f_g = np.array([self.face_gradient[i] for i, fkey in enumerate(self.mesh.faces())])
    v_g = np.array([self.vertex_gradient[i] for i, vkey in enumerate(self.mesh.vertices())])
    self.face_gradient_norm = list(np.linalg.norm(f_g, axis=1))
    self.vertex_gradient_norm = list(np.linalg.norm(v_g, axis=1))

find_critical_points

find_critical_points()

Finds minima, maxima and saddle points of the scalar function on the mesh.

Source code in src/compas_slicer/pre_processing/gradient_evaluation.py
def find_critical_points(self) -> None:
    """ Finds minima, maxima and saddle points of the scalar function on the mesh. """
    for vkey, data in self.mesh.vertices(data=True):
        current_v = data['scalar_field']
        neighbors = self.mesh.vertex_neighbors(vkey, ordered=True)
        values = []
        if len(neighbors) > 0:
            neighbors.append(neighbors[0])
            for n in neighbors:
                v = self.mesh.vertex_attributes(n)['scalar_field']
                if abs(v - current_v) > 0.0:
                    values.append(current_v - v)
            sgc = count_sign_changes(values)

            if sgc == 0:  # extreme point
                if current_v > self.mesh.vertex_attributes(neighbors[0])['scalar_field']:
                    self.maxima.append(vkey)
                else:
                    self.minima.append(vkey)
            if sgc == 2:  # regular point
                pass
            if sgc > 2 and sgc % 2 == 0:
                self.saddles.append(vkey)

InterpolationSlicingPreprocessor

InterpolationSlicingPreprocessor(mesh, config=None, DATA_PATH='.')

Handles pre-processing for interpolation slicing.

Attributes:

Name Type Description
mesh Mesh

Input mesh.

config InterpolationConfig

Interpolation configuration.

DATA_PATH str | Path

Path to the data folder.

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def __init__(
    self, mesh: Mesh, config: InterpolationConfig | None = None, DATA_PATH: str | Path = "."
) -> None:
    self.mesh = mesh
    self.config = config if config else InterpolationConfig()
    self.DATA_PATH = DATA_PATH

    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
    self.target_LOW: CompoundTarget | None = None
    self.target_HIGH: CompoundTarget | None = None

    self.split_meshes: list[Mesh] = []
    # The meshes that result from the region splitting process.

    utils.utils.check_triangular_mesh(mesh)

create_compound_targets

create_compound_targets()

Create target_LOW and target_HIGH and compute geodesic distances.

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def create_compound_targets(self) -> None:
    """Create target_LOW and target_HIGH and compute geodesic distances."""

    # --- low target
    geodesics_method = self.config.target_low_geodesics_method.value
    method = 'min'  # no other union methods currently supported for lower target
    params: list[float] = []
    self.target_LOW = CompoundTarget(self.mesh, 'boundary', 1, self.DATA_PATH,
                                     union_method=method,
                                     union_params=params,
                                     geodesics_method=geodesics_method)

    # --- high target
    geodesics_method = self.config.target_high_geodesics_method.value
    method = self.config.target_high_union_method.value
    params = self.config.target_high_union_params
    logger.info(f"Creating target with union type: {method} and params: {params}")
    self.target_HIGH = CompoundTarget(self.mesh, 'boundary', 2, self.DATA_PATH,
                                      union_method=method,
                                      union_params=params,
                                      geodesics_method=geodesics_method)

    # --- uneven boundaries of high target
    self.target_HIGH.offset = self.config.uneven_upper_targets_offset
    self.target_HIGH.compute_uneven_boundaries_weight_max(self.target_LOW)

    #  --- save intermediary get_distance outputs
    self.target_LOW.save_distances("distances_LOW.json")
    self.target_HIGH.save_distances("distances_HIGH.json")

targets_laplacian_smoothing

targets_laplacian_smoothing(iterations, strength)

Smooth geodesic distances of targets. Saves again the distances to json.

Parameters:

Name Type Description Default
iterations int
required
strength float
required
Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def targets_laplacian_smoothing(self, iterations: int, strength: float) -> None:
    """
    Smooth geodesic distances of targets. Saves again the distances to json.

    Parameters
    ----------
    iterations: int
    strength: float
    """
    if self.target_LOW is None or self.target_HIGH is None:
        raise RuntimeError("Targets not initialized. Call create_compound_targets() first.")
    self.target_LOW.laplacian_smoothing(iterations=iterations, strength=strength)
    self.target_HIGH.laplacian_smoothing(iterations=iterations, strength=strength)
    self.target_LOW.save_distances("distances_LOW.json")
    self.target_HIGH.save_distances("distances_HIGH.json")

create_gradient_evaluation

create_gradient_evaluation(target_1, target_2=None, save_output=True, norm_filename='gradient_norm.json', g_filename='gradient.json')

Creates a compas_slicer.pre_processing.GradientEvaluation that is stored in self.g_evaluation Also, computes the gradient and gradient_norm and saves them to Json .

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def create_gradient_evaluation(
    self,
    target_1: CompoundTarget,
    target_2: CompoundTarget | None = None,
    save_output: bool = True,
    norm_filename: str = 'gradient_norm.json',
    g_filename: str = 'gradient.json',
) -> GradientEvaluation:
    """
    Creates a compas_slicer.pre_processing.GradientEvaluation that is stored in self.g_evaluation
    Also, computes the gradient and gradient_norm and saves them to Json .
    """
    if self.target_LOW is None or self.target_HIGH is None:
        raise RuntimeError("Targets not initialized. Call create_compound_targets() first.")
    if self.target_LOW.VN != target_1.VN:
        raise ValueError("Preprocessor does not match targets: vertex count mismatch.")
    assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=0.5,
                                                   target_LOW=self.target_LOW, target_HIGH=self.target_HIGH)
    g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH)
    g_evaluation.compute_gradient()
    g_evaluation.compute_gradient_norm()

    if save_output:
        # save results to json
        utils.save_to_json(g_evaluation.vertex_gradient_norm, self.OUTPUT_PATH, norm_filename)
        utils.save_to_json(utils.point_list_to_dict(g_evaluation.vertex_gradient), self.OUTPUT_PATH, g_filename)

    return g_evaluation

find_critical_points

find_critical_points(g_evaluation, output_filenames)

Computes and saves to json the critical points of the df on the mesh (minima, maxima, saddles)

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def find_critical_points(
    self, g_evaluation: GradientEvaluation, output_filenames: tuple[str, str, str]
) -> None:
    """ Computes and saves to json the critical points of the df on the mesh (minima, maxima, saddles)"""
    g_evaluation.find_critical_points()
    # save results to json
    utils.save_to_json(g_evaluation.minima, self.OUTPUT_PATH, output_filenames[0])
    utils.save_to_json(g_evaluation.maxima, self.OUTPUT_PATH, output_filenames[1])
    utils.save_to_json(g_evaluation.saddles, self.OUTPUT_PATH, output_filenames[2])

region_split

region_split(cut_mesh=True, separate_neighborhoods=True, topological_sorting=True, save_split_meshes=True)

Splits the mesh on the saddle points. This process can take a long time. It consists of four parts: 1) Create cuts on the mesh so that they intersect the saddle points and follow the get_distance function iso-contour 2) Separate mesh neighborhoods from cuts 3) Topological sorting of split meshes to determine their connectivity and sequence. 4) Finally resulting meshes are saved to json.

The intermediary outputs are saved to json, so if you don'weight want to be recomputing the entire thing every time, you can turn the respective processes to false.

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def region_split(
    self,
    cut_mesh: bool = True,
    separate_neighborhoods: bool = True,
    topological_sorting: bool = True,
    save_split_meshes: bool = True,
) -> None:
    """
    Splits the mesh on the saddle points. This process can take a long time.
    It consists of four parts:
    1) Create cuts on the mesh so that they intersect the saddle points and follow the get_distance function
    iso-contour
    2) Separate mesh neighborhoods  from cuts
    3) Topological sorting of split meshes to determine their connectivity and sequence.
    4) Finally resulting meshes are saved to json.

    The intermediary outputs are saved to json, so if you don'weight want to be recomputing the entire thing every
    time, you can turn the respective processes to false.
    """

    logger.info("--- Mesh region splitting")

    if cut_mesh:  # (1)
        self.mesh.update_default_vertex_attributes({'cut': 0})
        mesh_splitter = rs.MeshSplitter(self.mesh, self.target_LOW, self.target_HIGH, self.DATA_PATH)
        mesh_splitter.run()

        self.mesh = mesh_splitter.mesh
        logger.info('Completed Region splitting')
        logger.info(f"Region split cut indices: {mesh_splitter.cut_indices}")
        # save results to json
        output_path = Path(self.OUTPUT_PATH)
        self.mesh.to_obj(str(output_path / 'mesh_with_cuts.obj'))
        self.mesh.to_json(str(output_path / 'mesh_with_cuts.json'))
        logger.info(f"Saving to Obj and Json: {output_path / 'mesh_with_cuts.json'}")

    if separate_neighborhoods:  # (2)
        logger.info("--- Separating mesh disconnected components")
        self.mesh = Mesh.from_json(str(Path(self.OUTPUT_PATH) / 'mesh_with_cuts.json'))
        region_split_cut_indices = get_existing_cut_indices(self.mesh)

        # save results to json
        utils.save_to_json(get_vertices_that_belong_to_cuts(self.mesh, region_split_cut_indices),
                           self.OUTPUT_PATH, "vertices_on_cuts.json")

        self.split_meshes = rs.separate_disconnected_components(self.mesh, attr='cut',
                                                                values=region_split_cut_indices,
                                                                OUTPUT_PATH=self.OUTPUT_PATH)
        logger.info(f'Created {len(self.split_meshes)} split meshes.')

    if topological_sorting:  # (3)
        logger.info("--- Topological sort of meshes directed graph to determine print order")
        graph = topo_sort.MeshDirectedGraph(self.split_meshes, self.DATA_PATH)
        all_orders = graph.get_all_topological_orders()
        selected_order = all_orders[0]
        logger.info(f'selected_order: {selected_order}')  # TODO: improve the way an order is selected
        self.cleanup_mesh_attributes_based_on_selected_order(selected_order, graph)

        # reorder split_meshes based on selected order
        self.split_meshes = [self.split_meshes[i] for i in selected_order]

    # --- save split meshes
    if save_split_meshes:  # (4)
        logger.info("--- Saving resulting split meshes")
        output_path = Path(self.OUTPUT_PATH)
        for i, m in enumerate(self.split_meshes):
            m.to_obj(str(output_path / f'split_mesh_{i}.obj'))
            m.to_json(str(output_path / f'split_mesh_{i}.json'))
        logger.info(f'Saving to Obj and Json: {output_path / "split_mesh_%.obj"}')
        logger.info(f"Saved {len(self.split_meshes)} split_meshes")

cleanup_mesh_attributes_based_on_selected_order

cleanup_mesh_attributes_based_on_selected_order(selected_order, graph)

Based on the selected order of split meshes, it rearranges their attributes, so that they can then be used with an interpolation slicer that requires data['boundary'] to be filled for every vertex. The vertices that originated from cuts have data['cut']=cut_index. This is replaced by data['boundary'] = 1 or 2 depending on connectivity of mesh.

Parameters:

Name Type Description Default
selected_order list[int]

The indices of ordered split meshes.

required
graph MeshDirectedGraph
required
Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def cleanup_mesh_attributes_based_on_selected_order(
    self, selected_order: list[int], graph: MeshDirectedGraph
) -> None:
    """
    Based on the selected order of split meshes, it rearranges their attributes, so that they can then be used
    with an interpolation slicer that requires data['boundary'] to be filled for every vertex.
    The vertices that originated from cuts have data['cut']=cut_index. This is replaced
    by data['boundary'] = 1 or 2 depending on connectivity of mesh.

    Parameters
    ----------
    selected_order: list, int
        The indices of ordered split meshes.
    graph: :class: 'networkx.Graph'
    """
    for index in selected_order:
        mesh = self.split_meshes[index]
        for child_node in graph.adj_list[index]:
            child_mesh = self.split_meshes[child_node]
            edge = graph.G.edges[index, child_node]
            common_cuts = edge['cut']
            for cut_id in common_cuts:
                replace_mesh_vertex_attribute(mesh, 'cut', cut_id, 'boundary', 2)
                replace_mesh_vertex_attribute(child_mesh, 'cut', cut_id, 'boundary', 1)

        # save results to json
        pts_boundary_LOW = utils.get_mesh_vertex_coords_with_attribute(mesh, 'boundary', 1)
        pts_boundary_HIGH = utils.get_mesh_vertex_coords_with_attribute(mesh, 'boundary', 2)
        utils.save_to_json(utils.point_list_to_dict(pts_boundary_LOW), self.OUTPUT_PATH,
                           f'pts_boundary_LOW_{index}.json')
        utils.save_to_json(utils.point_list_to_dict(pts_boundary_HIGH), self.OUTPUT_PATH,
                           f'pts_boundary_HIGH_{index}.json')

move_mesh_to_point

move_mesh_to_point(mesh, target_point)

Moves (translates) a mesh to a target point.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh.

required
target_point Point

The point to move the mesh to.

required
Source code in src/compas_slicer/pre_processing/positioning.py
def move_mesh_to_point(mesh: Mesh, target_point: Point) -> Mesh:
    """Moves (translates) a mesh to a target point.

    Parameters
    ----------
    mesh: :class:`compas.datastructures.Mesh`
        A compas mesh.
    target_point: :class:`compas.geometry.Point`
        The point to move the mesh to.
    """
    mesh_center_pt = get_mid_pt_base(mesh)

    # transform mesh
    mesh_frame = Frame(mesh_center_pt, (1, 0, 0), (0, 1, 0))
    target_frame = Frame(target_point, (1, 0, 0), (0, 1, 0))

    T = Transformation.from_frame_to_frame(mesh_frame, target_frame)
    mesh.transform(T)

    logger.info(f"Mesh moved to: {target_point}")

    return mesh

get_mid_pt_base

get_mid_pt_base(mesh)

Gets the middle point of the base (bottom) of the mesh.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh.

required

Returns:

Name Type Description
mesh_mid_pt :class:`compas.geometry.Point`

Middle point of the base of the mesh.

Source code in src/compas_slicer/pre_processing/positioning.py
def get_mid_pt_base(mesh: Mesh) -> Point:
    """Gets the middle point of the base (bottom) of the mesh.

    Parameters
    ----------
    mesh: :class:`compas.datastructures.Mesh`
        A compas mesh.

    Returns
    -------
    mesh_mid_pt: :class:`compas.geometry.Point`
        Middle point of the base of the mesh.

    """
    # get center bottom point of mesh model
    vertices = list(mesh.vertices_attributes('xyz'))
    bbox = bounding_box(vertices)
    corner_pts = [bbox[0], bbox[2]]

    x = [p[0] for p in corner_pts]
    y = [p[1] for p in corner_pts]
    z = [p[2] for p in corner_pts]

    mesh_mid_pt = Point((sum(x) / 2), (sum(y) / 2), (sum(z) / 2))

    return mesh_mid_pt

remesh_mesh

remesh_mesh(mesh, target_edge_length, number_of_iterations=10, do_project=True)

Remesh a triangle mesh to achieve uniform edge lengths.

Uses CGAL's isotropic remeshing to improve mesh quality for slicing. This can help with curved slicing and geodesic computations.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh (must be triangulated).

required
target_edge_length float

Target edge length for the remeshed output.

required
number_of_iterations int

Number of remeshing iterations (default: 10).

10
do_project bool

Reproject vertices onto original surface (default: True).

True

Returns:

Type Description
Mesh

Remeshed compas mesh.

Raises:

Type Description
ImportError

If compas_cgal is not available.

Examples:

>>> from compas.datastructures import Mesh
>>> from compas_slicer.pre_processing import remesh_mesh
>>> mesh = Mesh.from_stl('model.stl')
>>> remeshed = remesh_mesh(mesh, target_edge_length=2.0)
Source code in src/compas_slicer/pre_processing/positioning.py
def remesh_mesh(
    mesh: Mesh,
    target_edge_length: float,
    number_of_iterations: int = 10,
    do_project: bool = True
) -> Mesh:
    """Remesh a triangle mesh to achieve uniform edge lengths.

    Uses CGAL's isotropic remeshing to improve mesh quality for slicing.
    This can help with curved slicing and geodesic computations.

    Parameters
    ----------
    mesh : Mesh
        A compas mesh (must be triangulated).
    target_edge_length : float
        Target edge length for the remeshed output.
    number_of_iterations : int
        Number of remeshing iterations (default: 10).
    do_project : bool
        Reproject vertices onto original surface (default: True).

    Returns
    -------
    Mesh
        Remeshed compas mesh.

    Raises
    ------
    ImportError
        If compas_cgal is not available.

    Examples
    --------
    >>> from compas.datastructures import Mesh
    >>> from compas_slicer.pre_processing import remesh_mesh
    >>> mesh = Mesh.from_stl('model.stl')
    >>> remeshed = remesh_mesh(mesh, target_edge_length=2.0)
    """
    try:
        from compas_cgal.meshing import trimesh_remesh
    except ImportError as e:
        raise ImportError(
            "remesh_mesh requires compas_cgal. Install with: pip install compas_cgal"
        ) from e

    from compas.datastructures import Mesh as CompasMesh

    M = mesh.to_vertices_and_faces()
    V, F = trimesh_remesh(M, target_edge_length, number_of_iterations, do_project)

    result = CompasMesh.from_vertices_and_faces(V.tolist(), F.tolist())

    logger.info(
        f"Remeshed: {mesh.number_of_vertices()} -> {result.number_of_vertices()} vertices, "
        f"target edge length: {target_edge_length}"
    )

    return result

gradient_evaluation

GradientEvaluation

GradientEvaluation(mesh, DATA_PATH)

Evaluation of the gradient of the scalar function of the mesh. The scalar function should be stored as a vertex attribute on every vertex, with key='scalar_field'

Attributes:

Name Type Description
mesh :class: 'compas.datastructures.Mesh'
DATA_PATH str, path to the data folder
Source code in src/compas_slicer/pre_processing/gradient_evaluation.py
def __init__(self, mesh: Mesh, DATA_PATH: str | FilePath) -> None:
    for v_key, data in mesh.vertices(data=True):
        if 'scalar_field' not in data:
            raise ValueError(f"Vertex {v_key} does not have the attribute 'scalar_field'")

    logger.info('Gradient evaluation')
    self.mesh = mesh
    self.DATA_PATH = DATA_PATH
    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)

    self.minima: list[int] = []
    self.maxima: list[int] = []
    self.saddles: list[int] = []

    self.face_gradient: NDArray[np.floating] | list = []  # np.array (#F x 3) one gradient vector per face.
    self.vertex_gradient: NDArray[np.floating] | list = []  # np.array (#V x 3) one gradient vector per vertex.
    self.face_gradient_norm: list[float] = []  # list (#F x 1)
    self.vertex_gradient_norm: list[float] = []  # list (#V x 1)
compute_gradient
compute_gradient()

Computes the gradient on the faces and the vertices.

Source code in src/compas_slicer/pre_processing/gradient_evaluation.py
def compute_gradient(self) -> None:
    """ Computes the gradient on the faces and the vertices. """
    u_v = [self.mesh.vertex[vkey]['scalar_field'] for vkey in self.mesh.vertices()]
    self.face_gradient = get_face_gradient_from_scalar_field(self.mesh, u_v)
    self.vertex_gradient = get_vertex_gradient_from_face_gradient(self.mesh, self.face_gradient)
compute_gradient_norm
compute_gradient_norm()

Computes the norm of the gradient.

Source code in src/compas_slicer/pre_processing/gradient_evaluation.py
def compute_gradient_norm(self) -> None:
    """ Computes the norm of the gradient. """
    logger.info('Computing norm of gradient')
    f_g = np.array([self.face_gradient[i] for i, fkey in enumerate(self.mesh.faces())])
    v_g = np.array([self.vertex_gradient[i] for i, vkey in enumerate(self.mesh.vertices())])
    self.face_gradient_norm = list(np.linalg.norm(f_g, axis=1))
    self.vertex_gradient_norm = list(np.linalg.norm(v_g, axis=1))
find_critical_points
find_critical_points()

Finds minima, maxima and saddle points of the scalar function on the mesh.

Source code in src/compas_slicer/pre_processing/gradient_evaluation.py
def find_critical_points(self) -> None:
    """ Finds minima, maxima and saddle points of the scalar function on the mesh. """
    for vkey, data in self.mesh.vertices(data=True):
        current_v = data['scalar_field']
        neighbors = self.mesh.vertex_neighbors(vkey, ordered=True)
        values = []
        if len(neighbors) > 0:
            neighbors.append(neighbors[0])
            for n in neighbors:
                v = self.mesh.vertex_attributes(n)['scalar_field']
                if abs(v - current_v) > 0.0:
                    values.append(current_v - v)
            sgc = count_sign_changes(values)

            if sgc == 0:  # extreme point
                if current_v > self.mesh.vertex_attributes(neighbors[0])['scalar_field']:
                    self.maxima.append(vkey)
                else:
                    self.minima.append(vkey)
            if sgc == 2:  # regular point
                pass
            if sgc > 2 and sgc % 2 == 0:
                self.saddles.append(vkey)

count_sign_changes

count_sign_changes(values)

Returns the number of sign changes in a list of values.

Source code in src/compas_slicer/pre_processing/gradient_evaluation.py
def count_sign_changes(values: list[float]) -> int:
    """ Returns the number of sign changes in a list of values. """
    count = 0
    prev_v: float = 0.0
    for i, v in enumerate(values):
        if i == 0:
            prev_v = v
        else:
            if prev_v * v < 0:
                count += 1
            prev_v = v
    return count

interpolation_slicing_preprocessor

InterpolationSlicingPreprocessor

InterpolationSlicingPreprocessor(mesh, config=None, DATA_PATH='.')

Handles pre-processing for interpolation slicing.

Attributes:

Name Type Description
mesh Mesh

Input mesh.

config InterpolationConfig

Interpolation configuration.

DATA_PATH str | Path

Path to the data folder.

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def __init__(
    self, mesh: Mesh, config: InterpolationConfig | None = None, DATA_PATH: str | Path = "."
) -> None:
    self.mesh = mesh
    self.config = config if config else InterpolationConfig()
    self.DATA_PATH = DATA_PATH

    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
    self.target_LOW: CompoundTarget | None = None
    self.target_HIGH: CompoundTarget | None = None

    self.split_meshes: list[Mesh] = []
    # The meshes that result from the region splitting process.

    utils.utils.check_triangular_mesh(mesh)
create_compound_targets
create_compound_targets()

Create target_LOW and target_HIGH and compute geodesic distances.

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def create_compound_targets(self) -> None:
    """Create target_LOW and target_HIGH and compute geodesic distances."""

    # --- low target
    geodesics_method = self.config.target_low_geodesics_method.value
    method = 'min'  # no other union methods currently supported for lower target
    params: list[float] = []
    self.target_LOW = CompoundTarget(self.mesh, 'boundary', 1, self.DATA_PATH,
                                     union_method=method,
                                     union_params=params,
                                     geodesics_method=geodesics_method)

    # --- high target
    geodesics_method = self.config.target_high_geodesics_method.value
    method = self.config.target_high_union_method.value
    params = self.config.target_high_union_params
    logger.info(f"Creating target with union type: {method} and params: {params}")
    self.target_HIGH = CompoundTarget(self.mesh, 'boundary', 2, self.DATA_PATH,
                                      union_method=method,
                                      union_params=params,
                                      geodesics_method=geodesics_method)

    # --- uneven boundaries of high target
    self.target_HIGH.offset = self.config.uneven_upper_targets_offset
    self.target_HIGH.compute_uneven_boundaries_weight_max(self.target_LOW)

    #  --- save intermediary get_distance outputs
    self.target_LOW.save_distances("distances_LOW.json")
    self.target_HIGH.save_distances("distances_HIGH.json")
targets_laplacian_smoothing
targets_laplacian_smoothing(iterations, strength)

Smooth geodesic distances of targets. Saves again the distances to json.

Parameters:

Name Type Description Default
iterations int
required
strength float
required
Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def targets_laplacian_smoothing(self, iterations: int, strength: float) -> None:
    """
    Smooth geodesic distances of targets. Saves again the distances to json.

    Parameters
    ----------
    iterations: int
    strength: float
    """
    if self.target_LOW is None or self.target_HIGH is None:
        raise RuntimeError("Targets not initialized. Call create_compound_targets() first.")
    self.target_LOW.laplacian_smoothing(iterations=iterations, strength=strength)
    self.target_HIGH.laplacian_smoothing(iterations=iterations, strength=strength)
    self.target_LOW.save_distances("distances_LOW.json")
    self.target_HIGH.save_distances("distances_HIGH.json")
create_gradient_evaluation
create_gradient_evaluation(target_1, target_2=None, save_output=True, norm_filename='gradient_norm.json', g_filename='gradient.json')

Creates a compas_slicer.pre_processing.GradientEvaluation that is stored in self.g_evaluation Also, computes the gradient and gradient_norm and saves them to Json .

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def create_gradient_evaluation(
    self,
    target_1: CompoundTarget,
    target_2: CompoundTarget | None = None,
    save_output: bool = True,
    norm_filename: str = 'gradient_norm.json',
    g_filename: str = 'gradient.json',
) -> GradientEvaluation:
    """
    Creates a compas_slicer.pre_processing.GradientEvaluation that is stored in self.g_evaluation
    Also, computes the gradient and gradient_norm and saves them to Json .
    """
    if self.target_LOW is None or self.target_HIGH is None:
        raise RuntimeError("Targets not initialized. Call create_compound_targets() first.")
    if self.target_LOW.VN != target_1.VN:
        raise ValueError("Preprocessor does not match targets: vertex count mismatch.")
    assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=0.5,
                                                   target_LOW=self.target_LOW, target_HIGH=self.target_HIGH)
    g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH)
    g_evaluation.compute_gradient()
    g_evaluation.compute_gradient_norm()

    if save_output:
        # save results to json
        utils.save_to_json(g_evaluation.vertex_gradient_norm, self.OUTPUT_PATH, norm_filename)
        utils.save_to_json(utils.point_list_to_dict(g_evaluation.vertex_gradient), self.OUTPUT_PATH, g_filename)

    return g_evaluation
find_critical_points
find_critical_points(g_evaluation, output_filenames)

Computes and saves to json the critical points of the df on the mesh (minima, maxima, saddles)

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def find_critical_points(
    self, g_evaluation: GradientEvaluation, output_filenames: tuple[str, str, str]
) -> None:
    """ Computes and saves to json the critical points of the df on the mesh (minima, maxima, saddles)"""
    g_evaluation.find_critical_points()
    # save results to json
    utils.save_to_json(g_evaluation.minima, self.OUTPUT_PATH, output_filenames[0])
    utils.save_to_json(g_evaluation.maxima, self.OUTPUT_PATH, output_filenames[1])
    utils.save_to_json(g_evaluation.saddles, self.OUTPUT_PATH, output_filenames[2])
region_split
region_split(cut_mesh=True, separate_neighborhoods=True, topological_sorting=True, save_split_meshes=True)

Splits the mesh on the saddle points. This process can take a long time. It consists of four parts: 1) Create cuts on the mesh so that they intersect the saddle points and follow the get_distance function iso-contour 2) Separate mesh neighborhoods from cuts 3) Topological sorting of split meshes to determine their connectivity and sequence. 4) Finally resulting meshes are saved to json.

The intermediary outputs are saved to json, so if you don'weight want to be recomputing the entire thing every time, you can turn the respective processes to false.

Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def region_split(
    self,
    cut_mesh: bool = True,
    separate_neighborhoods: bool = True,
    topological_sorting: bool = True,
    save_split_meshes: bool = True,
) -> None:
    """
    Splits the mesh on the saddle points. This process can take a long time.
    It consists of four parts:
    1) Create cuts on the mesh so that they intersect the saddle points and follow the get_distance function
    iso-contour
    2) Separate mesh neighborhoods  from cuts
    3) Topological sorting of split meshes to determine their connectivity and sequence.
    4) Finally resulting meshes are saved to json.

    The intermediary outputs are saved to json, so if you don'weight want to be recomputing the entire thing every
    time, you can turn the respective processes to false.
    """

    logger.info("--- Mesh region splitting")

    if cut_mesh:  # (1)
        self.mesh.update_default_vertex_attributes({'cut': 0})
        mesh_splitter = rs.MeshSplitter(self.mesh, self.target_LOW, self.target_HIGH, self.DATA_PATH)
        mesh_splitter.run()

        self.mesh = mesh_splitter.mesh
        logger.info('Completed Region splitting')
        logger.info(f"Region split cut indices: {mesh_splitter.cut_indices}")
        # save results to json
        output_path = Path(self.OUTPUT_PATH)
        self.mesh.to_obj(str(output_path / 'mesh_with_cuts.obj'))
        self.mesh.to_json(str(output_path / 'mesh_with_cuts.json'))
        logger.info(f"Saving to Obj and Json: {output_path / 'mesh_with_cuts.json'}")

    if separate_neighborhoods:  # (2)
        logger.info("--- Separating mesh disconnected components")
        self.mesh = Mesh.from_json(str(Path(self.OUTPUT_PATH) / 'mesh_with_cuts.json'))
        region_split_cut_indices = get_existing_cut_indices(self.mesh)

        # save results to json
        utils.save_to_json(get_vertices_that_belong_to_cuts(self.mesh, region_split_cut_indices),
                           self.OUTPUT_PATH, "vertices_on_cuts.json")

        self.split_meshes = rs.separate_disconnected_components(self.mesh, attr='cut',
                                                                values=region_split_cut_indices,
                                                                OUTPUT_PATH=self.OUTPUT_PATH)
        logger.info(f'Created {len(self.split_meshes)} split meshes.')

    if topological_sorting:  # (3)
        logger.info("--- Topological sort of meshes directed graph to determine print order")
        graph = topo_sort.MeshDirectedGraph(self.split_meshes, self.DATA_PATH)
        all_orders = graph.get_all_topological_orders()
        selected_order = all_orders[0]
        logger.info(f'selected_order: {selected_order}')  # TODO: improve the way an order is selected
        self.cleanup_mesh_attributes_based_on_selected_order(selected_order, graph)

        # reorder split_meshes based on selected order
        self.split_meshes = [self.split_meshes[i] for i in selected_order]

    # --- save split meshes
    if save_split_meshes:  # (4)
        logger.info("--- Saving resulting split meshes")
        output_path = Path(self.OUTPUT_PATH)
        for i, m in enumerate(self.split_meshes):
            m.to_obj(str(output_path / f'split_mesh_{i}.obj'))
            m.to_json(str(output_path / f'split_mesh_{i}.json'))
        logger.info(f'Saving to Obj and Json: {output_path / "split_mesh_%.obj"}')
        logger.info(f"Saved {len(self.split_meshes)} split_meshes")
cleanup_mesh_attributes_based_on_selected_order
cleanup_mesh_attributes_based_on_selected_order(selected_order, graph)

Based on the selected order of split meshes, it rearranges their attributes, so that they can then be used with an interpolation slicer that requires data['boundary'] to be filled for every vertex. The vertices that originated from cuts have data['cut']=cut_index. This is replaced by data['boundary'] = 1 or 2 depending on connectivity of mesh.

Parameters:

Name Type Description Default
selected_order list[int]

The indices of ordered split meshes.

required
graph MeshDirectedGraph
required
Source code in src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
def cleanup_mesh_attributes_based_on_selected_order(
    self, selected_order: list[int], graph: MeshDirectedGraph
) -> None:
    """
    Based on the selected order of split meshes, it rearranges their attributes, so that they can then be used
    with an interpolation slicer that requires data['boundary'] to be filled for every vertex.
    The vertices that originated from cuts have data['cut']=cut_index. This is replaced
    by data['boundary'] = 1 or 2 depending on connectivity of mesh.

    Parameters
    ----------
    selected_order: list, int
        The indices of ordered split meshes.
    graph: :class: 'networkx.Graph'
    """
    for index in selected_order:
        mesh = self.split_meshes[index]
        for child_node in graph.adj_list[index]:
            child_mesh = self.split_meshes[child_node]
            edge = graph.G.edges[index, child_node]
            common_cuts = edge['cut']
            for cut_id in common_cuts:
                replace_mesh_vertex_attribute(mesh, 'cut', cut_id, 'boundary', 2)
                replace_mesh_vertex_attribute(child_mesh, 'cut', cut_id, 'boundary', 1)

        # save results to json
        pts_boundary_LOW = utils.get_mesh_vertex_coords_with_attribute(mesh, 'boundary', 1)
        pts_boundary_HIGH = utils.get_mesh_vertex_coords_with_attribute(mesh, 'boundary', 2)
        utils.save_to_json(utils.point_list_to_dict(pts_boundary_LOW), self.OUTPUT_PATH,
                           f'pts_boundary_LOW_{index}.json')
        utils.save_to_json(utils.point_list_to_dict(pts_boundary_HIGH), self.OUTPUT_PATH,
                           f'pts_boundary_HIGH_{index}.json')

positioning

move_mesh_to_point

move_mesh_to_point(mesh, target_point)

Moves (translates) a mesh to a target point.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh.

required
target_point Point

The point to move the mesh to.

required
Source code in src/compas_slicer/pre_processing/positioning.py
def move_mesh_to_point(mesh: Mesh, target_point: Point) -> Mesh:
    """Moves (translates) a mesh to a target point.

    Parameters
    ----------
    mesh: :class:`compas.datastructures.Mesh`
        A compas mesh.
    target_point: :class:`compas.geometry.Point`
        The point to move the mesh to.
    """
    mesh_center_pt = get_mid_pt_base(mesh)

    # transform mesh
    mesh_frame = Frame(mesh_center_pt, (1, 0, 0), (0, 1, 0))
    target_frame = Frame(target_point, (1, 0, 0), (0, 1, 0))

    T = Transformation.from_frame_to_frame(mesh_frame, target_frame)
    mesh.transform(T)

    logger.info(f"Mesh moved to: {target_point}")

    return mesh

get_mid_pt_base

get_mid_pt_base(mesh)

Gets the middle point of the base (bottom) of the mesh.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh.

required

Returns:

Name Type Description
mesh_mid_pt :class:`compas.geometry.Point`

Middle point of the base of the mesh.

Source code in src/compas_slicer/pre_processing/positioning.py
def get_mid_pt_base(mesh: Mesh) -> Point:
    """Gets the middle point of the base (bottom) of the mesh.

    Parameters
    ----------
    mesh: :class:`compas.datastructures.Mesh`
        A compas mesh.

    Returns
    -------
    mesh_mid_pt: :class:`compas.geometry.Point`
        Middle point of the base of the mesh.

    """
    # get center bottom point of mesh model
    vertices = list(mesh.vertices_attributes('xyz'))
    bbox = bounding_box(vertices)
    corner_pts = [bbox[0], bbox[2]]

    x = [p[0] for p in corner_pts]
    y = [p[1] for p in corner_pts]
    z = [p[2] for p in corner_pts]

    mesh_mid_pt = Point((sum(x) / 2), (sum(y) / 2), (sum(z) / 2))

    return mesh_mid_pt

remesh_mesh

remesh_mesh(mesh, target_edge_length, number_of_iterations=10, do_project=True)

Remesh a triangle mesh to achieve uniform edge lengths.

Uses CGAL's isotropic remeshing to improve mesh quality for slicing. This can help with curved slicing and geodesic computations.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh (must be triangulated).

required
target_edge_length float

Target edge length for the remeshed output.

required
number_of_iterations int

Number of remeshing iterations (default: 10).

10
do_project bool

Reproject vertices onto original surface (default: True).

True

Returns:

Type Description
Mesh

Remeshed compas mesh.

Raises:

Type Description
ImportError

If compas_cgal is not available.

Examples:

>>> from compas.datastructures import Mesh
>>> from compas_slicer.pre_processing import remesh_mesh
>>> mesh = Mesh.from_stl('model.stl')
>>> remeshed = remesh_mesh(mesh, target_edge_length=2.0)
Source code in src/compas_slicer/pre_processing/positioning.py
def remesh_mesh(
    mesh: Mesh,
    target_edge_length: float,
    number_of_iterations: int = 10,
    do_project: bool = True
) -> Mesh:
    """Remesh a triangle mesh to achieve uniform edge lengths.

    Uses CGAL's isotropic remeshing to improve mesh quality for slicing.
    This can help with curved slicing and geodesic computations.

    Parameters
    ----------
    mesh : Mesh
        A compas mesh (must be triangulated).
    target_edge_length : float
        Target edge length for the remeshed output.
    number_of_iterations : int
        Number of remeshing iterations (default: 10).
    do_project : bool
        Reproject vertices onto original surface (default: True).

    Returns
    -------
    Mesh
        Remeshed compas mesh.

    Raises
    ------
    ImportError
        If compas_cgal is not available.

    Examples
    --------
    >>> from compas.datastructures import Mesh
    >>> from compas_slicer.pre_processing import remesh_mesh
    >>> mesh = Mesh.from_stl('model.stl')
    >>> remeshed = remesh_mesh(mesh, target_edge_length=2.0)
    """
    try:
        from compas_cgal.meshing import trimesh_remesh
    except ImportError as e:
        raise ImportError(
            "remesh_mesh requires compas_cgal. Install with: pip install compas_cgal"
        ) from e

    from compas.datastructures import Mesh as CompasMesh

    M = mesh.to_vertices_and_faces()
    V, F = trimesh_remesh(M, target_edge_length, number_of_iterations, do_project)

    result = CompasMesh.from_vertices_and_faces(V.tolist(), F.tolist())

    logger.info(
        f"Remeshed: {mesh.number_of_vertices()} -> {result.number_of_vertices()} vertices, "
        f"target edge length: {target_edge_length}"
    )

    return result

preprocessing_utils

CompoundTarget

CompoundTarget(mesh, v_attr, value, DATA_PATH, union_method='min', union_params=None, geodesics_method='heat_cgal', anisotropic_scaling=False)

Represents a desired user-provided target. It acts as a key-frame that controls the print paths orientations. After the curved slicing , the print paths will be aligned to the compound target close to its area. The vertices that belong to the target are marked with their vertex attributes; they have data['v_attr'] = value.

Attributes:

Name Type Description
mesh :class:`compas.datastructures.Mesh`
v_attr str

The key of the attribute dict to be checked.

value int

The value of the attribute dict with key=v_attr. If in a vertex data[v_attr]==value then the vertex is part of this target.

DATA_PATH str
has_blend_union bool
blend_radius float
geodesics_method str

'heat_cgal' CGAL heat geodesic distances (recommended) 'heat' custom heat geodesic distances

anisotropic_scaling bool

This is not yet implemented

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def __init__(
    self,
    mesh: Mesh,
    v_attr: str,
    value: int,
    DATA_PATH: str,
    union_method: UnionMethod = 'min',
    union_params: list[Any] | None = None,
    geodesics_method: GeodesicsMethod = 'heat_cgal',
    anisotropic_scaling: bool = False,
) -> None:

    if union_params is None:
        union_params = []
    logger.info(f'Creating target with attribute : {v_attr}={value}')
    logger.info(f'union_method: {union_method}, union_params: {union_params}')
    self.mesh = mesh
    self.v_attr = v_attr
    self.value = value
    self.DATA_PATH = DATA_PATH
    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)

    self.union_method = union_method
    self.union_params = union_params

    self.geodesics_method = geodesics_method
    self.anisotropic_scaling = anisotropic_scaling  # Anisotropic scaling not yet implemented

    self.offset = 0
    self.VN = len(list(self.mesh.vertices()))

    # filled in by function 'self.find_targets_connected_components()'
    self.all_target_vkeys: list[int] = []  # flattened list with all vi_starts
    self.clustered_vkeys: list[list[int]] = []  # nested list with all vi_starts
    self.number_of_boundaries: int = 0

    self.weight_max_per_cluster: list[float] = []

    # geodesic distances
    # filled in by function 'self.update_distances_lists()'
    self._distances_lists: list[list[float]] = []  # Shape: number_of_boundaries x number_of_vertices
    self._distances_lists_flipped: list[list[float]] = []  # Shape: number_of_vertices x number_of_boundaries
    self._np_distances_lists_flipped: NDArray[np.floating] = np.array([])
    self._max_dist: float | None = None  # maximum distance from target on any mesh vertex

    # compute
    self.find_targets_connected_components()
    self.compute_geodesic_distances()
has_uneven_weights property
has_uneven_weights

Returns True if the target has uneven_weights calculated, False otherwise.

find_targets_connected_components
find_targets_connected_components()

Clusters all the vertices that belong to the target into neighborhoods using a graph. Each target can have an arbitrary number of neighborhoods/clusters. Fills in the attributes: self.all_target_vkeys, self.clustered_vkeys, self.number_of_boundaries

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def find_targets_connected_components(self) -> None:
    """
    Clusters all the vertices that belong to the target into neighborhoods using a graph.
    Each target can have an arbitrary number of neighborhoods/clusters.
    Fills in the attributes: self.all_target_vkeys, self.clustered_vkeys, self.number_of_boundaries
    """
    self.all_target_vkeys = [vkey for vkey, data in self.mesh.vertices(data=True) if
                             data[self.v_attr] == self.value]
    if len(self.all_target_vkeys) == 0:
        raise ValueError(
            f"No vertices in mesh with attribute '{self.v_attr}'={self.value}. "
            "Check your target creation."
        )
    G = _create_graph_from_mesh_vkeys(self.mesh, self.all_target_vkeys)
    if len(list(G.nodes())) != len(self.all_target_vkeys):
        raise RuntimeError("Graph node count doesn't match target vertex count.")
    self.number_of_boundaries = len(list(nx.connected_components(G)))

    for _i, cp in enumerate(nx.connected_components(G)):
        self.clustered_vkeys.append(list(cp))
    logger.info(
        f"Compound target with 'boundary'={self.value}. Number of connected_components : "
        f"{len(list(nx.connected_components(G)))}"
    )
compute_geodesic_distances
compute_geodesic_distances()

Computes the geodesic distances from each of the target's neighborhoods to all the mesh vertices. Fills in the distances attributes.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def compute_geodesic_distances(self) -> None:
    """
    Computes the geodesic distances from each of the target's neighborhoods  to all the mesh vertices.
    Fills in the distances attributes.
    """
    if self.geodesics_method == 'exact_igl':
        distances_lists = [get_igl_EXACT_geodesic_distances(self.mesh, vstarts) for vstarts in
                           self.clustered_vkeys]
    elif self.geodesics_method == 'heat_igl':
        distances_lists = [get_igl_HEAT_geodesic_distances(self.mesh, vstarts) for vstarts in
                           self.clustered_vkeys]
    elif self.geodesics_method == 'heat_cgal':
        distances_lists = [get_cgal_HEAT_geodesic_distances(self.mesh, vstarts) for vstarts in
                           self.clustered_vkeys]
    elif self.geodesics_method == 'heat':
        distances_lists = [get_custom_HEAT_geodesic_distances(self.mesh, vstarts, str(self.OUTPUT_PATH)) for vstarts in
                           self.clustered_vkeys]
    else:
        raise ValueError('Unknown geodesics method : ' + self.geodesics_method)

    distances_lists = [list(dl) for dl in distances_lists]  # number_of_boundaries x #V
    self.update_distances_lists(distances_lists)
update_distances_lists
update_distances_lists(distances_lists)

Fills in the distances attributes.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def update_distances_lists(self, distances_lists: list[list[float]]) -> None:
    """
    Fills in the distances attributes.
    """
    self._distances_lists = distances_lists
    self._distances_lists_flipped = []  # empty
    for i in range(self.VN):
        current_values = [self._distances_lists[list_index][i] for list_index in range(self.number_of_boundaries)]
        self._distances_lists_flipped.append(current_values)
    self._np_distances_lists_flipped = np.array(self._distances_lists_flipped)
    self._max_dist = np.max(self._np_distances_lists_flipped)
compute_uneven_boundaries_weight_max
compute_uneven_boundaries_weight_max(other_target)

If the target has multiple neighborhoods/clusters of vertices, then it computes their maximum distance from the other_target. Based on that it calculates their weight_max for the interpolation process

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def compute_uneven_boundaries_weight_max(self, other_target: CompoundTarget) -> None:
    """
    If the target has multiple neighborhoods/clusters of vertices, then it computes their maximum distance from
    the other_target. Based on that it calculates their weight_max for the interpolation process
    """
    if self.number_of_boundaries > 1:
        ds_avg_HIGH = self.get_boundaries_rel_dist_from_other_target(other_target)
        max_param = max(ds_avg_HIGH)
        for i, d in enumerate(ds_avg_HIGH):  # offset all distances except the maximum one
            if abs(d - max_param) > 0.01:  # if it isn't the max value
                ds_avg_HIGH[i] = d + self.offset

        self.weight_max_per_cluster = [d / max_param for d in ds_avg_HIGH]
        logger.info(f'weight_max_per_cluster: {self.weight_max_per_cluster}')
    else:
        logger.info("Did not compute_norm_of_gradient uneven boundaries, target consists of single component")
get_boundaries_rel_dist_from_other_target
get_boundaries_rel_dist_from_other_target(other_target, avg_type='median')

Returns a list, one relative distance value per connected boundary neighborhood. That is the average of the distances of the vertices of that boundary neighborhood from the other_target.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_boundaries_rel_dist_from_other_target(
    self, other_target: CompoundTarget, avg_type: Literal['mean', 'median'] = 'median'
) -> list[float]:
    """
    Returns a list, one relative distance value per connected boundary neighborhood.
    That is the average of the distances of the vertices of that boundary neighborhood from the other_target.
    """
    distances = []
    for vi_starts in self.clustered_vkeys:
        ds = [other_target.get_distance(vi) for vi in vi_starts]
        if avg_type == 'mean':
            distances.append(statistics.mean(ds))
        else:  # 'median'
            distances.append(statistics.median(ds))
    return distances
get_avg_distances_from_other_target
get_avg_distances_from_other_target(other_target)

Returns the minimum and maximum distance of the vertices of this target from the other_target

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_avg_distances_from_other_target(self, other_target: CompoundTarget) -> float:
    """
    Returns the minimum and maximum distance of the vertices of this target from the other_target
    """
    extreme_distances = []
    for v_index in other_target.all_target_vkeys:
        extreme_distances.append(self.get_all_distances()[v_index])
    return float(np.average(np.array(extreme_distances)))
get_all_clusters_distances_dict
get_all_clusters_distances_dict()

Returns dict. keys: index of connected target neighborhood, value: list, distances (one per vertex).

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_all_clusters_distances_dict(self) -> dict[int, list[float]]:
    """ Returns dict. keys: index of connected target neighborhood, value: list, distances (one per vertex). """
    return {i: self._distances_lists[i] for i in range(self.number_of_boundaries)}
get_max_dist
get_max_dist()

Returns the maximum distance that the target has on a mesh vertex.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_max_dist(self) -> float | None:
    """ Returns the maximum distance that the target has on a mesh vertex. """
    return self._max_dist
get_all_distances
get_all_distances()

Return distances for all vertices as 1D array, applying union method.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_all_distances(self) -> np.ndarray:
    """Return distances for all vertices as 1D array, applying union method."""
    if self.union_method == 'min':
        return np.min(self._np_distances_lists_flipped, axis=1)
    elif self.union_method == 'smooth':
        return np.array([
            blend_union_list(row.tolist(), self.union_params[0])
            for row in self._np_distances_lists_flipped
        ])
    elif self.union_method == 'chamfer':
        return np.array([
            chamfer_union_list(row.tolist(), self.union_params[0])
            for row in self._np_distances_lists_flipped
        ])
    elif self.union_method == 'stairs':
        return np.array([
            stairs_union_list(row.tolist(), self.union_params[0], self.union_params[1])
            for row in self._np_distances_lists_flipped
        ])
    else:
        raise ValueError(f"Unknown union method: {self.union_method}")
get_all_distances_array
get_all_distances_array()

Return raw distances as (n_boundaries, n_vertices) array.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_all_distances_array(self) -> np.ndarray:
    """Return raw distances as (n_boundaries, n_vertices) array."""
    return np.array(self._distances_lists)
get_all_distances_for_vkey
get_all_distances_for_vkey(i)

Returns distances from each cluster separately for vertex i. Smooth union doesn't play here any role.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_all_distances_for_vkey(self, i: int) -> list[float]:
    """ Returns distances from each cluster separately for vertex i. Smooth union doesn't play here any role. """
    return [self._distances_lists[list_index][i] for list_index in range(self.number_of_boundaries)]
get_distance
get_distance(i)

Return get_distance for vertex with vkey i.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_distance(self, i: int) -> float:
    """ Return get_distance for vertex with vkey i. """
    if self.union_method == 'min':
        # --- simple union
        return float(np.min(self._np_distances_lists_flipped[i]))
    elif self.union_method == 'smooth':
        # --- blend (smooth) union
        return blend_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0])
    elif self.union_method == 'chamfer':
        # --- blend (smooth) union
        return chamfer_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0])
    elif self.union_method == 'stairs':
        # --- stairs union
        return stairs_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0],
                                 n=self.union_params[1])
    else:
        raise ValueError("Unknown Union method : ", self.union_method)
laplacian_smoothing
laplacian_smoothing(iterations, strength)

Smooth the distances on the mesh, using iterative laplacian smoothing.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def laplacian_smoothing(self, iterations: int, strength: float) -> None:
    """ Smooth the distances on the mesh, using iterative laplacian smoothing. """
    L = utils.get_mesh_cotmatrix_igl(self.mesh, fix_boundaries=True)
    new_distances_lists = []

    logger.info('Laplacian smoothing of all distances')
    for _i, a in enumerate(self._distances_lists):
        a = np.array(a)  # a: numpy array containing the attribute to be smoothed
        for _ in range(iterations):  # iterative smoothing
            a_prime = a + strength * L * a
            a = a_prime
        new_distances_lists.append(list(a))
    self.update_distances_lists(new_distances_lists)
save_distances
save_distances(name)

Save distances to json. Saves one list with distance values (one per vertex).

Parameters:

Name Type Description Default
name str
required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def save_distances(self, name: str) -> None:
    """
    Save distances to json.
    Saves one list with distance values (one per vertex).

    Parameters
    ----------
    name: str, name of json to be saved
    """
    utils.save_to_json(self.get_all_distances().tolist(), self.OUTPUT_PATH, name)
assign_new_mesh
assign_new_mesh(mesh)

When the base mesh changes, a new mesh needs to be assigned.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def assign_new_mesh(self, mesh: Mesh) -> None:
    """ When the base mesh changes, a new mesh needs to be assigned. """
    mesh.to_json(self.OUTPUT_PATH + "/temp.obj")
    mesh = Mesh.from_json(self.OUTPUT_PATH + "/temp.obj")
    self.mesh = mesh
    self.VN = len(list(self.mesh.vertices()))

GeodesicsCache

GeodesicsCache()

Cache for geodesic distances to avoid redundant computations.

Note: This class is kept for backwards compatibility but now uses CGAL. The CGAL solver has its own internal caching via _cgal_solver_cache.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def __init__(self) -> None:
    self._cache: dict[tuple[int, str], NDArray[np.floating]] = {}
    self._mesh_hash: int | None = None
clear
clear()

Clear the cache.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def clear(self) -> None:
    """Clear the cache."""
    self._cache.clear()
    self._mesh_hash = None
get_distances
get_distances(mesh, sources, method='heat')

Get geodesic distances from sources, using cache when possible.

Parameters:

Name Type Description Default
mesh Mesh

The mesh to compute distances on.

required
sources list[int]

Source vertex indices.

required
method str

Geodesic method (ignored, always uses CGAL heat method).

'heat'

Returns:

Type Description
NDArray

Minimum distance from any source to each vertex.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def get_distances(
    self, mesh: Mesh, sources: list[int], method: str = 'heat'
) -> NDArray[np.floating]:
    """Get geodesic distances from sources, using cache when possible.

    Parameters
    ----------
    mesh : Mesh
        The mesh to compute distances on.
    sources : list[int]
        Source vertex indices.
    method : str
        Geodesic method (ignored, always uses CGAL heat method).

    Returns
    -------
    NDArray
        Minimum distance from any source to each vertex.
    """
    return get_heat_geodesic_distances(mesh, sources)

MeshSplitter

MeshSplitter(mesh, target_LOW, target_HIGH, DATA_PATH)

Curved slicing pre-processing step.

Takes one continuous mesh with various saddle points and splits it up at every saddle point following the direction of the iso-contour that intersects that saddle point, so that the resulting mesh has no remaining saddle points.

The result is a series of split meshes whose vertex attributes have been updated with boundary attributes at the newly created cuts, (i.e. they all have vertex 'boundary' attributes 1,2 on their lower and upper boundaries)

For each newly created mesh, a separate slicer needs to be created. Like that, we will always have one slicer per mesh with the correct attributes already assigned. However, it can still happen that a slicer that takes a split mesh outputs more than one vertical_layers_print_data (vertical layers).

Attributes:

Name Type Description
mesh :class: 'compas.datastructures.Mesh'
target_LOW :class: 'compas_slicer.pre_processing.CompoundTarget'
target_HIGH :class: 'compas_slicer.pre_processing.CompoundTarget'
DATA_PATH str, the path to the data folder
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def __init__(self, mesh, target_LOW, target_HIGH, DATA_PATH):
    self.mesh = mesh  # compas mesh
    self.DATA_PATH = DATA_PATH
    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
    self.target_LOW, self.target_HIGH = target_LOW, target_HIGH

    assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=0.5, target_LOW=self.target_LOW,
                                                   target_HIGH=self.target_HIGH)
    # Late import to avoid circular dependency
    from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation

    g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH)
    g_evaluation.find_critical_points()  # First estimation of saddle points with weight = 0.5
    self.saddles = g_evaluation.saddles
    self.cut_indices = []
run
run()

Runs the mesh splitting process. This consists of the following parts.

(1) Find the iso-contours that intersect the saddle points. Iteratively find the weights (from 0 to 1) that output a distance field whose iso-contour is intersecting each saddle point. Here two iterations are carried out (one rough and one exact search).

For each saddle point and its respective weight and iso-contour: (2) Find the zero-crossing points of the iso-contour and merge points that are close to the saddle to ensure connection. (3) Cleanup iso-contour. Only keep neighborhoods that are intersecting the saddle point. Discard all other. (4) Create the cut on the mesh. (5) Weld the resulting mesh, and restore all the vertex attributes. Note that the mesh remains in one piece, although the cuts have been created. (6) Update compound tardets with the new mesh.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def run(self):
    """
    Runs the mesh splitting process. This consists of the following parts.

    (1) Find the iso-contours that intersect the saddle points.
    Iteratively find the weights (from 0 to 1) that output a distance field whose iso-contour is intersecting
    each saddle point. Here two iterations are carried out (one rough and one exact search).

    For each saddle point and its respective weight and iso-contour:
    (2) Find the zero-crossing points of the iso-contour and merge points that are close to the saddle to ensure
    connection.
    (3) Cleanup iso-contour. Only keep neighborhoods that are intersecting the saddle point. Discard all other.
    (4) Create the cut on the mesh.
    (5) Weld the resulting mesh, and restore all the vertex attributes. Note that the mesh remains in one piece,
    although the cuts have been created.
    (6) Update compound tardets with the new mesh.
    """

    # (1) first rough estimation of split params
    split_params = self.identify_positions_to_split(self.saddles)
    # TODO: merge params that are too close together to avoid creation of very thin neighborhoods.
    logger.info(f"{len(split_params)} Split params. First rough estimation :  {split_params}")

    # split mesh at params
    logger.info('Splitting mesh at split params')
    current_cut_index = 1

    for i, param_first_estimation in enumerate(split_params):
        logger.info(f'cut_index : {current_cut_index}, param_first_estimation : {param_first_estimation:.6f}')

        # --- (1) More exact estimation of intersecting weight. Recompute gradient evaluation.
        # Find exact saddle point and the weight that intersects it.

        assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=param_first_estimation,
                                                       target_LOW=self.target_LOW, target_HIGH=self.target_HIGH)
        # Late import to avoid circular dependency
        from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation

        g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH)
        g_evaluation.find_critical_points()
        saddles_ds_tupples = [(vkey, abs(g_evaluation.mesh.vertex_attribute(vkey, 'scalar_field'))) for vkey in
                              g_evaluation.saddles]
        saddles_ds_tupples = sorted(saddles_ds_tupples, key=lambda saddle_tupple: saddle_tupple[1])
        vkey = saddles_ds_tupples[0][0]
        t = self.identify_positions_to_split([vkey])[0]
        logger.info(f'vkey_exact : {vkey} , t_exact : {t:.6f}')

        # --- (2) find zero-crossing points
        assign_interpolation_distance_to_mesh_vertices(self.mesh, t, self.target_LOW, self.target_HIGH)
        # Late import to avoid circular dependency
        from compas_slicer.slicers.slice_utilities import ScalarFieldContours

        zero_contours = ScalarFieldContours(self.mesh)
        zero_contours.compute()
        keys_of_clusters_to_keep = merge_clusters_saddle_point(zero_contours, saddle_vkeys=[vkey])

        # --- (3) Cleaning up zero-crossing neighborhoods
        cleanup_unrelated_isocontour_neighborhoods(zero_contours, keys_of_clusters_to_keep)

        if zero_contours:  # if there are remaining zero-crossing neighborhoods
            zero_contours = smoothen_cut(zero_contours, self.mesh, saddle_vkeys=[vkey], iterations=15,
                                         strength=0.2)  # smoothen the cut close to the saddle point.

            # save to json intermediary results
            zero_contours.save_point_clusters_as_polylines_to_json(self.OUTPUT_PATH,
                                                                   f'point_clusters_polylines_{int(i)}.json')

            #  --- (4) Create cut
            logger.info("Creating cut on mesh")
            self.cut_indices.append(current_cut_index)
            self.split_intersected_faces(zero_contours, current_cut_index)
            current_cut_index += 1

            #  --- (5) Weld mesh and restore attributes
            logger.info('Cleaning up the mesh. Welding and restoring attributes')
            v_attributes_dict = save_vertex_attributes(self.mesh)
            self.mesh = weld_mesh(self.mesh, self.OUTPUT_PATH)
            restore_mesh_attributes(self.mesh, v_attributes_dict)

            #  --- (6) Update targets
            if i < len(split_params) - 1:  # does not need to happen at the end
                logger.info('Updating targets, recomputing geodesic distances')
                self.update_targets()

        self.mesh.to_obj(str(Path(self.OUTPUT_PATH) / 'most_recent_cut_mesh.obj'))
update_targets
update_targets()

Update targets with the new mesh that was created during the split process. Note: This only works if the target vertices have not been touched. If all has gone well, targets can only have minima and maxima, so they should remain intact after the split

Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def update_targets(self):
    """
    Update targets with the new mesh that was created during the split process.
    Note: This only works if the target vertices have not been touched. If all has gone well, targets can only have
    minima and maxima, so they should remain intact after the split
    """
    self.target_LOW.assign_new_mesh(self.mesh)
    self.target_LOW.find_targets_connected_components()
    self.target_LOW.compute_geodesic_distances()
    if self.target_HIGH:
        self.target_HIGH.assign_new_mesh(self.mesh)
        self.target_HIGH.find_targets_connected_components()
        self.target_HIGH.compute_geodesic_distances()
split_intersected_faces
split_intersected_faces(zero_contours, cut_index)

Create cuts on intersected faces

Parameters:

Name Type Description Default
zero_contours
required
cut_index
required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def split_intersected_faces(self, zero_contours, cut_index):
    """
    Create cuts on intersected faces

    Parameters
    ----------
    zero_contours: :class: 'compas_slicer.pre_processing.ScalarFieldContours'
    cut_index: int, the vertex attribute value data['cut'] of the current cut
    """
    for key in zero_contours.sorted_point_clusters:  # cluster_pair
        edges = zero_contours.sorted_edge_clusters[key]
        pts = zero_contours.sorted_point_clusters[key]

        # add first vertex
        p = pts[0]
        v0 = self.mesh.add_vertex(x=p[0], y=p[1], z=p[2], attr_dict={'cut': 1})

        for i, edge in enumerate(edges):
            next_edge = edges[(i + 1) % len(edges)]
            p = pts[(i + 1) % len(pts)]

            faces_current_edge = self.mesh.edge_faces(u=edge[0], v=edge[1])
            faces_next_edge = self.mesh.edge_faces(u=next_edge[0], v=next_edge[1])

            fkey_common = list(set(faces_current_edge).intersection(faces_next_edge))[0]
            vkey_common = list(set(edge).intersection(next_edge))[0]
            v_other_a = list(set(edge).difference([vkey_common]))[0]
            v_other_b = list(set(next_edge).difference([vkey_common]))[0]

            v_new = self.mesh.add_vertex(x=p[0], y=p[1], z=p[2], attr_dict={'cut': cut_index})

            # remove and add faces
            if fkey_common in list(self.mesh.faces()):
                self.mesh.delete_face(fkey_common)
                self.mesh.add_face([vkey_common, v_new, v0])
                self.mesh.add_face([v_new, v_other_a, v0])
                self.mesh.add_face([v_other_b, v_other_a, v_new])
            else:
                logger.warning('Did not need to modify faces.')
            v0 = v_new

    self.mesh.cull_vertices()  # remove all unused vertices
    try:
        self.mesh.unify_cycles()
    except AssertionError:
        logger.warning('Could NOT unify cycles')
    if not self.mesh.is_valid():
        logger.warning('Attention! Mesh is NOT valid!')
identify_positions_to_split
identify_positions_to_split(saddles)

Find the weights that create iso-contours that intersect the saddle points.

Parameters:

Name Type Description Default
saddles
required

Returns:

Type Description
list, float, the weights from 0 to 1. One for each saddle point.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def identify_positions_to_split(self, saddles):
    """
    Find the weights that create iso-contours that intersect the saddle points.

    Parameters
    ----------
    saddles: list, int, the vertex keys of the saddle points

    Returns
    ----------
    list, float, the weights from 0 to 1. One for each saddle point.
    """
    split_params = []
    for vkey in saddles:
        param = self.find_weight_intersecting_vkey(vkey, threshold=HIT_THRESHOLD, resolution=T_SEARCH_RESOLUTION)
        split_params.append(param)
    return split_params
find_weight_intersecting_vkey
find_weight_intersecting_vkey(vkey, threshold, resolution)

Find the weights that intersect the vertex.

Parameters:

Name Type Description Default
vkey
required
threshold
required
resolution
required

Returns:

Type Description
float, the weights from 0 to 1.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def find_weight_intersecting_vkey(self, vkey, threshold, resolution):
    """
    Find the weights that intersect the vertex.

    Parameters
    ----------
    vkey: list, int, the vertex key to intersect
    threshold: float, the d value below which we consider we have a hit. Should be a very small value
    resolution: int, the resolution of search, should be a value more than 10**4

    Returns
    ----------
    float, the weights from 0 to 1.
    """
    weight_list = get_weights_list(n=resolution, start=0.001, end=0.999)
    # TODO: save next d to avoid re-evaluating
    for i, weight in enumerate(weight_list[:-1]):
        current_d = assign_interpolation_distance_to_mesh_vertex(vkey, weight, self.target_LOW, self.target_HIGH)
        next_d = assign_interpolation_distance_to_mesh_vertex(vkey, weight_list[i + 1], self.target_LOW, self.target_HIGH)
        if abs(current_d) < abs(next_d) and current_d < threshold:
            return weight
    raise ValueError(f'Could NOT find param for saddle vkey {vkey}!')

assign_interpolation_distance_to_mesh_vertices

assign_interpolation_distance_to_mesh_vertices(mesh, weight, target_LOW, target_HIGH)

Fills in the 'get_distance' attribute of every vertex of the mesh.

Parameters:

Name Type Description Default
mesh Mesh
required
weight float

The weighting of the distances from the lower and the upper target, from 0 to 1.

required
target_LOW CompoundTarget

The lower compound target.

required
target_HIGH CompoundTarget | None

The upper compound target.

required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py
def assign_interpolation_distance_to_mesh_vertices(
    mesh: Mesh, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget | None
) -> None:
    """
    Fills in the 'get_distance' attribute of every vertex of the mesh.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    weight: float,
        The weighting of the distances from the lower and the upper target, from 0 to 1.
    target_LOW: :class: 'compas_slicer.pre_processing.CompoundTarget'
        The lower compound target.
    target_HIGH:  :class: 'compas_slicer.pre_processing.CompoundTarget'
        The upper compound target.
    """
    # Vectorized computation for all vertices at once
    distances = _compute_all_distances_vectorized(weight, target_LOW, target_HIGH)
    for vkey, d in zip(mesh.vertices(), distances):
        mesh.vertex[vkey]['scalar_field'] = float(d)

assign_interpolation_distance_to_mesh_vertex

assign_interpolation_distance_to_mesh_vertex(vkey, weight, target_LOW, target_HIGH)

Fills in the 'get_distance' attribute for a single vertex with vkey.

Parameters:

Name Type Description Default
vkey int

The vertex key.

required
weight float

The weighting of the distances from the lower and the upper target, from 0 to 1.

required
target_LOW CompoundTarget

The lower compound target.

required
target_HIGH CompoundTarget | None

The upper compound target.

required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py
def assign_interpolation_distance_to_mesh_vertex(
    vkey: int, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget | None
) -> float:
    """
    Fills in the 'get_distance' attribute for a single vertex with vkey.

    Parameters
    ----------
    vkey: int
        The vertex key.
    weight: float,
        The weighting of the distances from the lower and the upper target, from 0 to 1.
    target_LOW: :class: 'compas_slicer.pre_processing.CompoundTarget'
        The lower compound target.
    target_HIGH:  :class: 'compas_slicer.pre_processing.CompoundTarget'
        The upper compound target.
    """
    if target_LOW and target_HIGH:  # then interpolate targets
        d = get_weighted_distance(vkey, weight, target_LOW, target_HIGH)
    elif target_LOW:  # then offset target
        offset = weight * target_LOW.get_max_dist()
        d = target_LOW.get_distance(vkey) - offset
    else:
        raise ValueError('You need to provide at least one target')
    return d

blend_union_list

blend_union_list(values, r)

Returns a smooth union of all the elements in the list, with blend radius blend_radius.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def blend_union_list(values: NDArray[np.floating] | list[float], r: float) -> float:
    """ Returns a smooth union of all the elements in the list, with blend radius blend_radius. """
    d_result: float = 9999999.0  # very big number
    for d in values:
        d_result = blend_union(d_result, float(d), r)
    return d_result

stairs_union_list

stairs_union_list(values, r, n)

Returns a stairs union of all the elements in the list, with blend radius r and number of peaks n-1.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def stairs_union_list(values: NDArray[np.floating] | list[float], r: float, n: int) -> float:
    """ Returns a stairs union of all the elements in the list, with blend radius r and number of peaks n-1."""
    d_result: float = 9999999.0  # very big number
    for _i, d in enumerate(values):
        d_result = stairs_union(d_result, float(d), r, n)
    return d_result

get_heat_geodesic_distances

get_heat_geodesic_distances(mesh, vertices_start)

Calculate geodesic distances using CGAL heat method.

Uses compas_cgal's HeatGeodesicSolver which provides CGAL's Heat_method_3 implementation with intrinsic Delaunay triangulation.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh (must be triangulated).

required
vertices_start list[int]

Source vertex indices.

required

Returns:

Type Description
NDArray

Minimum distance from any source to each vertex.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def get_heat_geodesic_distances(
    mesh: Mesh, vertices_start: list[int]
) -> NDArray[np.floating]:
    """
    Calculate geodesic distances using CGAL heat method.

    Uses compas_cgal's HeatGeodesicSolver which provides CGAL's Heat_method_3
    implementation with intrinsic Delaunay triangulation.

    Parameters
    ----------
    mesh : Mesh
        A compas mesh (must be triangulated).
    vertices_start : list[int]
        Source vertex indices.

    Returns
    -------
    NDArray
        Minimum distance from any source to each vertex.
    """
    from compas_cgal.geodesics import HeatGeodesicSolver

    # Check if we have a cached solver for this mesh
    mesh_hash = hash((len(list(mesh.vertices())), len(list(mesh.faces()))))
    if mesh_hash not in _cgal_solver_cache:
        _cgal_solver_cache.clear()  # Clear old solvers
        _cgal_solver_cache[mesh_hash] = HeatGeodesicSolver(mesh)

    solver = _cgal_solver_cache[mesh_hash]

    # Compute distances for each source and take minimum
    all_distances = []
    for source in vertices_start:
        distances = solver.solve([source])
        all_distances.append(distances)

    return np.min(np.array(all_distances), axis=0)

get_custom_HEAT_geodesic_distances

get_custom_HEAT_geodesic_distances(mesh, vi_sources, OUTPUT_PATH, v_equalize=None)

Calculate geodesic distances using the custom heat method.

This is a pure Python implementation of the heat method (Crane et al., 2013). For production use, prefer CGAL's implementation via get_heat_geodesic_distances() which uses intrinsic Delaunay triangulation for better accuracy.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh (must be triangulated).

required
vi_sources list[int]

Source vertex indices.

required
OUTPUT_PATH str

Path to save intermediate results.

required
v_equalize list[int] | None

Vertices to equalize (for saddle point handling).

None

Returns:

Type Description
NDArray

Geodesic distance from sources to each vertex.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def get_custom_HEAT_geodesic_distances(
    mesh: Mesh,
    vi_sources: list[int],
    OUTPUT_PATH: str,
    v_equalize: list[int] | None = None,
) -> NDArray[np.floating]:
    """Calculate geodesic distances using the custom heat method.

    This is a pure Python implementation of the heat method (Crane et al., 2013).
    For production use, prefer CGAL's implementation via get_heat_geodesic_distances()
    which uses intrinsic Delaunay triangulation for better accuracy.

    Parameters
    ----------
    mesh : Mesh
        A compas mesh (must be triangulated).
    vi_sources : list[int]
        Source vertex indices.
    OUTPUT_PATH : str
        Path to save intermediate results.
    v_equalize : list[int] | None
        Vertices to equalize (for saddle point handling).

    Returns
    -------
    NDArray
        Geodesic distance from sources to each vertex.
    """
    geodesics_solver = GeodesicsSolver(mesh, OUTPUT_PATH)
    u = geodesics_solver.diffuse_heat(vi_sources, v_equalize)
    geodesic_dist = geodesics_solver.get_geodesic_distances(u, vi_sources, v_equalize)
    return geodesic_dist

get_vertex_gradient_from_face_gradient

get_vertex_gradient_from_face_gradient(mesh, face_gradient)

Finds vertex gradient given an already calculated per face gradient.

Parameters:

Name Type Description Default
mesh Mesh
required
face_gradient NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #V x 3) one gradient vector per vertex.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_vertex_gradient_from_face_gradient(
    mesh: Mesh, face_gradient: NDArray[np.floating]
) -> NDArray[np.floating]:
    """
    Finds vertex gradient given an already calculated per face gradient.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    face_gradient: np.array with one vec3 per face of the mesh. (dimensions : #F x 3)

    Returns
    ----------
    np.array (dimensions : #V x 3) one gradient vector per vertex.
    """
    logger.info('Computing per vertex gradient')
    V, F = _mesh_to_arrays(mesh)
    face_areas = np.array([mesh.face_area(f) for f in mesh.faces()], dtype=np.float64)
    return _vertex_gradient_vectorized(V, F, face_gradient, face_areas)

get_edge_gradient_from_vertex_gradient

get_edge_gradient_from_vertex_gradient(mesh, vertex_gradient)

Finds edge gradient given an already calculated per vertex gradient.

Parameters:

Name Type Description Default
mesh Mesh
required
vertex_gradient NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #E x 3) one gradient vector per edge.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_edge_gradient_from_vertex_gradient(
    mesh: Mesh, vertex_gradient: NDArray[np.floating]
) -> NDArray[np.floating]:
    """
    Finds edge gradient given an already calculated per vertex gradient.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    vertex_gradient: np.array with one vec3 per vertex of the mesh. (dimensions : #V x 3)

    Returns
    ----------
    np.array (dimensions : #E x 3) one gradient vector per edge.
    """
    edges = np.array(list(mesh.edges()), dtype=np.intp)
    return _edge_gradient_vectorized(edges, vertex_gradient)

get_face_gradient_from_scalar_field

get_face_gradient_from_scalar_field(mesh, u)

Finds face gradient from scalar field u. Scalar field u is given per vertex.

Parameters:

Name Type Description Default
mesh Mesh
required
u NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #F x 3) one gradient vector per face.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_face_gradient_from_scalar_field(
    mesh: Mesh, u: NDArray[np.floating]
) -> NDArray[np.floating]:
    """
    Finds face gradient from scalar field u.
    Scalar field u is given per vertex.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    u: list, float. (dimensions : #VN x 1)

    Returns
    ----------
    np.array (dimensions : #F x 3) one gradient vector per face.
    """
    logger.info('Computing per face gradient')
    V, F = _mesh_to_arrays(mesh)
    scalar_field = np.asarray(u, dtype=np.float64)
    face_normals = np.array([mesh.face_normal(f) for f in mesh.faces()], dtype=np.float64)
    face_areas = np.array([mesh.face_area(f) for f in mesh.faces()], dtype=np.float64)
    return _face_gradient_vectorized(V, F, scalar_field, face_normals, face_areas)

get_per_vertex_divergence

get_per_vertex_divergence(mesh, X, cotans)

Computes the divergence of the gradient X for the mesh, using cotangent weights.

Parameters:

Name Type Description Default
mesh Mesh
required
X NDArray[floating]
required
cotans NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #V x 1) one float (divergence value) per vertex.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_per_vertex_divergence(
    mesh: Mesh, X: NDArray[np.floating], cotans: NDArray[np.floating]
) -> NDArray[np.floating]:
    """
    Computes the divergence of the gradient X for the mesh, using cotangent weights.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    X: np.array, (dimensions: #F x 3), per face gradient
    cotans:  np.array, (dimensions: #F x 3), 1/2*cotangents corresponding angles

    Returns
    ----------
    np.array (dimensions : #V x 1) one float (divergence value) per vertex.
    """
    V, F = _mesh_to_arrays(mesh)
    cotans = cotans.reshape(-1, 3)
    return _divergence_vectorized(V, F, X, cotans)

normalize_gradient

normalize_gradient(X)

Returns normalized gradient X.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def normalize_gradient(X: NDArray[np.floating]) -> NDArray[np.floating]:
    """ Returns normalized gradient X. """
    norm = np.linalg.norm(X, axis=1)[..., np.newaxis]
    return X / norm  # normalize

get_scalar_field_from_gradient

get_scalar_field_from_gradient(mesh, X, C, cotans)

Find scalar field u that best explains gradient X. Laplacian(u) = Divergence(X). This defines a scalar field up to translation, then we subtract the min to make sure it starts from 0.

Parameters:

Name Type Description Default
mesh Mesh
required
X NDArray[floating]
required
C csr_matrix

sparse matrix (dimensions: #V x #V), cotmatrix, each row i corresponding to v(i, :)

required
cotans NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #V x 1) one scalar value per vertex.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_scalar_field_from_gradient(
    mesh: Mesh,
    X: NDArray[np.floating],
    C: scipy.sparse.csr_matrix,
    cotans: NDArray[np.floating],
) -> NDArray[np.floating]:
    """
    Find scalar field u that best explains gradient X.
    Laplacian(u) = Divergence(X).
    This defines a scalar field up to translation, then we subtract the min to make sure it starts from 0.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    X: np.array, (dimensions: #F x 3), per face gradient
    C: 'scipy.sparse.csr_matrix',
        sparse matrix (dimensions: #V x #V), cotmatrix, each row i corresponding to v(i, :)
    cotans: np.array, (dimensions: #F x 3), 1/2*cotangents corresponding angles

    Returns
    ----------
    np.array (dimensions : #V x 1) one scalar value per vertex.
    """
    div_X = get_per_vertex_divergence(mesh, X, cotans)
    u = scipy.sparse.linalg.spsolve(C, div_X)
    logger.info(f'Solved Δ(u) = div(X). Linear system error |Δ(u) - div(X)| = {np.linalg.norm(C * u - div_X):.6e}')
    u = u - np.amin(u)  # make start value equal 0
    u = 2*u
    return u

create_mesh_boundary_attributes

create_mesh_boundary_attributes(mesh, low_boundary_vs, high_boundary_vs)

Creates a default vertex attribute data['boundary']=0. Then it gives the value 1 to the vertices that belong to the lower boundary, and the value 2 to the vertices that belong to the higher boundary.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def create_mesh_boundary_attributes(
    mesh: Mesh, low_boundary_vs: list[int], high_boundary_vs: list[int]
) -> None:
    """
    Creates a default vertex attribute data['boundary']=0. Then it gives the value 1 to the vertices that belong
    to the lower boundary, and the value 2 to the vertices that belong to the higher boundary.
    """
    mesh.update_default_vertex_attributes({'boundary': 0})
    for vkey, data in mesh.vertices(data=True):
        if vkey in low_boundary_vs:
            data['boundary'] = 1
        elif vkey in high_boundary_vs:
            data['boundary'] = 2

get_existing_cut_indices

get_existing_cut_indices(mesh)

Returns:

Type Description
list, int.

The cut indices (data['cut']>0) that exist on the mesh vertices.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def get_existing_cut_indices(mesh: Mesh) -> list[int]:
    """
    Returns
    ----------
        list, int.
        The cut indices (data['cut']>0) that exist on the mesh vertices.
    """
    cut_indices = []
    for _vkey, data in mesh.vertices(data=True):
        if data['cut'] > 0 and data['cut'] not in cut_indices:
            cut_indices.append(data['cut'])
    cut_indices = sorted(cut_indices)
    return cut_indices

get_existing_boundary_indices

get_existing_boundary_indices(mesh)

Returns:

Type Description
list, int.

The boundary indices (data['boundary']>0) that exist on the mesh vertices.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def get_existing_boundary_indices(mesh: Mesh) -> list[int]:
    """
    Returns
    ----------
        list, int.
        The boundary indices (data['boundary']>0) that exist on the mesh vertices.
    """
    indices = []
    for _vkey, data in mesh.vertices(data=True):
        if data['boundary'] > 0 and data['boundary'] not in indices:
            indices.append(data['boundary'])
    boundary_indices = sorted(indices)
    return boundary_indices

get_vertices_that_belong_to_cuts

get_vertices_that_belong_to_cuts(mesh, cut_indices)

Returns:

Type Description
dict, key: int, the index of each cut

value: dict, the points that belong to this cut (point_list_to_dict format)

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def get_vertices_that_belong_to_cuts(
    mesh: Mesh, cut_indices: list[int]
) -> dict[int, dict[int, list[float]]]:
    """
    Returns
    ----------
        dict, key: int, the index of each cut
              value: dict, the points that belong to this cut (point_list_to_dict format)
    """
    cuts_dict: dict[int, list[list[float]]] = {i: [] for i in cut_indices}

    for vkey, data in mesh.vertices(data=True):
        if data['cut'] > 0:
            cut_index = data['cut']
            cuts_dict[cut_index].append(mesh.vertex_coordinates(vkey))

    result: dict[int, dict[int, list[float]]] = {}
    for cut_index in cuts_dict:
        result[cut_index] = utils.point_list_to_dict(cuts_dict[cut_index])

    return result

save_vertex_attributes

save_vertex_attributes(mesh)

Saves the boundary and cut attributes that are on the mesh on a dictionary.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def save_vertex_attributes(mesh: Mesh) -> dict[str, Any]:
    """
    Saves the boundary and cut attributes that are on the mesh on a dictionary.
    """
    v_attributes_dict: dict[str, Any] = {'boundary_1': [], 'boundary_2': [], 'cut': {}}

    cut_indices = []
    for _vkey, data in mesh.vertices(data=True):
        cut_index = data['cut']
        if cut_index not in cut_indices:
            cut_indices.append(cut_index)
    cut_indices = sorted(cut_indices)

    for cut_index in cut_indices:
        v_attributes_dict['cut'][cut_index] = []

    for vkey, data in mesh.vertices(data=True):
        if data['boundary'] == 1:
            v_coords = mesh.vertex_coordinates(vkey)
            pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2])
            v_attributes_dict['boundary_1'].append(pt)
        elif data['boundary'] == 2:
            v_coords = mesh.vertex_coordinates(vkey)
            pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2])
            v_attributes_dict['boundary_2'].append(pt)
        if data['cut'] > 0:
            cut_index = data['cut']
            v_coords = mesh.vertex_coordinates(vkey)
            pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2])
            v_attributes_dict['cut'][cut_index].append(pt)
    return v_attributes_dict

restore_mesh_attributes

restore_mesh_attributes(mesh, v_attributes_dict)

Restores the cut and boundary attributes on the mesh vertices from the dictionary of the previously saved attributes

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def restore_mesh_attributes(mesh: Mesh, v_attributes_dict: dict[str, Any]) -> None:
    """
    Restores the cut and boundary attributes on the mesh vertices from the dictionary of the previously saved attributes
    """
    mesh.update_default_vertex_attributes({'boundary': 0})
    mesh.update_default_vertex_attributes({'cut': 0})

    D_THRESHOLD = 0.01

    # Build KDTree once for all queries
    vkeys = list(mesh.vertices())
    welded_mesh_vertices = np.array([mesh.vertex_coordinates(vkey) for vkey in vkeys], dtype=np.float64)
    indices_to_vkeys = dict(enumerate(vkeys))
    tree = cKDTree(welded_mesh_vertices)

    def _restore_attribute_batch(pts_list, attr_name, attr_value):
        """Restore attribute for a batch of points using KDTree."""
        if not pts_list:
            return
        query_pts = np.array([[p.x, p.y, p.z] if hasattr(p, 'x') else p for p in pts_list], dtype=np.float64)
        distances, indices = tree.query(query_pts)
        for dist, idx in zip(distances, indices):
            if dist ** 2 < D_THRESHOLD:
                c_vkey = indices_to_vkeys[idx]
                mesh.vertex_attribute(c_vkey, attr_name, value=attr_value)

    _restore_attribute_batch(v_attributes_dict['boundary_1'], 'boundary', 1)
    _restore_attribute_batch(v_attributes_dict['boundary_2'], 'boundary', 2)

    for cut_index in v_attributes_dict['cut']:
        _restore_attribute_batch(v_attributes_dict['cut'][cut_index], 'cut', int(cut_index))

replace_mesh_vertex_attribute

replace_mesh_vertex_attribute(mesh, old_attr, old_val, new_attr, new_val)

Replaces one vertex attribute with a new one. For all the vertices where data[old_attr]=old_val, then the old_val is replaced with 0, and data[new_attr]=new_val.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def replace_mesh_vertex_attribute(
    mesh: Mesh, old_attr: str, old_val: int, new_attr: str, new_val: int
) -> None:
    """
    Replaces one vertex attribute with a new one. For all the vertices where data[old_attr]=old_val, then the
    old_val is replaced with 0, and data[new_attr]=new_val.
    """
    for vkey, data in mesh.vertices(data=True):
        if data[old_attr] == old_val:
            mesh.vertex_attribute(vkey, old_attr, 0)
            mesh.vertex_attribute(vkey, new_attr, new_val)

assign_vertex_distance

assign_interpolation_distance_to_mesh_vertices
assign_interpolation_distance_to_mesh_vertices(mesh, weight, target_LOW, target_HIGH)

Fills in the 'get_distance' attribute of every vertex of the mesh.

Parameters:

Name Type Description Default
mesh Mesh
required
weight float

The weighting of the distances from the lower and the upper target, from 0 to 1.

required
target_LOW CompoundTarget

The lower compound target.

required
target_HIGH CompoundTarget | None

The upper compound target.

required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py
def assign_interpolation_distance_to_mesh_vertices(
    mesh: Mesh, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget | None
) -> None:
    """
    Fills in the 'get_distance' attribute of every vertex of the mesh.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    weight: float,
        The weighting of the distances from the lower and the upper target, from 0 to 1.
    target_LOW: :class: 'compas_slicer.pre_processing.CompoundTarget'
        The lower compound target.
    target_HIGH:  :class: 'compas_slicer.pre_processing.CompoundTarget'
        The upper compound target.
    """
    # Vectorized computation for all vertices at once
    distances = _compute_all_distances_vectorized(weight, target_LOW, target_HIGH)
    for vkey, d in zip(mesh.vertices(), distances):
        mesh.vertex[vkey]['scalar_field'] = float(d)
assign_interpolation_distance_to_mesh_vertex
assign_interpolation_distance_to_mesh_vertex(vkey, weight, target_LOW, target_HIGH)

Fills in the 'get_distance' attribute for a single vertex with vkey.

Parameters:

Name Type Description Default
vkey int

The vertex key.

required
weight float

The weighting of the distances from the lower and the upper target, from 0 to 1.

required
target_LOW CompoundTarget

The lower compound target.

required
target_HIGH CompoundTarget | None

The upper compound target.

required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py
def assign_interpolation_distance_to_mesh_vertex(
    vkey: int, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget | None
) -> float:
    """
    Fills in the 'get_distance' attribute for a single vertex with vkey.

    Parameters
    ----------
    vkey: int
        The vertex key.
    weight: float,
        The weighting of the distances from the lower and the upper target, from 0 to 1.
    target_LOW: :class: 'compas_slicer.pre_processing.CompoundTarget'
        The lower compound target.
    target_HIGH:  :class: 'compas_slicer.pre_processing.CompoundTarget'
        The upper compound target.
    """
    if target_LOW and target_HIGH:  # then interpolate targets
        d = get_weighted_distance(vkey, weight, target_LOW, target_HIGH)
    elif target_LOW:  # then offset target
        offset = weight * target_LOW.get_max_dist()
        d = target_LOW.get_distance(vkey) - offset
    else:
        raise ValueError('You need to provide at least one target')
    return d
get_weighted_distance
get_weighted_distance(vkey, weight, target_LOW, target_HIGH)

Computes the weighted get_distance for a single vertex with vkey.

Parameters:

Name Type Description Default
vkey int

The vertex key.

required
weight float

The weighting of the distances from the lower and the upper target, from 0 to 1.

required
target_LOW CompoundTarget

The lower compound target.

required
target_HIGH CompoundTarget

The upper compound target.

required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py
def get_weighted_distance(
    vkey: int, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget
) -> float:
    """
    Computes the weighted get_distance for a single vertex with vkey.

    Parameters
    ----------
    vkey: int
        The vertex key.
    weight: float,
        The weighting of the distances from the lower and the upper target, from 0 to 1.
    target_LOW: :class: 'compas_slicer.pre_processing.CompoundTarget'
        The lower compound target.
    target_HIGH:  :class: 'compas_slicer.pre_processing.CompoundTarget'
        The upper compound target.
    """
    # --- calculation with uneven weights
    if target_HIGH.has_uneven_weights:
        d_low = target_LOW.get_distance(vkey)  # float
        ds_high = target_HIGH.get_all_distances_for_vkey(vkey)  # list of floats (# number_of_boundaries)

        if target_HIGH.number_of_boundaries > 1:
            weights_remapped = [remap_unbound(weight, 0, weight_max, 0, 1)
                                for weight_max in target_HIGH.weight_max_per_cluster]
            weights = weights_remapped
        else:
            weights = [weight]

        distances = [(weight - 1) * d_low + weight * d_high for d_high, weight in zip(ds_high, weights)]

        # return the distance based on the union method of the high target
        if target_HIGH.union_method == 'min':
            # --- simple union
            return np.min(distances)
        elif target_HIGH.union_method == 'smooth':
            # --- blend (smooth) union
            return blend_union_list(values=distances, r=target_HIGH.union_params[0])
        elif target_HIGH.union_method == 'chamfer':
            # --- blend (smooth) union
            return chamfer_union_list(values=distances, r=target_HIGH.union_params[0])
        elif target_HIGH.union_method == 'stairs':
            # --- stairs union
            return stairs_union_list(values=distances, r=target_HIGH.union_params[0], n=target_HIGH.union_params[1])

    # --- simple calculation (without uneven weights)
    else:
        d_low = target_LOW.get_distance(vkey)
        d_high = target_HIGH.get_distance(vkey)
        return (d_low * (1 - weight)) - (d_high * weight)

compound_target

CompoundTarget
CompoundTarget(mesh, v_attr, value, DATA_PATH, union_method='min', union_params=None, geodesics_method='heat_cgal', anisotropic_scaling=False)

Represents a desired user-provided target. It acts as a key-frame that controls the print paths orientations. After the curved slicing , the print paths will be aligned to the compound target close to its area. The vertices that belong to the target are marked with their vertex attributes; they have data['v_attr'] = value.

Attributes:

Name Type Description
mesh :class:`compas.datastructures.Mesh`
v_attr str

The key of the attribute dict to be checked.

value int

The value of the attribute dict with key=v_attr. If in a vertex data[v_attr]==value then the vertex is part of this target.

DATA_PATH str
has_blend_union bool
blend_radius float
geodesics_method str

'heat_cgal' CGAL heat geodesic distances (recommended) 'heat' custom heat geodesic distances

anisotropic_scaling bool

This is not yet implemented

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def __init__(
    self,
    mesh: Mesh,
    v_attr: str,
    value: int,
    DATA_PATH: str,
    union_method: UnionMethod = 'min',
    union_params: list[Any] | None = None,
    geodesics_method: GeodesicsMethod = 'heat_cgal',
    anisotropic_scaling: bool = False,
) -> None:

    if union_params is None:
        union_params = []
    logger.info(f'Creating target with attribute : {v_attr}={value}')
    logger.info(f'union_method: {union_method}, union_params: {union_params}')
    self.mesh = mesh
    self.v_attr = v_attr
    self.value = value
    self.DATA_PATH = DATA_PATH
    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)

    self.union_method = union_method
    self.union_params = union_params

    self.geodesics_method = geodesics_method
    self.anisotropic_scaling = anisotropic_scaling  # Anisotropic scaling not yet implemented

    self.offset = 0
    self.VN = len(list(self.mesh.vertices()))

    # filled in by function 'self.find_targets_connected_components()'
    self.all_target_vkeys: list[int] = []  # flattened list with all vi_starts
    self.clustered_vkeys: list[list[int]] = []  # nested list with all vi_starts
    self.number_of_boundaries: int = 0

    self.weight_max_per_cluster: list[float] = []

    # geodesic distances
    # filled in by function 'self.update_distances_lists()'
    self._distances_lists: list[list[float]] = []  # Shape: number_of_boundaries x number_of_vertices
    self._distances_lists_flipped: list[list[float]] = []  # Shape: number_of_vertices x number_of_boundaries
    self._np_distances_lists_flipped: NDArray[np.floating] = np.array([])
    self._max_dist: float | None = None  # maximum distance from target on any mesh vertex

    # compute
    self.find_targets_connected_components()
    self.compute_geodesic_distances()
has_uneven_weights property
has_uneven_weights

Returns True if the target has uneven_weights calculated, False otherwise.

find_targets_connected_components
find_targets_connected_components()

Clusters all the vertices that belong to the target into neighborhoods using a graph. Each target can have an arbitrary number of neighborhoods/clusters. Fills in the attributes: self.all_target_vkeys, self.clustered_vkeys, self.number_of_boundaries

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def find_targets_connected_components(self) -> None:
    """
    Clusters all the vertices that belong to the target into neighborhoods using a graph.
    Each target can have an arbitrary number of neighborhoods/clusters.
    Fills in the attributes: self.all_target_vkeys, self.clustered_vkeys, self.number_of_boundaries
    """
    self.all_target_vkeys = [vkey for vkey, data in self.mesh.vertices(data=True) if
                             data[self.v_attr] == self.value]
    if len(self.all_target_vkeys) == 0:
        raise ValueError(
            f"No vertices in mesh with attribute '{self.v_attr}'={self.value}. "
            "Check your target creation."
        )
    G = _create_graph_from_mesh_vkeys(self.mesh, self.all_target_vkeys)
    if len(list(G.nodes())) != len(self.all_target_vkeys):
        raise RuntimeError("Graph node count doesn't match target vertex count.")
    self.number_of_boundaries = len(list(nx.connected_components(G)))

    for _i, cp in enumerate(nx.connected_components(G)):
        self.clustered_vkeys.append(list(cp))
    logger.info(
        f"Compound target with 'boundary'={self.value}. Number of connected_components : "
        f"{len(list(nx.connected_components(G)))}"
    )
compute_geodesic_distances
compute_geodesic_distances()

Computes the geodesic distances from each of the target's neighborhoods to all the mesh vertices. Fills in the distances attributes.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def compute_geodesic_distances(self) -> None:
    """
    Computes the geodesic distances from each of the target's neighborhoods  to all the mesh vertices.
    Fills in the distances attributes.
    """
    if self.geodesics_method == 'exact_igl':
        distances_lists = [get_igl_EXACT_geodesic_distances(self.mesh, vstarts) for vstarts in
                           self.clustered_vkeys]
    elif self.geodesics_method == 'heat_igl':
        distances_lists = [get_igl_HEAT_geodesic_distances(self.mesh, vstarts) for vstarts in
                           self.clustered_vkeys]
    elif self.geodesics_method == 'heat_cgal':
        distances_lists = [get_cgal_HEAT_geodesic_distances(self.mesh, vstarts) for vstarts in
                           self.clustered_vkeys]
    elif self.geodesics_method == 'heat':
        distances_lists = [get_custom_HEAT_geodesic_distances(self.mesh, vstarts, str(self.OUTPUT_PATH)) for vstarts in
                           self.clustered_vkeys]
    else:
        raise ValueError('Unknown geodesics method : ' + self.geodesics_method)

    distances_lists = [list(dl) for dl in distances_lists]  # number_of_boundaries x #V
    self.update_distances_lists(distances_lists)
update_distances_lists
update_distances_lists(distances_lists)

Fills in the distances attributes.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def update_distances_lists(self, distances_lists: list[list[float]]) -> None:
    """
    Fills in the distances attributes.
    """
    self._distances_lists = distances_lists
    self._distances_lists_flipped = []  # empty
    for i in range(self.VN):
        current_values = [self._distances_lists[list_index][i] for list_index in range(self.number_of_boundaries)]
        self._distances_lists_flipped.append(current_values)
    self._np_distances_lists_flipped = np.array(self._distances_lists_flipped)
    self._max_dist = np.max(self._np_distances_lists_flipped)
compute_uneven_boundaries_weight_max
compute_uneven_boundaries_weight_max(other_target)

If the target has multiple neighborhoods/clusters of vertices, then it computes their maximum distance from the other_target. Based on that it calculates their weight_max for the interpolation process

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def compute_uneven_boundaries_weight_max(self, other_target: CompoundTarget) -> None:
    """
    If the target has multiple neighborhoods/clusters of vertices, then it computes their maximum distance from
    the other_target. Based on that it calculates their weight_max for the interpolation process
    """
    if self.number_of_boundaries > 1:
        ds_avg_HIGH = self.get_boundaries_rel_dist_from_other_target(other_target)
        max_param = max(ds_avg_HIGH)
        for i, d in enumerate(ds_avg_HIGH):  # offset all distances except the maximum one
            if abs(d - max_param) > 0.01:  # if it isn't the max value
                ds_avg_HIGH[i] = d + self.offset

        self.weight_max_per_cluster = [d / max_param for d in ds_avg_HIGH]
        logger.info(f'weight_max_per_cluster: {self.weight_max_per_cluster}')
    else:
        logger.info("Did not compute_norm_of_gradient uneven boundaries, target consists of single component")
get_boundaries_rel_dist_from_other_target
get_boundaries_rel_dist_from_other_target(other_target, avg_type='median')

Returns a list, one relative distance value per connected boundary neighborhood. That is the average of the distances of the vertices of that boundary neighborhood from the other_target.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_boundaries_rel_dist_from_other_target(
    self, other_target: CompoundTarget, avg_type: Literal['mean', 'median'] = 'median'
) -> list[float]:
    """
    Returns a list, one relative distance value per connected boundary neighborhood.
    That is the average of the distances of the vertices of that boundary neighborhood from the other_target.
    """
    distances = []
    for vi_starts in self.clustered_vkeys:
        ds = [other_target.get_distance(vi) for vi in vi_starts]
        if avg_type == 'mean':
            distances.append(statistics.mean(ds))
        else:  # 'median'
            distances.append(statistics.median(ds))
    return distances
get_avg_distances_from_other_target
get_avg_distances_from_other_target(other_target)

Returns the minimum and maximum distance of the vertices of this target from the other_target

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_avg_distances_from_other_target(self, other_target: CompoundTarget) -> float:
    """
    Returns the minimum and maximum distance of the vertices of this target from the other_target
    """
    extreme_distances = []
    for v_index in other_target.all_target_vkeys:
        extreme_distances.append(self.get_all_distances()[v_index])
    return float(np.average(np.array(extreme_distances)))
get_all_clusters_distances_dict
get_all_clusters_distances_dict()

Returns dict. keys: index of connected target neighborhood, value: list, distances (one per vertex).

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_all_clusters_distances_dict(self) -> dict[int, list[float]]:
    """ Returns dict. keys: index of connected target neighborhood, value: list, distances (one per vertex). """
    return {i: self._distances_lists[i] for i in range(self.number_of_boundaries)}
get_max_dist
get_max_dist()

Returns the maximum distance that the target has on a mesh vertex.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_max_dist(self) -> float | None:
    """ Returns the maximum distance that the target has on a mesh vertex. """
    return self._max_dist
get_all_distances
get_all_distances()

Return distances for all vertices as 1D array, applying union method.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_all_distances(self) -> np.ndarray:
    """Return distances for all vertices as 1D array, applying union method."""
    if self.union_method == 'min':
        return np.min(self._np_distances_lists_flipped, axis=1)
    elif self.union_method == 'smooth':
        return np.array([
            blend_union_list(row.tolist(), self.union_params[0])
            for row in self._np_distances_lists_flipped
        ])
    elif self.union_method == 'chamfer':
        return np.array([
            chamfer_union_list(row.tolist(), self.union_params[0])
            for row in self._np_distances_lists_flipped
        ])
    elif self.union_method == 'stairs':
        return np.array([
            stairs_union_list(row.tolist(), self.union_params[0], self.union_params[1])
            for row in self._np_distances_lists_flipped
        ])
    else:
        raise ValueError(f"Unknown union method: {self.union_method}")
get_all_distances_array
get_all_distances_array()

Return raw distances as (n_boundaries, n_vertices) array.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_all_distances_array(self) -> np.ndarray:
    """Return raw distances as (n_boundaries, n_vertices) array."""
    return np.array(self._distances_lists)
get_all_distances_for_vkey
get_all_distances_for_vkey(i)

Returns distances from each cluster separately for vertex i. Smooth union doesn't play here any role.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_all_distances_for_vkey(self, i: int) -> list[float]:
    """ Returns distances from each cluster separately for vertex i. Smooth union doesn't play here any role. """
    return [self._distances_lists[list_index][i] for list_index in range(self.number_of_boundaries)]
get_distance
get_distance(i)

Return get_distance for vertex with vkey i.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def get_distance(self, i: int) -> float:
    """ Return get_distance for vertex with vkey i. """
    if self.union_method == 'min':
        # --- simple union
        return float(np.min(self._np_distances_lists_flipped[i]))
    elif self.union_method == 'smooth':
        # --- blend (smooth) union
        return blend_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0])
    elif self.union_method == 'chamfer':
        # --- blend (smooth) union
        return chamfer_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0])
    elif self.union_method == 'stairs':
        # --- stairs union
        return stairs_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0],
                                 n=self.union_params[1])
    else:
        raise ValueError("Unknown Union method : ", self.union_method)
laplacian_smoothing
laplacian_smoothing(iterations, strength)

Smooth the distances on the mesh, using iterative laplacian smoothing.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def laplacian_smoothing(self, iterations: int, strength: float) -> None:
    """ Smooth the distances on the mesh, using iterative laplacian smoothing. """
    L = utils.get_mesh_cotmatrix_igl(self.mesh, fix_boundaries=True)
    new_distances_lists = []

    logger.info('Laplacian smoothing of all distances')
    for _i, a in enumerate(self._distances_lists):
        a = np.array(a)  # a: numpy array containing the attribute to be smoothed
        for _ in range(iterations):  # iterative smoothing
            a_prime = a + strength * L * a
            a = a_prime
        new_distances_lists.append(list(a))
    self.update_distances_lists(new_distances_lists)
save_distances
save_distances(name)

Save distances to json. Saves one list with distance values (one per vertex).

Parameters:

Name Type Description Default
name str
required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def save_distances(self, name: str) -> None:
    """
    Save distances to json.
    Saves one list with distance values (one per vertex).

    Parameters
    ----------
    name: str, name of json to be saved
    """
    utils.save_to_json(self.get_all_distances().tolist(), self.OUTPUT_PATH, name)
assign_new_mesh
assign_new_mesh(mesh)

When the base mesh changes, a new mesh needs to be assigned.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def assign_new_mesh(self, mesh: Mesh) -> None:
    """ When the base mesh changes, a new mesh needs to be assigned. """
    mesh.to_json(self.OUTPUT_PATH + "/temp.obj")
    mesh = Mesh.from_json(self.OUTPUT_PATH + "/temp.obj")
    self.mesh = mesh
    self.VN = len(list(self.mesh.vertices()))
blend_union_list
blend_union_list(values, r)

Returns a smooth union of all the elements in the list, with blend radius blend_radius.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def blend_union_list(values: NDArray[np.floating] | list[float], r: float) -> float:
    """ Returns a smooth union of all the elements in the list, with blend radius blend_radius. """
    d_result: float = 9999999.0  # very big number
    for d in values:
        d_result = blend_union(d_result, float(d), r)
    return d_result
stairs_union_list
stairs_union_list(values, r, n)

Returns a stairs union of all the elements in the list, with blend radius r and number of peaks n-1.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def stairs_union_list(values: NDArray[np.floating] | list[float], r: float, n: int) -> float:
    """ Returns a stairs union of all the elements in the list, with blend radius r and number of peaks n-1."""
    d_result: float = 9999999.0  # very big number
    for _i, d in enumerate(values):
        d_result = stairs_union(d_result, float(d), r, n)
    return d_result
blend_union
blend_union(da, db, r)

Returns a smooth union of the two elements da, db with blend radius blend_radius.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def blend_union(da: float, db: float, r: float) -> float:
    """ Returns a smooth union of the two elements da, db with blend radius blend_radius. """
    e = max(r - abs(da - db), 0)
    return min(da, db) - e * e * 0.25 / r
chamfer_union
chamfer_union(a, b, r)

Returns a chamfer union of the two elements da, db with radius r.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def chamfer_union(a: float, b: float, r: float) -> float:
    """ Returns a chamfer union of the two elements da, db with radius r. """
    return min(min(a, b), (a - r + b) * math.sqrt(0.5))
stairs_union
stairs_union(a, b, r, n)

Returns a stairs union of the two elements da, db with radius r.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
def stairs_union(a: float, b: float, r: float, n: int) -> float:
    """ Returns a stairs union of the two elements da, db with radius r. """
    s = r / n
    u = b - r
    return min(min(a, b), 0.5 * (u + a + abs((u - a + s) % (2 * s) - s)))

geodesics

GeodesicsCache
GeodesicsCache()

Cache for geodesic distances to avoid redundant computations.

Note: This class is kept for backwards compatibility but now uses CGAL. The CGAL solver has its own internal caching via _cgal_solver_cache.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def __init__(self) -> None:
    self._cache: dict[tuple[int, str], NDArray[np.floating]] = {}
    self._mesh_hash: int | None = None
clear
clear()

Clear the cache.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def clear(self) -> None:
    """Clear the cache."""
    self._cache.clear()
    self._mesh_hash = None
get_distances
get_distances(mesh, sources, method='heat')

Get geodesic distances from sources, using cache when possible.

Parameters:

Name Type Description Default
mesh Mesh

The mesh to compute distances on.

required
sources list[int]

Source vertex indices.

required
method str

Geodesic method (ignored, always uses CGAL heat method).

'heat'

Returns:

Type Description
NDArray

Minimum distance from any source to each vertex.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def get_distances(
    self, mesh: Mesh, sources: list[int], method: str = 'heat'
) -> NDArray[np.floating]:
    """Get geodesic distances from sources, using cache when possible.

    Parameters
    ----------
    mesh : Mesh
        The mesh to compute distances on.
    sources : list[int]
        Source vertex indices.
    method : str
        Geodesic method (ignored, always uses CGAL heat method).

    Returns
    -------
    NDArray
        Minimum distance from any source to each vertex.
    """
    return get_heat_geodesic_distances(mesh, sources)
GeodesicsSolver
GeodesicsSolver(mesh, OUTPUT_PATH)

Computes custom geodesic distances. Starts from implementation of the method presented in the paper 'Geodesics in Heat' (Crane, 2013)

Attributes:

Name Type Description
mesh :class: compas.datastructures.Mesh
OUTPUT_PATH str
Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def __init__(self, mesh: Mesh, OUTPUT_PATH: str) -> None:
    logger.info('GeodesicsSolver')
    self.mesh = mesh
    self.OUTPUT_PATH = OUTPUT_PATH

    self.use_forwards_euler = True

    # Compute matrices using NumPy implementations
    self.cotans = utils.get_mesh_cotans(mesh)
    self.L = utils.get_mesh_cotmatrix(mesh, fix_boundaries=False)
    self.M = utils.get_mesh_massmatrix(mesh)
diffuse_heat
diffuse_heat(vi_sources, v_equalize=None)

Heat diffusion using iterative backward Euler.

This is a custom Python implementation of the heat method. For production use, prefer CGAL's heat method (geodesics_method='heat_cgal') which uses intrinsic Delaunay triangulation for better accuracy.

Parameters:

Name Type Description Default
vi_sources list[int]

The vertex indices of the heat sources.

required
v_equalize list[int] | None

Vertex indices whose values should be equalized (for handling saddle points).

None

Returns:

Type Description
NDArray

Heat distribution u, with sources at 0 and increasing away from them.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def diffuse_heat(
    self,
    vi_sources: list[int],
    v_equalize: list[int] | None = None,
) -> NDArray[np.floating]:
    """
    Heat diffusion using iterative backward Euler.

    This is a custom Python implementation of the heat method. For production use,
    prefer CGAL's heat method (geodesics_method='heat_cgal') which uses intrinsic
    Delaunay triangulation for better accuracy.

    Parameters
    ----------
    vi_sources : list[int]
        The vertex indices of the heat sources.
    v_equalize : list[int] | None
        Vertex indices whose values should be equalized (for handling saddle points).

    Returns
    -------
    NDArray
        Heat distribution u, with sources at 0 and increasing away from them.
    """
    if not v_equalize:
        v_equalize = []

    # First assign starting values (0 everywhere, 1 on the sources)
    u0 = np.zeros(len(list(self.mesh.vertices())))
    u0[vi_sources] = 1.0
    u = u0

    # Pre-factor the matrix ONCE outside the loop (major speedup)
    # Using backward Euler: (M - δL)u' = M·u
    S = self.M - DELTA * self.L
    solver = scipy.sparse.linalg.factorized(S)

    for _i in range(HEAT_DIFFUSION_ITERATIONS):
        b = self.M * u
        u_prime = solver(b)

        if len(v_equalize) > 0:
            u_prime[v_equalize] = np.min(u_prime[v_equalize])

        u = u_prime
        u[vi_sources] = 1.0  # enforce Dirichlet boundary: sources remain fixed

    # reverse values (to make sources at 0, increasing outward)
    u = ([np.max(u)] * len(u)) - u

    utils.save_to_json([float(value) for value in u], self.OUTPUT_PATH, 'diffused_heat.json')
    return u
get_geodesic_distances
get_geodesic_distances(u, vi_sources, v_equalize=None)

Finds geodesic distances from heat distribution u. I

Parameters:

Name Type Description Default
u NDArray[floating]
required
vi_sources list[int]
required
v_equalize list[int] | None
None
Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def get_geodesic_distances(
    self, u: NDArray[np.floating], vi_sources: list[int], v_equalize: list[int] | None = None
) -> NDArray[np.floating]:
    """
    Finds geodesic distances from heat distribution u. I

    Parameters
    ----------
    u: np.array, dimensions: V x 1 (one scalar value per vertex)
    vi_sources: list, int, the vertex indices of the sources
    v_equalize: list, int, the vertex indices whose value should be equalized
    """
    X = get_face_gradient_from_scalar_field(self.mesh, u)
    X = normalize_gradient(X)
    geodesic_dist = get_scalar_field_from_gradient(self.mesh, X, self.L, self.cotans)
    if math.isnan(geodesic_dist[0]):
        raise RuntimeError("get_scalar_field_from_gradient returned NaN - check mesh quality.")
    geodesic_dist[vi_sources] = 0  # coerce boundary vertices to be on 0 (fixes small boundary imprecision)
    return geodesic_dist
get_heat_geodesic_distances
get_heat_geodesic_distances(mesh, vertices_start)

Calculate geodesic distances using CGAL heat method.

Uses compas_cgal's HeatGeodesicSolver which provides CGAL's Heat_method_3 implementation with intrinsic Delaunay triangulation.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh (must be triangulated).

required
vertices_start list[int]

Source vertex indices.

required

Returns:

Type Description
NDArray

Minimum distance from any source to each vertex.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def get_heat_geodesic_distances(
    mesh: Mesh, vertices_start: list[int]
) -> NDArray[np.floating]:
    """
    Calculate geodesic distances using CGAL heat method.

    Uses compas_cgal's HeatGeodesicSolver which provides CGAL's Heat_method_3
    implementation with intrinsic Delaunay triangulation.

    Parameters
    ----------
    mesh : Mesh
        A compas mesh (must be triangulated).
    vertices_start : list[int]
        Source vertex indices.

    Returns
    -------
    NDArray
        Minimum distance from any source to each vertex.
    """
    from compas_cgal.geodesics import HeatGeodesicSolver

    # Check if we have a cached solver for this mesh
    mesh_hash = hash((len(list(mesh.vertices())), len(list(mesh.faces()))))
    if mesh_hash not in _cgal_solver_cache:
        _cgal_solver_cache.clear()  # Clear old solvers
        _cgal_solver_cache[mesh_hash] = HeatGeodesicSolver(mesh)

    solver = _cgal_solver_cache[mesh_hash]

    # Compute distances for each source and take minimum
    all_distances = []
    for source in vertices_start:
        distances = solver.solve([source])
        all_distances.append(distances)

    return np.min(np.array(all_distances), axis=0)
get_custom_HEAT_geodesic_distances
get_custom_HEAT_geodesic_distances(mesh, vi_sources, OUTPUT_PATH, v_equalize=None)

Calculate geodesic distances using the custom heat method.

This is a pure Python implementation of the heat method (Crane et al., 2013). For production use, prefer CGAL's implementation via get_heat_geodesic_distances() which uses intrinsic Delaunay triangulation for better accuracy.

Parameters:

Name Type Description Default
mesh Mesh

A compas mesh (must be triangulated).

required
vi_sources list[int]

Source vertex indices.

required
OUTPUT_PATH str

Path to save intermediate results.

required
v_equalize list[int] | None

Vertices to equalize (for saddle point handling).

None

Returns:

Type Description
NDArray

Geodesic distance from sources to each vertex.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
def get_custom_HEAT_geodesic_distances(
    mesh: Mesh,
    vi_sources: list[int],
    OUTPUT_PATH: str,
    v_equalize: list[int] | None = None,
) -> NDArray[np.floating]:
    """Calculate geodesic distances using the custom heat method.

    This is a pure Python implementation of the heat method (Crane et al., 2013).
    For production use, prefer CGAL's implementation via get_heat_geodesic_distances()
    which uses intrinsic Delaunay triangulation for better accuracy.

    Parameters
    ----------
    mesh : Mesh
        A compas mesh (must be triangulated).
    vi_sources : list[int]
        Source vertex indices.
    OUTPUT_PATH : str
        Path to save intermediate results.
    v_equalize : list[int] | None
        Vertices to equalize (for saddle point handling).

    Returns
    -------
    NDArray
        Geodesic distance from sources to each vertex.
    """
    geodesics_solver = GeodesicsSolver(mesh, OUTPUT_PATH)
    u = geodesics_solver.diffuse_heat(vi_sources, v_equalize)
    geodesic_dist = geodesics_solver.get_geodesic_distances(u, vi_sources, v_equalize)
    return geodesic_dist

gradient

get_vertex_gradient_from_face_gradient
get_vertex_gradient_from_face_gradient(mesh, face_gradient)

Finds vertex gradient given an already calculated per face gradient.

Parameters:

Name Type Description Default
mesh Mesh
required
face_gradient NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #V x 3) one gradient vector per vertex.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_vertex_gradient_from_face_gradient(
    mesh: Mesh, face_gradient: NDArray[np.floating]
) -> NDArray[np.floating]:
    """
    Finds vertex gradient given an already calculated per face gradient.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    face_gradient: np.array with one vec3 per face of the mesh. (dimensions : #F x 3)

    Returns
    ----------
    np.array (dimensions : #V x 3) one gradient vector per vertex.
    """
    logger.info('Computing per vertex gradient')
    V, F = _mesh_to_arrays(mesh)
    face_areas = np.array([mesh.face_area(f) for f in mesh.faces()], dtype=np.float64)
    return _vertex_gradient_vectorized(V, F, face_gradient, face_areas)
get_edge_gradient_from_vertex_gradient
get_edge_gradient_from_vertex_gradient(mesh, vertex_gradient)

Finds edge gradient given an already calculated per vertex gradient.

Parameters:

Name Type Description Default
mesh Mesh
required
vertex_gradient NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #E x 3) one gradient vector per edge.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_edge_gradient_from_vertex_gradient(
    mesh: Mesh, vertex_gradient: NDArray[np.floating]
) -> NDArray[np.floating]:
    """
    Finds edge gradient given an already calculated per vertex gradient.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    vertex_gradient: np.array with one vec3 per vertex of the mesh. (dimensions : #V x 3)

    Returns
    ----------
    np.array (dimensions : #E x 3) one gradient vector per edge.
    """
    edges = np.array(list(mesh.edges()), dtype=np.intp)
    return _edge_gradient_vectorized(edges, vertex_gradient)
get_face_gradient_from_scalar_field
get_face_gradient_from_scalar_field(mesh, u)

Finds face gradient from scalar field u. Scalar field u is given per vertex.

Parameters:

Name Type Description Default
mesh Mesh
required
u NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #F x 3) one gradient vector per face.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_face_gradient_from_scalar_field(
    mesh: Mesh, u: NDArray[np.floating]
) -> NDArray[np.floating]:
    """
    Finds face gradient from scalar field u.
    Scalar field u is given per vertex.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    u: list, float. (dimensions : #VN x 1)

    Returns
    ----------
    np.array (dimensions : #F x 3) one gradient vector per face.
    """
    logger.info('Computing per face gradient')
    V, F = _mesh_to_arrays(mesh)
    scalar_field = np.asarray(u, dtype=np.float64)
    face_normals = np.array([mesh.face_normal(f) for f in mesh.faces()], dtype=np.float64)
    face_areas = np.array([mesh.face_area(f) for f in mesh.faces()], dtype=np.float64)
    return _face_gradient_vectorized(V, F, scalar_field, face_normals, face_areas)
get_face_edge_vectors
get_face_edge_vectors(mesh, fkey)

Returns the edge vectors of the face with fkey.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_face_edge_vectors(
    mesh: Mesh, fkey: int
) -> tuple[NDArray[np.floating], NDArray[np.floating], NDArray[np.floating]]:
    """ Returns the edge vectors of the face with fkey. """
    e0, e1, e2 = mesh.face_halfedges(fkey)
    edge_0 = np.array(mesh.vertex_coordinates(e0[0])) - np.array(mesh.vertex_coordinates(e0[1]))
    edge_1 = np.array(mesh.vertex_coordinates(e1[0])) - np.array(mesh.vertex_coordinates(e1[1]))
    edge_2 = np.array(mesh.vertex_coordinates(e2[0])) - np.array(mesh.vertex_coordinates(e2[1]))
    return edge_0, edge_1, edge_2
get_per_vertex_divergence
get_per_vertex_divergence(mesh, X, cotans)

Computes the divergence of the gradient X for the mesh, using cotangent weights.

Parameters:

Name Type Description Default
mesh Mesh
required
X NDArray[floating]
required
cotans NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #V x 1) one float (divergence value) per vertex.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_per_vertex_divergence(
    mesh: Mesh, X: NDArray[np.floating], cotans: NDArray[np.floating]
) -> NDArray[np.floating]:
    """
    Computes the divergence of the gradient X for the mesh, using cotangent weights.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    X: np.array, (dimensions: #F x 3), per face gradient
    cotans:  np.array, (dimensions: #F x 3), 1/2*cotangents corresponding angles

    Returns
    ----------
    np.array (dimensions : #V x 1) one float (divergence value) per vertex.
    """
    V, F = _mesh_to_arrays(mesh)
    cotans = cotans.reshape(-1, 3)
    return _divergence_vectorized(V, F, X, cotans)
normalize_gradient
normalize_gradient(X)

Returns normalized gradient X.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def normalize_gradient(X: NDArray[np.floating]) -> NDArray[np.floating]:
    """ Returns normalized gradient X. """
    norm = np.linalg.norm(X, axis=1)[..., np.newaxis]
    return X / norm  # normalize
get_scalar_field_from_gradient
get_scalar_field_from_gradient(mesh, X, C, cotans)

Find scalar field u that best explains gradient X. Laplacian(u) = Divergence(X). This defines a scalar field up to translation, then we subtract the min to make sure it starts from 0.

Parameters:

Name Type Description Default
mesh Mesh
required
X NDArray[floating]
required
C csr_matrix

sparse matrix (dimensions: #V x #V), cotmatrix, each row i corresponding to v(i, :)

required
cotans NDArray[floating]
required

Returns:

Type Description
np.array (dimensions : #V x 1) one scalar value per vertex.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
def get_scalar_field_from_gradient(
    mesh: Mesh,
    X: NDArray[np.floating],
    C: scipy.sparse.csr_matrix,
    cotans: NDArray[np.floating],
) -> NDArray[np.floating]:
    """
    Find scalar field u that best explains gradient X.
    Laplacian(u) = Divergence(X).
    This defines a scalar field up to translation, then we subtract the min to make sure it starts from 0.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    X: np.array, (dimensions: #F x 3), per face gradient
    C: 'scipy.sparse.csr_matrix',
        sparse matrix (dimensions: #V x #V), cotmatrix, each row i corresponding to v(i, :)
    cotans: np.array, (dimensions: #F x 3), 1/2*cotangents corresponding angles

    Returns
    ----------
    np.array (dimensions : #V x 1) one scalar value per vertex.
    """
    div_X = get_per_vertex_divergence(mesh, X, cotans)
    u = scipy.sparse.linalg.spsolve(C, div_X)
    logger.info(f'Solved Δ(u) = div(X). Linear system error |Δ(u) - div(X)| = {np.linalg.norm(C * u - div_X):.6e}')
    u = u - np.amin(u)  # make start value equal 0
    u = 2*u
    return u

mesh_attributes_handling

create_mesh_boundary_attributes
create_mesh_boundary_attributes(mesh, low_boundary_vs, high_boundary_vs)

Creates a default vertex attribute data['boundary']=0. Then it gives the value 1 to the vertices that belong to the lower boundary, and the value 2 to the vertices that belong to the higher boundary.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def create_mesh_boundary_attributes(
    mesh: Mesh, low_boundary_vs: list[int], high_boundary_vs: list[int]
) -> None:
    """
    Creates a default vertex attribute data['boundary']=0. Then it gives the value 1 to the vertices that belong
    to the lower boundary, and the value 2 to the vertices that belong to the higher boundary.
    """
    mesh.update_default_vertex_attributes({'boundary': 0})
    for vkey, data in mesh.vertices(data=True):
        if vkey in low_boundary_vs:
            data['boundary'] = 1
        elif vkey in high_boundary_vs:
            data['boundary'] = 2
get_existing_cut_indices
get_existing_cut_indices(mesh)

Returns:

Type Description
list, int.

The cut indices (data['cut']>0) that exist on the mesh vertices.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def get_existing_cut_indices(mesh: Mesh) -> list[int]:
    """
    Returns
    ----------
        list, int.
        The cut indices (data['cut']>0) that exist on the mesh vertices.
    """
    cut_indices = []
    for _vkey, data in mesh.vertices(data=True):
        if data['cut'] > 0 and data['cut'] not in cut_indices:
            cut_indices.append(data['cut'])
    cut_indices = sorted(cut_indices)
    return cut_indices
get_existing_boundary_indices
get_existing_boundary_indices(mesh)

Returns:

Type Description
list, int.

The boundary indices (data['boundary']>0) that exist on the mesh vertices.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def get_existing_boundary_indices(mesh: Mesh) -> list[int]:
    """
    Returns
    ----------
        list, int.
        The boundary indices (data['boundary']>0) that exist on the mesh vertices.
    """
    indices = []
    for _vkey, data in mesh.vertices(data=True):
        if data['boundary'] > 0 and data['boundary'] not in indices:
            indices.append(data['boundary'])
    boundary_indices = sorted(indices)
    return boundary_indices
get_vertices_that_belong_to_cuts
get_vertices_that_belong_to_cuts(mesh, cut_indices)

Returns:

Type Description
dict, key: int, the index of each cut

value: dict, the points that belong to this cut (point_list_to_dict format)

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def get_vertices_that_belong_to_cuts(
    mesh: Mesh, cut_indices: list[int]
) -> dict[int, dict[int, list[float]]]:
    """
    Returns
    ----------
        dict, key: int, the index of each cut
              value: dict, the points that belong to this cut (point_list_to_dict format)
    """
    cuts_dict: dict[int, list[list[float]]] = {i: [] for i in cut_indices}

    for vkey, data in mesh.vertices(data=True):
        if data['cut'] > 0:
            cut_index = data['cut']
            cuts_dict[cut_index].append(mesh.vertex_coordinates(vkey))

    result: dict[int, dict[int, list[float]]] = {}
    for cut_index in cuts_dict:
        result[cut_index] = utils.point_list_to_dict(cuts_dict[cut_index])

    return result
save_vertex_attributes
save_vertex_attributes(mesh)

Saves the boundary and cut attributes that are on the mesh on a dictionary.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def save_vertex_attributes(mesh: Mesh) -> dict[str, Any]:
    """
    Saves the boundary and cut attributes that are on the mesh on a dictionary.
    """
    v_attributes_dict: dict[str, Any] = {'boundary_1': [], 'boundary_2': [], 'cut': {}}

    cut_indices = []
    for _vkey, data in mesh.vertices(data=True):
        cut_index = data['cut']
        if cut_index not in cut_indices:
            cut_indices.append(cut_index)
    cut_indices = sorted(cut_indices)

    for cut_index in cut_indices:
        v_attributes_dict['cut'][cut_index] = []

    for vkey, data in mesh.vertices(data=True):
        if data['boundary'] == 1:
            v_coords = mesh.vertex_coordinates(vkey)
            pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2])
            v_attributes_dict['boundary_1'].append(pt)
        elif data['boundary'] == 2:
            v_coords = mesh.vertex_coordinates(vkey)
            pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2])
            v_attributes_dict['boundary_2'].append(pt)
        if data['cut'] > 0:
            cut_index = data['cut']
            v_coords = mesh.vertex_coordinates(vkey)
            pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2])
            v_attributes_dict['cut'][cut_index].append(pt)
    return v_attributes_dict
restore_mesh_attributes
restore_mesh_attributes(mesh, v_attributes_dict)

Restores the cut and boundary attributes on the mesh vertices from the dictionary of the previously saved attributes

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def restore_mesh_attributes(mesh: Mesh, v_attributes_dict: dict[str, Any]) -> None:
    """
    Restores the cut and boundary attributes on the mesh vertices from the dictionary of the previously saved attributes
    """
    mesh.update_default_vertex_attributes({'boundary': 0})
    mesh.update_default_vertex_attributes({'cut': 0})

    D_THRESHOLD = 0.01

    # Build KDTree once for all queries
    vkeys = list(mesh.vertices())
    welded_mesh_vertices = np.array([mesh.vertex_coordinates(vkey) for vkey in vkeys], dtype=np.float64)
    indices_to_vkeys = dict(enumerate(vkeys))
    tree = cKDTree(welded_mesh_vertices)

    def _restore_attribute_batch(pts_list, attr_name, attr_value):
        """Restore attribute for a batch of points using KDTree."""
        if not pts_list:
            return
        query_pts = np.array([[p.x, p.y, p.z] if hasattr(p, 'x') else p for p in pts_list], dtype=np.float64)
        distances, indices = tree.query(query_pts)
        for dist, idx in zip(distances, indices):
            if dist ** 2 < D_THRESHOLD:
                c_vkey = indices_to_vkeys[idx]
                mesh.vertex_attribute(c_vkey, attr_name, value=attr_value)

    _restore_attribute_batch(v_attributes_dict['boundary_1'], 'boundary', 1)
    _restore_attribute_batch(v_attributes_dict['boundary_2'], 'boundary', 2)

    for cut_index in v_attributes_dict['cut']:
        _restore_attribute_batch(v_attributes_dict['cut'][cut_index], 'cut', int(cut_index))
replace_mesh_vertex_attribute
replace_mesh_vertex_attribute(mesh, old_attr, old_val, new_attr, new_val)

Replaces one vertex attribute with a new one. For all the vertices where data[old_attr]=old_val, then the old_val is replaced with 0, and data[new_attr]=new_val.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
def replace_mesh_vertex_attribute(
    mesh: Mesh, old_attr: str, old_val: int, new_attr: str, new_val: int
) -> None:
    """
    Replaces one vertex attribute with a new one. For all the vertices where data[old_attr]=old_val, then the
    old_val is replaced with 0, and data[new_attr]=new_val.
    """
    for vkey, data in mesh.vertices(data=True):
        if data[old_attr] == old_val:
            mesh.vertex_attribute(vkey, old_attr, 0)
            mesh.vertex_attribute(vkey, new_attr, new_val)

region_split

MeshSplitter
MeshSplitter(mesh, target_LOW, target_HIGH, DATA_PATH)

Curved slicing pre-processing step.

Takes one continuous mesh with various saddle points and splits it up at every saddle point following the direction of the iso-contour that intersects that saddle point, so that the resulting mesh has no remaining saddle points.

The result is a series of split meshes whose vertex attributes have been updated with boundary attributes at the newly created cuts, (i.e. they all have vertex 'boundary' attributes 1,2 on their lower and upper boundaries)

For each newly created mesh, a separate slicer needs to be created. Like that, we will always have one slicer per mesh with the correct attributes already assigned. However, it can still happen that a slicer that takes a split mesh outputs more than one vertical_layers_print_data (vertical layers).

Attributes:

Name Type Description
mesh :class: 'compas.datastructures.Mesh'
target_LOW :class: 'compas_slicer.pre_processing.CompoundTarget'
target_HIGH :class: 'compas_slicer.pre_processing.CompoundTarget'
DATA_PATH str, the path to the data folder
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def __init__(self, mesh, target_LOW, target_HIGH, DATA_PATH):
    self.mesh = mesh  # compas mesh
    self.DATA_PATH = DATA_PATH
    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
    self.target_LOW, self.target_HIGH = target_LOW, target_HIGH

    assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=0.5, target_LOW=self.target_LOW,
                                                   target_HIGH=self.target_HIGH)
    # Late import to avoid circular dependency
    from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation

    g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH)
    g_evaluation.find_critical_points()  # First estimation of saddle points with weight = 0.5
    self.saddles = g_evaluation.saddles
    self.cut_indices = []
run
run()

Runs the mesh splitting process. This consists of the following parts.

(1) Find the iso-contours that intersect the saddle points. Iteratively find the weights (from 0 to 1) that output a distance field whose iso-contour is intersecting each saddle point. Here two iterations are carried out (one rough and one exact search).

For each saddle point and its respective weight and iso-contour: (2) Find the zero-crossing points of the iso-contour and merge points that are close to the saddle to ensure connection. (3) Cleanup iso-contour. Only keep neighborhoods that are intersecting the saddle point. Discard all other. (4) Create the cut on the mesh. (5) Weld the resulting mesh, and restore all the vertex attributes. Note that the mesh remains in one piece, although the cuts have been created. (6) Update compound tardets with the new mesh.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def run(self):
    """
    Runs the mesh splitting process. This consists of the following parts.

    (1) Find the iso-contours that intersect the saddle points.
    Iteratively find the weights (from 0 to 1) that output a distance field whose iso-contour is intersecting
    each saddle point. Here two iterations are carried out (one rough and one exact search).

    For each saddle point and its respective weight and iso-contour:
    (2) Find the zero-crossing points of the iso-contour and merge points that are close to the saddle to ensure
    connection.
    (3) Cleanup iso-contour. Only keep neighborhoods that are intersecting the saddle point. Discard all other.
    (4) Create the cut on the mesh.
    (5) Weld the resulting mesh, and restore all the vertex attributes. Note that the mesh remains in one piece,
    although the cuts have been created.
    (6) Update compound tardets with the new mesh.
    """

    # (1) first rough estimation of split params
    split_params = self.identify_positions_to_split(self.saddles)
    # TODO: merge params that are too close together to avoid creation of very thin neighborhoods.
    logger.info(f"{len(split_params)} Split params. First rough estimation :  {split_params}")

    # split mesh at params
    logger.info('Splitting mesh at split params')
    current_cut_index = 1

    for i, param_first_estimation in enumerate(split_params):
        logger.info(f'cut_index : {current_cut_index}, param_first_estimation : {param_first_estimation:.6f}')

        # --- (1) More exact estimation of intersecting weight. Recompute gradient evaluation.
        # Find exact saddle point and the weight that intersects it.

        assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=param_first_estimation,
                                                       target_LOW=self.target_LOW, target_HIGH=self.target_HIGH)
        # Late import to avoid circular dependency
        from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation

        g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH)
        g_evaluation.find_critical_points()
        saddles_ds_tupples = [(vkey, abs(g_evaluation.mesh.vertex_attribute(vkey, 'scalar_field'))) for vkey in
                              g_evaluation.saddles]
        saddles_ds_tupples = sorted(saddles_ds_tupples, key=lambda saddle_tupple: saddle_tupple[1])
        vkey = saddles_ds_tupples[0][0]
        t = self.identify_positions_to_split([vkey])[0]
        logger.info(f'vkey_exact : {vkey} , t_exact : {t:.6f}')

        # --- (2) find zero-crossing points
        assign_interpolation_distance_to_mesh_vertices(self.mesh, t, self.target_LOW, self.target_HIGH)
        # Late import to avoid circular dependency
        from compas_slicer.slicers.slice_utilities import ScalarFieldContours

        zero_contours = ScalarFieldContours(self.mesh)
        zero_contours.compute()
        keys_of_clusters_to_keep = merge_clusters_saddle_point(zero_contours, saddle_vkeys=[vkey])

        # --- (3) Cleaning up zero-crossing neighborhoods
        cleanup_unrelated_isocontour_neighborhoods(zero_contours, keys_of_clusters_to_keep)

        if zero_contours:  # if there are remaining zero-crossing neighborhoods
            zero_contours = smoothen_cut(zero_contours, self.mesh, saddle_vkeys=[vkey], iterations=15,
                                         strength=0.2)  # smoothen the cut close to the saddle point.

            # save to json intermediary results
            zero_contours.save_point_clusters_as_polylines_to_json(self.OUTPUT_PATH,
                                                                   f'point_clusters_polylines_{int(i)}.json')

            #  --- (4) Create cut
            logger.info("Creating cut on mesh")
            self.cut_indices.append(current_cut_index)
            self.split_intersected_faces(zero_contours, current_cut_index)
            current_cut_index += 1

            #  --- (5) Weld mesh and restore attributes
            logger.info('Cleaning up the mesh. Welding and restoring attributes')
            v_attributes_dict = save_vertex_attributes(self.mesh)
            self.mesh = weld_mesh(self.mesh, self.OUTPUT_PATH)
            restore_mesh_attributes(self.mesh, v_attributes_dict)

            #  --- (6) Update targets
            if i < len(split_params) - 1:  # does not need to happen at the end
                logger.info('Updating targets, recomputing geodesic distances')
                self.update_targets()

        self.mesh.to_obj(str(Path(self.OUTPUT_PATH) / 'most_recent_cut_mesh.obj'))
update_targets
update_targets()

Update targets with the new mesh that was created during the split process. Note: This only works if the target vertices have not been touched. If all has gone well, targets can only have minima and maxima, so they should remain intact after the split

Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def update_targets(self):
    """
    Update targets with the new mesh that was created during the split process.
    Note: This only works if the target vertices have not been touched. If all has gone well, targets can only have
    minima and maxima, so they should remain intact after the split
    """
    self.target_LOW.assign_new_mesh(self.mesh)
    self.target_LOW.find_targets_connected_components()
    self.target_LOW.compute_geodesic_distances()
    if self.target_HIGH:
        self.target_HIGH.assign_new_mesh(self.mesh)
        self.target_HIGH.find_targets_connected_components()
        self.target_HIGH.compute_geodesic_distances()
split_intersected_faces
split_intersected_faces(zero_contours, cut_index)

Create cuts on intersected faces

Parameters:

Name Type Description Default
zero_contours
required
cut_index
required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def split_intersected_faces(self, zero_contours, cut_index):
    """
    Create cuts on intersected faces

    Parameters
    ----------
    zero_contours: :class: 'compas_slicer.pre_processing.ScalarFieldContours'
    cut_index: int, the vertex attribute value data['cut'] of the current cut
    """
    for key in zero_contours.sorted_point_clusters:  # cluster_pair
        edges = zero_contours.sorted_edge_clusters[key]
        pts = zero_contours.sorted_point_clusters[key]

        # add first vertex
        p = pts[0]
        v0 = self.mesh.add_vertex(x=p[0], y=p[1], z=p[2], attr_dict={'cut': 1})

        for i, edge in enumerate(edges):
            next_edge = edges[(i + 1) % len(edges)]
            p = pts[(i + 1) % len(pts)]

            faces_current_edge = self.mesh.edge_faces(u=edge[0], v=edge[1])
            faces_next_edge = self.mesh.edge_faces(u=next_edge[0], v=next_edge[1])

            fkey_common = list(set(faces_current_edge).intersection(faces_next_edge))[0]
            vkey_common = list(set(edge).intersection(next_edge))[0]
            v_other_a = list(set(edge).difference([vkey_common]))[0]
            v_other_b = list(set(next_edge).difference([vkey_common]))[0]

            v_new = self.mesh.add_vertex(x=p[0], y=p[1], z=p[2], attr_dict={'cut': cut_index})

            # remove and add faces
            if fkey_common in list(self.mesh.faces()):
                self.mesh.delete_face(fkey_common)
                self.mesh.add_face([vkey_common, v_new, v0])
                self.mesh.add_face([v_new, v_other_a, v0])
                self.mesh.add_face([v_other_b, v_other_a, v_new])
            else:
                logger.warning('Did not need to modify faces.')
            v0 = v_new

    self.mesh.cull_vertices()  # remove all unused vertices
    try:
        self.mesh.unify_cycles()
    except AssertionError:
        logger.warning('Could NOT unify cycles')
    if not self.mesh.is_valid():
        logger.warning('Attention! Mesh is NOT valid!')
identify_positions_to_split
identify_positions_to_split(saddles)

Find the weights that create iso-contours that intersect the saddle points.

Parameters:

Name Type Description Default
saddles
required

Returns:

Type Description
list, float, the weights from 0 to 1. One for each saddle point.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def identify_positions_to_split(self, saddles):
    """
    Find the weights that create iso-contours that intersect the saddle points.

    Parameters
    ----------
    saddles: list, int, the vertex keys of the saddle points

    Returns
    ----------
    list, float, the weights from 0 to 1. One for each saddle point.
    """
    split_params = []
    for vkey in saddles:
        param = self.find_weight_intersecting_vkey(vkey, threshold=HIT_THRESHOLD, resolution=T_SEARCH_RESOLUTION)
        split_params.append(param)
    return split_params
find_weight_intersecting_vkey
find_weight_intersecting_vkey(vkey, threshold, resolution)

Find the weights that intersect the vertex.

Parameters:

Name Type Description Default
vkey
required
threshold
required
resolution
required

Returns:

Type Description
float, the weights from 0 to 1.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def find_weight_intersecting_vkey(self, vkey, threshold, resolution):
    """
    Find the weights that intersect the vertex.

    Parameters
    ----------
    vkey: list, int, the vertex key to intersect
    threshold: float, the d value below which we consider we have a hit. Should be a very small value
    resolution: int, the resolution of search, should be a value more than 10**4

    Returns
    ----------
    float, the weights from 0 to 1.
    """
    weight_list = get_weights_list(n=resolution, start=0.001, end=0.999)
    # TODO: save next d to avoid re-evaluating
    for i, weight in enumerate(weight_list[:-1]):
        current_d = assign_interpolation_distance_to_mesh_vertex(vkey, weight, self.target_LOW, self.target_HIGH)
        next_d = assign_interpolation_distance_to_mesh_vertex(vkey, weight_list[i + 1], self.target_LOW, self.target_HIGH)
        if abs(current_d) < abs(next_d) and current_d < threshold:
            return weight
    raise ValueError(f'Could NOT find param for saddle vkey {vkey}!')
get_weights_list
get_weights_list(n, start=0.03, end=1.0)

Returns a numpy array with n numbers from start to end.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def get_weights_list(n, start=0.03, end=1.0):
    """ Returns a numpy array with n numbers from start to end. """
    return list(np.arange(start=start, stop=end, step=(end - start) / n))
separate_disconnected_components
separate_disconnected_components(mesh, attr, values, OUTPUT_PATH)

Given a mesh with cuts that have already been created, it separates the disconnected components by cutting along marked edges. Then it welds them and restores their attributes.

Parameters:

Name Type Description Default
mesh
required
attr
required
values
required
OUTPUT_PATH
required

Returns:

Type Description
list, :class: 'compas.datastructures.Mesh' The resulting split meshes.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def separate_disconnected_components(mesh, attr, values, OUTPUT_PATH):
    """
    Given a mesh with cuts that have already been created, it separates the disconnected
    components by cutting along marked edges. Then it welds them and restores their attributes.

    Parameters
    ----------
    mesh: :class: 'compas.datastructures.Mesh'
    attr: str, the key of the vertex attributes that signals the cuts. most likely 'cut'
    values: list, int, the cut indices
    OUTPUT_PATH: str, the path to the output folder

    Returns
    ----------
    list, :class: 'compas.datastructures.Mesh' The resulting split meshes.
    """

    v_attributes_dict = save_vertex_attributes(mesh)

    v, f = mesh.to_vertices_and_faces()
    v, f = np.array(v), np.array(f)

    # --- create cut flags for edges
    cut_flags = []
    for fkey in mesh.faces():
        edges = mesh.face_halfedges(fkey)
        current_face_flags = []
        for fu, fv in edges:
            fu_attr, fv_attr = mesh.vertex_attribute(fu, attr), mesh.vertex_attribute(fv, attr)
            if fu_attr == fv_attr and fu_attr in values:
                current_face_flags.append(1)
            else:
                current_face_flags.append(0)
        cut_flags.append(current_face_flags)
    cut_flags = np.array(cut_flags)
    if cut_flags.shape != f.shape:
        raise RuntimeError(f"Cut flags shape {cut_flags.shape} doesn't match face array shape {f.shape}")

    # --- cut mesh by duplicating vertices along cut edges
    v_cut, f_cut = _trimesh_cut_mesh(v, f, cut_flags)
    connected_components = _trimesh_face_components(v_cut, f_cut)

    f_dict: dict[int, list[list[int]]] = {}
    for i in range(max(connected_components) + 1):
        f_dict[i] = []
    for f_index, face in enumerate(f_cut):
        component = connected_components[f_index]
        f_dict[component].append(face.tolist() if hasattr(face, 'tolist') else list(face))

    cut_meshes = []
    for component in f_dict:
        cut_mesh = Mesh.from_vertices_and_faces(v_cut.tolist(), f_dict[component])
        cut_mesh.cull_vertices()
        if len(list(cut_mesh.faces())) > 2:
            temp_path = Path(OUTPUT_PATH) / 'temp.obj'
            cut_mesh.to_obj(str(temp_path))
            cut_mesh = Mesh.from_obj(str(temp_path))  # get rid of too many empty keys
            cut_meshes.append(cut_mesh)

    for mesh in cut_meshes:
        restore_mesh_attributes(mesh, v_attributes_dict)

    return cut_meshes
smoothen_cut
smoothen_cut(zero_contours, mesh, saddle_vkeys, iterations, strength, distance_threshold=20.0 * 20.0)

Iterative smoothing of the cut around the saddle point.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def smoothen_cut(zero_contours, mesh, saddle_vkeys, iterations, strength, distance_threshold=20.0*20.0):
    """ Iterative smoothing of the cut around the saddle point. """
    for _ in range(iterations):
        saddles = [mesh.vertex_coordinates(key) for key in saddle_vkeys]
        count = 0

        for cluster_key in zero_contours.sorted_point_clusters:
            pts = zero_contours.sorted_point_clusters[cluster_key]
            edges = zero_contours.sorted_edge_clusters[cluster_key]
            for i, pt in enumerate(pts):
                if 0.01 < min([distance_point_point_sqrd(pt, s) for s in saddles]) < distance_threshold:
                    count += 1
                    edge = edges[i]
                    prev = pts[i - 1]
                    next_p = pts[(i + 1) % len(pts)]
                    avg = [(prev[0] + next_p[0]) * 0.5, (prev[1] + next_p[1]) * 0.5, (prev[2] + next_p[2]) * 0.5]
                    point = np.array(avg) * strength + np.array(pt) * (1 - strength)
                    line = Line(mesh.vertex_coordinates(edge[0]), mesh.vertex_coordinates(edge[1]))
                    projected_pt = project_point_line(point, line)
                    pts[i] = projected_pt
                    zero_contours.sorted_point_clusters[cluster_key][i] = projected_pt

    return zero_contours
merge_clusters_saddle_point
merge_clusters_saddle_point(zero_contours, saddle_vkeys)

Merge the points that are on edges that have the saddle point as one of their edges to the saddle point.

Parameters:

Name Type Description Default
zero_contours ScalarFieldContours

Contours object.

required
saddle_vkeys list[int]

Vertex keys of the current saddle points (currently only single saddle point supported).

required

Returns:

Type Description
list, int. The index neighborhoods that are related to the saddle points.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def merge_clusters_saddle_point(zero_contours, saddle_vkeys):
    """
    Merge the points that are on edges that have the saddle point as one of their edges to the saddle point.

    Parameters
    ----------
    zero_contours : ScalarFieldContours
        Contours object.
    saddle_vkeys : list[int]
        Vertex keys of the current saddle points (currently only single saddle point supported).

    Returns
    ----------
    list, int. The index neighborhoods that are related to the saddle points.
    """
    keys_of_clusters_to_keep = []
    for cluster_key in zero_contours.sorted_edge_clusters:
        edges = zero_contours.sorted_edge_clusters[cluster_key]
        for i, e in enumerate(edges):
            for saddle_vkey in saddle_vkeys:
                if saddle_vkey in e:
                    zero_contours.sorted_point_clusters[cluster_key][i] = \
                        zero_contours.mesh.vertex_coordinates(saddle_vkey)  # merge point with saddle point
                    logger.debug(f'Found edge to merge: {e}')
                    if cluster_key not in keys_of_clusters_to_keep:
                        keys_of_clusters_to_keep.append(cluster_key)

    return keys_of_clusters_to_keep
cleanup_unrelated_isocontour_neighborhoods
cleanup_unrelated_isocontour_neighborhoods(zero_contours, keys_of_clusters_to_keep)

Remove the neighborhoods of the zero-crossing contour when they are not related to the saddle points.

Parameters:

Name Type Description Default
zero_contours
required
keys_of_clusters_to_keep
required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def cleanup_unrelated_isocontour_neighborhoods(zero_contours, keys_of_clusters_to_keep):
    """
    Remove the neighborhoods of the zero-crossing contour when they are not related to the saddle points.

    Parameters
    ----------
    zero_contours: :class: 'compas_slicer.pre_processing.ScalarFieldContours'
    keys_of_clusters_to_keep: list, int. The index neighborhoods that are related to the saddle points.
    """
    if len(keys_of_clusters_to_keep) == 0:
        logger.error("No common vertex found! Skipping this split_param")
        return None
    else:
        logger.info(f'keys_of_clusters_to_keep: {keys_of_clusters_to_keep}')
        # empty all other clusters that are not in the matching_pair
        sorted_point_clusters_clean = copy.deepcopy(zero_contours.sorted_point_clusters)
        sorted_edge_clusters_clean = copy.deepcopy(zero_contours.sorted_edge_clusters)
        for key in zero_contours.sorted_point_clusters:
            if key not in keys_of_clusters_to_keep:
                del sorted_point_clusters_clean[key]
                del sorted_edge_clusters_clean[key]

        zero_contours.sorted_point_clusters = sorted_point_clusters_clean
        zero_contours.sorted_edge_clusters = sorted_edge_clusters_clean
weld_mesh
weld_mesh(mesh, OUTPUT_PATH, precision='2f')

Welds mesh and check that the result is valid.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
def weld_mesh(mesh, OUTPUT_PATH, precision='2f'):
    """ Welds mesh and check that the result is valid. """
    for f_key in mesh.faces():
        if len(mesh.face_vertices(f_key)) < 3:
            mesh.delete_face(f_key)

    welded_mesh = mesh.weld(precision=precision)

    temp_path = Path(OUTPUT_PATH) / 'temp.obj'
    welded_mesh.to_obj(str(temp_path))  # make sure there's no empty f_keys
    welded_mesh = Mesh.from_obj(str(temp_path))  # TODO: find a better way to do this

    try:
        welded_mesh.unify_cycles()
        logger.info("Unified cycles of welded_mesh")
    except AssertionError:
        logger.error("Attention! Could NOT unify cycles of welded_mesh")

    if not welded_mesh.is_valid():  # and iteration < 3:
        logger.error("Attention! Welded mesh is INVALID")
    if not welded_mesh.is_manifold():
        logger.error("Attention! Welded mesh is NON-MANIFOLD")

    return welded_mesh

topological_sorting

DirectedGraph
DirectedGraph()

Base class for topological sorting of prints that consist of several parts that lie on each other. For example the graph A->B->C would represent a print that consists of three parts; A, B, C where A lies on the build platform, B lies on A, and C lies on B. This graph cannot have cycles; cycles would represent an unfeasible print.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def __init__(self) -> None:
    logger.info('Topological sorting')

    self.G: nx.DiGraph = nx.DiGraph()
    self.create_graph_nodes()
    self.root_indices = self.find_roots()
    logger.info(f'Graph roots: {self.root_indices}')
    if len(self.root_indices) == 0:
        raise ValueError("No root nodes were found. At least one root node is needed.")
    self.end_indices = self.find_ends()
    logger.info(f'Graph ends: {self.end_indices}')
    if len(self.end_indices) == 0:
        raise ValueError("No end nodes were found. At least one end node is needed.")

    self.create_directed_graph_edges(copy.deepcopy(self.root_indices))
    logger.info(f'Nodes: {list(self.G.nodes(data=True))}')
    logger.info(f'Edges: {list(self.G.edges(data=True))}')

    self.N: int = len(list(self.G.nodes()))
    self.adj_list: list[list[int]] = self.get_adjacency_list()  # Nested list where adj_list[i] is a list of all the neighbors
    # of the i-th component
    self.check_that_all_nodes_found_their_connectivity()
    logger.info(f'Adjacency list: {self.adj_list}')
    self.in_degree: list[int] = self.get_in_degree()  # Nested list where adj_list[i] is a list of all the edges pointing
    # to the i-th node.
    self.all_orders: list[list[int]] = []
find_roots abstractmethod
find_roots()

Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
@abstractmethod
def find_roots(self) -> list[int]:
    """ Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first. """
    pass
find_ends abstractmethod
find_ends()

Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
@abstractmethod
def find_ends(self) -> list[int]:
    """ Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last. """
    pass
create_graph_nodes abstractmethod
create_graph_nodes()

Add the nodes to the graph with their attributes.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
@abstractmethod
def create_graph_nodes(self) -> None:
    """ Add the nodes to the graph with their attributes. """
    pass
get_children_of_node abstractmethod
get_children_of_node(root)

Find all the vertical_layers_print_data that lie on the current root segment.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
@abstractmethod
def get_children_of_node(self, root: int) -> tuple[list[int], list[Any]]:
    """ Find all the vertical_layers_print_data that lie on the current root segment. """
    pass
create_directed_graph_edges
create_directed_graph_edges(root_indices)

Create the connectivity of the directed graph using breadth-first search graph traversal.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def create_directed_graph_edges(self, root_indices: list[int]) -> None:
    """ Create the connectivity of the directed graph using breadth-first search graph traversal. """
    passed_nodes = []
    queue = root_indices

    while len(queue) > 0:
        queue = self.sort_queue_with_end_targets_last(queue)
        current_node = queue[0]
        queue.remove(current_node)
        passed_nodes.append(current_node)
        children, cut_ids = self.get_children_of_node(current_node)
        for child_key, common_cuts in zip(children, cut_ids):
            self.G.add_edge(current_node, child_key, cut=common_cuts)
        for child_key in children:
            if child_key in passed_nodes:
                raise ValueError('Error: cyclic directed graph detected.')
        for child_key in children:
            if child_key not in queue:
                queue.append(child_key)
check_that_all_nodes_found_their_connectivity
check_that_all_nodes_found_their_connectivity()

Assert that there is no island, i.e. no node or groups of nodes that are not connected to the base.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def check_that_all_nodes_found_their_connectivity(self) -> None:
    """ Assert that there is no island, i.e. no node or groups of nodes that are not connected to the base. """
    good_nodes = list(self.root_indices)
    for children_list in self.adj_list:
        for child in children_list:
            if child not in good_nodes:
                good_nodes.append(child)
    if len(good_nodes) != self.N:
        raise ValueError(
            f'Floating vertical layers detected: {len(good_nodes)} connected nodes vs {self.N} total. '
            'Check graph creation process.'
        )
sort_queue_with_end_targets_last
sort_queue_with_end_targets_last(queue)

Sorts the queue so that the vertical_layers_print_data that have an end target are always at the end.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def sort_queue_with_end_targets_last(self, queue: list[int]) -> list[int]:
    """ Sorts the queue so that the vertical_layers_print_data that have an end target are always at the end. """
    queue_copy = copy.deepcopy(queue)
    for index in queue:
        if index in self.end_indices:
            queue_copy.remove(index)  # remove it from its current position
            queue_copy.append(index)  # append it last
    return queue_copy
get_adjacency_list
get_adjacency_list()

Returns adjacency list. Nested list where adj_list[i] is a list of all the neighbors of the ith component

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def get_adjacency_list(self) -> list[list[int]]:
    """ Returns adjacency list. Nested list where adj_list[i] is a list of all the neighbors of the ith component"""
    adj_list: list[list[int]] = [[] for _ in range(self.N)]  # adjacency list , size = len(Nodes), stores nodes' neighbors
    for i, adjacent_to_node in self.G.adjacency():
        for key in adjacent_to_node:
            adj_list[i].append(key)
    return adj_list
get_in_degree
get_in_degree()

Returns in_degree list. Nested list where adj_list[i] is a list of all the edges pointing to the node.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def get_in_degree(self) -> list[int]:
    """ Returns in_degree list. Nested list where adj_list[i] is a list of all the edges pointing to the node."""
    in_degree = [0] * self.N  # in_degree,  size = len(Nodes) , stores in-degree of a node
    for key_degree_tuple in self.G.in_degree:
        key = key_degree_tuple[0]
        degree = key_degree_tuple[1]
        in_degree[key] = degree
    return in_degree
get_all_topological_orders
get_all_topological_orders()

Finds all topological orders from source to sink.

Returns:

Type Description
list of lists of integers. Each list represents the indices of one topological order.
Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def get_all_topological_orders(self) -> list[list[int]]:
    """
    Finds  all topological orders from source to sink.
    Returns
    ----------
    list of lists of integers. Each list represents the indices of one topological order.
    """
    self.all_orders = []  # make sure list is empty
    discovered = [False] * self.N
    path: list[int] = []  # list to store the topological order
    self.get_orders(path, discovered)
    logger.info(f'Found {len(self.all_orders)} possible orders')
    return self.all_orders
get_orders
get_orders(path, discovered)

Finds all topological orders from source to sink. Sorting algorithm taken from https://www.techiedelight.com/find-all-possible-topological-orderings-of-dag/

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def get_orders(self, path: list[int], discovered: list[bool]) -> None:
    """
    Finds all topological orders from source to sink.
    Sorting algorithm taken from https://www.techiedelight.com/find-all-possible-topological-orderings-of-dag/
    """
    for v in range(self.N):  # for every node
        # proceed only if in-degree of current node is 0 and current node is not processed yet
        if self.in_degree[v] == 0 and not discovered[v]:

            # for every adjacent vertex u of v, reduce in-degree of u by 1
            for u in self.adj_list[v]:
                self.in_degree[u] = self.in_degree[u] - 1

            # include current node in the path and mark it as discovered
            path.append(v)
            discovered[v] = True

            # recur
            self.get_orders(path, discovered)

            # backtrack: reset in-degree information for the current node
            for u in self.adj_list[v]:
                self.in_degree[u] = self.in_degree[u] + 1

            # backtrack: remove current node from the path and mark it as undiscovered
            path.pop()
            discovered[v] = False

    # print the topological order if all vertices are included in the path
    if len(path) == self.N:
        self.all_orders.append(copy.deepcopy(path))
get_parents_of_node
get_parents_of_node(node_index)

Returns the parents of node with i = node_index.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def get_parents_of_node(self, node_index: int) -> list[int]:
    """ Returns the parents of node with i = node_index. """
    return [j for j, adj in enumerate(self.adj_list) if node_index in adj]
MeshDirectedGraph
MeshDirectedGraph(all_meshes, DATA_PATH)

Bases: DirectedGraph

The MeshDirectedGraph is used for topological sorting of multiple meshes that have been generated as a result of region split over the saddle points of the mesh scalar function

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def __init__(self, all_meshes: list[Mesh], DATA_PATH: str) -> None:
    self.all_meshes = all_meshes
    self.DATA_PATH = DATA_PATH
    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
    DirectedGraph.__init__(self)
find_roots
find_roots()

Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def find_roots(self) -> list[int]:
    """ Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first. """
    roots: list[int] = []
    for i, mesh in enumerate(self.all_meshes):
        for _vkey, data in mesh.vertices(data=True):
            if i not in roots and data['boundary'] == 1:
                roots.append(i)
    return roots
find_ends
find_ends()

Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def find_ends(self) -> list[int]:
    """ Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last. """
    ends: list[int] = []
    for i, mesh in enumerate(self.all_meshes):
        for _vkey, data in mesh.vertices(data=True):
            if i not in ends and data['boundary'] == 2:
                ends.append(i)
    return ends
create_graph_nodes
create_graph_nodes()

Add each of the split meshes to the graph as nodes. Cuts and boundaries are stored as attributes.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def create_graph_nodes(self) -> None:
    """ Add each of the split meshes to the graph as nodes. Cuts and boundaries are stored as attributes. """
    for i, m in enumerate(self.all_meshes):
        self.G.add_node(i, cuts=get_existing_cut_indices(m),
                        boundaries=get_existing_boundary_indices(m))
get_children_of_node
get_children_of_node(root)

Find all the nodes that lie on the current root.

Parameters:

Name Type Description Default
root int
required

Returns:

Type Description
2 lists [child1, child2, ...], [[common cuts 1], [common cuts 2] ...]
Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def get_children_of_node(self, root: int) -> tuple[list[int], list[list[int]]]:
    """
    Find all the nodes that lie on the current root.

    Parameters
    ----------
    root: int, index of root node

    Returns
    ----------
    2 lists [child1, child2, ...], [[common cuts 1], [common cuts 2] ...]
    """
    children: list[int] = []
    cut_ids: list[list[int]] = []
    parent_data = self.G.nodes(data=True)[root]

    for key, data in self.G.nodes(data=True):
        common_cuts = list(set(parent_data['cuts']).intersection(data['cuts']))

        if key != root and len(common_cuts) > 0 \
                and (key, root) not in self.G.edges() \
                and (root, key) not in self.G.edges() and is_true_mesh_adjacency(self.all_meshes, key, root):
            if len(common_cuts) != 1:  # if all cuts worked, this should be 1. But life is not perfect.
                logger.error(
                    f'More than one common cuts between two pieces in the following split meshes. '
                    f'Root : {root}, child : {key} . Common cuts : {common_cuts}'
                    'Probably some cut did not separate components'
                )
            children.append(key)
            cut_ids.append(common_cuts)

    # --- debugging output
    return children, cut_ids
SegmentsDirectedGraph
SegmentsDirectedGraph(mesh, segments, max_d_threshold, DATA_PATH)

Bases: DirectedGraph

The SegmentsDirectedGraph is used for topological sorting of multiple vertical_layers_print_data in one mesh

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def __init__(
    self, mesh: Mesh, segments: list[VerticalLayer], max_d_threshold: float, DATA_PATH: str
) -> None:
    self.mesh = mesh
    self.segments = segments
    self.max_d_threshold = max_d_threshold
    self.DATA_PATH = DATA_PATH
    self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
    DirectedGraph.__init__(self)
find_roots
find_roots()

Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def find_roots(self) -> list[int]:
    """ Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first. """
    boundary_pts = utils.get_mesh_vertex_coords_with_attribute(self.mesh, 'boundary', 1)
    root_segments: list[int] = []
    for i, segment in enumerate(self.segments):
        first_curve_pts = segment.paths[0].points
        if are_neighboring_point_clouds(boundary_pts, first_curve_pts, 2 * self.max_d_threshold):
            root_segments.append(i)
    return root_segments
find_ends
find_ends()

Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def find_ends(self) -> list[int]:
    """ Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last. """
    boundary_pts = utils.get_mesh_vertex_coords_with_attribute(self.mesh, 'boundary', 2)
    end_segments: list[int] = []
    for i, segment in enumerate(self.segments):
        last_curve_pts = segment.paths[-1].points
        if are_neighboring_point_clouds(boundary_pts, last_curve_pts, self.max_d_threshold):
            end_segments.append(i)
    return end_segments
create_graph_nodes
create_graph_nodes()

Add each segment to to the graph as a node.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def create_graph_nodes(self) -> None:
    """ Add each segment to to the graph as a node. """
    for i, _segment in enumerate(self.segments):
        self.G.add_node(i)
get_children_of_node
get_children_of_node(root)

Find all the nodes that lie on the current root.

Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def get_children_of_node(self, root: int) -> tuple[list[int], list[None]]:
    """ Find all the nodes that lie on the current root. """
    children: list[int] = []
    root_segment = self.segments[root]
    root_last_crv_pts = root_segment.paths[-1].points
    # utils.save_to_json(utils.point_list_to_dict(root_last_crv_pts), self.OUTPUT_PATH, "root_last_crv_pts.json")

    for i, segment in enumerate(self.segments):
        if i != root:
            segment_first_curve_pts = segment.paths[0].points
            # utils.save_to_json(utils.point_list_to_dict(segment_first_curve_pts), self.OUTPUT_PATH,
            #                    "segment_first_curve_pts.json")
            if are_neighboring_point_clouds(root_last_crv_pts, segment_first_curve_pts, self.max_d_threshold):
                children.append(i)
    return children, [None for _ in children]  # None because this graph doesn't have cut ids
are_neighboring_point_clouds
are_neighboring_point_clouds(pts1, pts2, threshold)

Returns True if 3 or more points of the point clouds are closer than the threshold. False otherwise.

Parameters:

Name Type Description Default
pts1 list[Point]
required
pts2 list[Point]
required
threshold float
required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def are_neighboring_point_clouds(pts1: list[Point], pts2: list[Point], threshold: float) -> bool:
    """
    Returns True if 3 or more points of the point clouds are closer than the threshold. False otherwise.

    Parameters
    ----------
    pts1: list, :class: 'compas.geometry.Point'
    pts2: list, :class: 'compas.geometry.Point'
    threshold: float
    """
    if len(pts1) == 0 or len(pts2) == 0:
        return False
    # Vectorized: compute min distance from each pt in pts1 to pts2
    arr1 = np.asarray(pts1, dtype=np.float64)
    arr2 = np.asarray(pts2, dtype=np.float64)
    distances = min_distances_to_set(arr1, arr2)
    return np.sum(distances < threshold) > 5
is_true_mesh_adjacency
is_true_mesh_adjacency(all_meshes, key1, key2)

Returns True if the two meshes share 3 or more vertices. False otherwise.

Parameters:

Name Type Description Default
all_meshes list[Mesh]
required
key1 int
required
key2 int
required
Source code in src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
def is_true_mesh_adjacency(all_meshes: list[Mesh], key1: int, key2: int) -> bool:
    """
    Returns True if the two meshes share 3 or more vertices. False otherwise.

    Parameters
    ----------
    all_meshes: list, :class: 'compas.datastructures.Mesh'
    key1: int, index of mesh1
    key2: int, index of mesh2
    """
    mesh1 = all_meshes[key1]
    mesh2 = all_meshes[key2]
    pts_mesh2 = [mesh2.vertex_coordinates(vkey) for vkey, data in mesh2.vertices(data=True)
                 if (data['cut'] > 0 or data['boundary'] > 0)]
    pts_mesh1 = [mesh1.vertex_coordinates(vkey) for vkey, data in mesh1.vertices(data=True)
                 if (data['cut'] > 0 or data['boundary'] > 0)]
    if len(pts_mesh1) == 0 or len(pts_mesh2) == 0:
        return False
    # Vectorized: compute min distance from each pt in mesh1 to pts_mesh2
    arr1 = np.asarray(pts_mesh1, dtype=np.float64)
    arr2 = np.asarray(pts_mesh2, dtype=np.float64)
    distances = min_distances_to_set(arr1, arr2)
    # Count points with essentially zero distance (shared vertices)
    return np.sum(distances ** 2 < 0.00001) >= 3