Skip to content

Commit 12def75

Browse files
authored
Merge pull request #23 from stuarteberg/fix-smoothing-off-by-one
Fix off-by-one error in laplacian smoothing
2 parents baee6ef + 58999c2 commit 12def75

File tree

6 files changed

+162
-9
lines changed

6 files changed

+162
-9
lines changed

.gitignore

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,3 @@
11
build/
2-
*.pyd
2+
*.pyd
3+
__pycache__/

conda-recipe/meta.yaml

+1
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@ test:
3636
- marching_cubes
3737
requires:
3838
- pytest
39+
- pandas
3940
source_files:
4041
- test/*
4142
commands:

src/laplacian_smoothing.cpp

-2
Original file line numberDiff line numberDiff line change
@@ -94,8 +94,6 @@ void smooth(Mesh& mesh, unsigned int rounds)
9494
swap(normals, new_norms);
9595
swap(vertices, new_verts);
9696
}
97-
swap(normals, new_norms);
98-
swap(vertices, new_verts);
9997
delete[] new_norms;
10098
delete[] new_verts;
10199
mesh.normals = normals;
60 Bytes
Binary file not shown.

test/laplacian_smooth.py

+89
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
import numpy as np
2+
import pandas as pd
3+
4+
def laplacian_smooth(vertices, faces, rounds=1):
    """
    Pure-python reference implementation of Laplacian smoothing.

    Each pass relocates every vertex to the average of its own position
    and those of its adjacent neighbors.  The input array is NOT modified;
    a smoothed copy (float32) is returned.

    A known drawback of this technique is overall mesh shrinkage, which
    grows with the number of iterations.  Any precomputed normals are
    stale after smoothing.

    Args:
        vertices:
            Vertex coordinates, shape=(N,3).
        faces:
            Face definitions, shape=(M,3); each row holds three indexes
            into the vertices array.
        rounds:
            Number of smoothing passes.  More passes give a smoother
            mesh but more shrinkage (and more CPU time).

    Returns:
        New vertex array, shape=(N,3), dtype=float32.

    Note:
        Smoothing can produce degenerate faces in small special cases
        (e.g. two vertices of a diamond collapsing onto the same point),
        though meshes produced by marching cubes don't usually trigger
        this.
    """
    verts = np.asarray(vertices, dtype=np.float32)
    tris = np.asarray(faces)

    # All triangle edges, normalized so the smaller vertex id comes first.
    pairs = np.vstack((tris[:, [0, 1]],
                       tris[:, [1, 2]],
                       tris[:, [2, 0]]))
    pairs.sort(axis=1)

    # Unique undirected edges, in lexicographic order.  (The ordering is
    # not required for correctness, but may improve cache locality of
    # the vertex gathers below.)
    edges = np.unique(pairs, axis=0)
    del pairs

    # A vertex's neighbor count is simply how many times it appears
    # anywhere in the unique edge list.
    degree = np.bincount(edges.ravel(), minlength=len(verts))

    left, right = edges[:, 0], edges[:, 1]
    scratch = np.empty_like(verts)
    for _ in range(rounds):
        scratch[:] = verts

        # Accumulate each edge endpoint's position into the other endpoint.
        # Plain fancy-indexed "+=" is undefined when the index array holds
        # repeats, so the unbuffered np.add.at() is required here.
        np.add.at(scratch, left, verts[right, :])
        np.add.at(scratch, right, verts[left, :])

        # +1 because each vertex's own position is part of its sum.
        scratch[:] /= (degree[:, None] + 1)

        # Swap buffers rather than reallocating between iterations.
        verts, scratch = scratch, verts

    return verts

test/test_marching_regression.py

+70-6
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,22 @@
11
import marching_cubes
2-
import numpy
2+
import numpy as np
33
import pytest
44
import os
55

6+
from laplacian_smooth import laplacian_smooth
67

78
@pytest.fixture
def volume():
    """Load the sample input volume that ships with the test data."""
    data_dir = os.path.dirname(__file__)
    return np.load(os.path.join(data_dir, "data/input/sample.npy"))
1112

1213

1314
@pytest.fixture
1415
def mesh_loader():
1516
"""Load mesh data from npz file"""
1617

1718
def _loader(mesh_file_name):
18-
data = numpy.load(mesh_file_name)
19+
data = np.load(mesh_file_name)
1920
vertices = data["vertices"]
2021
normals = data["normals"]
2122
faces = data["faces"]
@@ -36,6 +37,69 @@ def test_regression(volume, mesh_loader, smoothing, reference_mesh_file):
3637

3738
ref_vertices, ref_normals, ref_faces = mesh_loader(reference_mesh_file)
3839

39-
numpy.testing.assert_array_almost_equal(vertices, ref_vertices)
40-
numpy.testing.assert_array_almost_equal(normals, ref_normals)
41-
numpy.testing.assert_array_almost_equal(faces, ref_faces)
40+
np.testing.assert_array_almost_equal(vertices, ref_vertices)
41+
np.testing.assert_array_almost_equal(normals, ref_normals)
42+
assert (faces == ref_faces).all()
43+
44+
45+
def test_smoothing(volume):
    """
    Check the compiled smoothing implementation against the pure-python
    reference implementation, and verify smoothing's basic invariants.
    """
    ROUNDS = 3
    vertices, normals, faces = marching_cubes.march(volume, 0)
    smoothed_vertices, smoothed_normals, smoothed_faces = marching_cubes.march(volume, ROUNDS)

    # Compare with our reference implementation of laplacian smoothing.
    # BUG FIX: the np.allclose() result was previously discarded, so this
    # comparison could never fail.  Assert it.
    ref_smoothed_vertices = laplacian_smooth(vertices, faces, ROUNDS)
    assert np.allclose(smoothed_vertices, ref_smoothed_vertices, rtol=0.001), \
        "Smoothed vertices do not match the reference implementation."

    assert (faces == smoothed_faces).all(), \
        "Smoothing should not affect face definitions."

    assert not (normals == smoothed_normals).all(), \
        "Normals should not be the same after smoothing."
59+
60+
61+
def test_reference_smoothing_trivial():
    """
    Sanity-check the laplacian_smooth() reference function on a
    degenerate one-face "mesh" of three collinear vertices: after a
    single round, every vertex must land on the centroid.
    """
    vertices = np.array([[0.0, 0.0, 0.0],
                         [0.0, 0.0, 1.0],
                         [0.0, 0.0, 2.0]])

    # The single "face" is really a straight line, which makes the
    # expected result easy to reason about.
    faces = np.array([[0, 1, 2]])

    centroid = vertices.sum(axis=0) / 3
    smoothed = laplacian_smooth(vertices, faces, 1)
    assert (smoothed == centroid).all()
75+
76+
77+
def test_reference_smoothing_hexagon():
    """
    Sanity-check the laplacian_smooth() reference function by
    'smoothing' a simple 2D hexagon fan — an easy case to understand.
    """
    # Grid map of the hexagon; each non-negative entry is a vertex index
    # ('_' marks an empty cell).
    _ = -1
    grid = [[[_,_,_,_,_,_,_],
             [_,_,0,_,1,_,_],
             [_,_,_,_,_,_,_],
             [_,2,_,3,_,4,_],
             [_,_,_,_,_,_,_],
             [_,_,5,_,6,_,_],
             [_,_,_,_,_,_,_]]]

    # Shift by 1 so labeled cells become nonzero; their coordinates
    # (in scan order, i.e. vertex-index order) become the vertices.
    grid = 1 + np.array(grid)
    original_vertices = np.transpose(grid.nonzero())

    # A triangle fan around the central vertex 3.
    faces = [[3,1,4],
             [3,4,6],
             [3,6,5],
             [3,5,2],
             [3,2,0],
             [3,0,1]]

    smoothed = laplacian_smooth(original_vertices, faces, 1)

    # Vertex 3 sits exactly at the center of its neighbors,
    # so smoothing must leave it in place.
    assert (smoothed[3] == original_vertices[3]).all()

0 commit comments

Comments
 (0)