[features] Better for loops, part 2/N (#3550)
taketwo authored Jan 14, 2020
2 parents e2f8229 + ca463df commit f9c0154
Showing 8 changed files with 73 additions and 81 deletions.
8 changes: 4 additions & 4 deletions features/include/pcl/features/cvfh.h
@@ -147,8 +147,8 @@ namespace pcl
inline void
getCentroidClusters (std::vector<Eigen::Vector3f, Eigen::aligned_allocator<Eigen::Vector3f> > & centroids)
{
for (std::size_t i = 0; i < centroids_dominant_orientations_.size (); ++i)
centroids.push_back (centroids_dominant_orientations_[i]);
for (const auto& centroid: centroids_dominant_orientations_)
centroids.push_back (centroid);
}

/** \brief Get the normal centroids used to compute different CVFH descriptors
@@ -157,8 +157,8 @@ namespace pcl
inline void
getCentroidNormalClusters (std::vector<Eigen::Vector3f, Eigen::aligned_allocator<Eigen::Vector3f> > & centroids)
{
for (std::size_t i = 0; i < dominant_normals_.size (); ++i)
centroids.push_back (dominant_normals_[i]);
for (const auto& normal: dominant_normals_)
centroids.push_back (normal);
}

/** \brief Sets max. Euclidean distance between points to be added to the cluster
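
For reference, a loop that push_backs every element of one vector into another can also be written as a single range insert. A minimal standalone sketch, assuming both vectors use the same Eigen aligned allocator as in the getter above (the helper name is illustrative, not the PCL API):

#include <vector>
#include <Eigen/Core>
#include <Eigen/StdVector>

using Vector3fVector = std::vector<Eigen::Vector3f, Eigen::aligned_allocator<Eigen::Vector3f> >;

// Append all stored centroids to the caller-supplied vector in one call
// instead of an element-by-element loop.
void appendCentroids (const Vector3fVector& stored, Vector3fVector& out)
{
  out.reserve (out.size () + stored.size ());             // avoid repeated reallocations
  out.insert (out.end (), stored.begin (), stored.end ());
}
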
37 changes: 18 additions & 19 deletions features/include/pcl/features/from_meshes.h
@@ -16,34 +16,32 @@ namespace pcl
template <typename PointT, typename PointNT> inline void
computeApproximateNormals(const pcl::PointCloud<PointT>& cloud, const std::vector<pcl::Vertices>& polygons, pcl::PointCloud<PointNT>& normals)
{
int nr_points = static_cast<int>(cloud.points.size());
int nr_polygons = static_cast<int>(polygons.size());
const auto nr_points = cloud.points.size();

normals.header = cloud.header;
normals.width = cloud.width;
normals.height = cloud.height;
normals.points.resize(nr_points);

for ( int i = 0; i < nr_points; ++i )
normals.points[i].getNormalVector3fMap() = Eigen::Vector3f::Zero();
for (auto& point: normals.points)
point.getNormalVector3fMap() = Eigen::Vector3f::Zero();

// NOTE: for efficiency the weight is computed implicitly by using the
// cross product, this causes inaccurate normals for meshes containing
// non-triangle polygons (quads or other types)
for ( int i = 0; i < nr_polygons; ++i )
for (const auto& polygon: polygons)
{
const int nr_points_polygon = (int)polygons[i].vertices.size();
if (nr_points_polygon < 3) continue;
if (polygon.vertices.size() < 3) continue;

// compute normal for triangle
Eigen::Vector3f vec_a_b = cloud.points[polygons[i].vertices[0]].getVector3fMap() - cloud.points[polygons[i].vertices[1]].getVector3fMap();
Eigen::Vector3f vec_a_c = cloud.points[polygons[i].vertices[0]].getVector3fMap() - cloud.points[polygons[i].vertices[2]].getVector3fMap();
Eigen::Vector3f vec_a_b = cloud.points[polygon.vertices[0]].getVector3fMap() - cloud.points[polygon.vertices[1]].getVector3fMap();
Eigen::Vector3f vec_a_c = cloud.points[polygon.vertices[0]].getVector3fMap() - cloud.points[polygon.vertices[2]].getVector3fMap();
Eigen::Vector3f normal = vec_a_b.cross(vec_a_c);
pcl::flipNormalTowardsViewpoint(cloud.points[polygons[i].vertices[0]], 0.0f, 0.0f, 0.0f, normal(0), normal(1), normal(2));
pcl::flipNormalTowardsViewpoint(cloud.points[polygon.vertices[0]], 0.0f, 0.0f, 0.0f, normal(0), normal(1), normal(2));

// add normal to all points in polygon
for ( int j = 0; j < nr_points_polygon; ++j )
normals.points[polygons[i].vertices[j]].getNormalVector3fMap() += normal;
for (const auto& vertex: polygon.vertices)
normals.points[vertex].getNormalVector3fMap() += normal;
}

for (std::size_t i = 0; i < nr_points; ++i)
@@ -70,12 +68,13 @@ namespace pcl
assert(cloud.points.size() == normals.points.size());

const auto nr_points = cloud.points.size();
covariances.resize(nr_points);
for (std::size_t i = 0; i < nr_points; ++i)
covariances.clear ();
covariances.reserve (nr_points);
for (const auto& point: normals.points)
{
Eigen::Vector3d normal(normals.points[i].normal_x,
normals.points[i].normal_y,
normals.points[i].normal_z);
Eigen::Vector3d normal (point.normal_x,
                        point.normal_y,
                        point.normal_z);

// compute rotation matrix
Eigen::Matrix3d rot;
@@ -86,13 +85,13 @@ namespace pcl
y.normalize();
rot.row(1) = y;
rot.row(0) = normal.cross(rot.row(1));

// compute approximate covariance
Eigen::Matrix3d cov;
cov << 1, 0, 0,
0, 1, 0,
0, 0, epsilon;
covariances[i] = rot.transpose()*cov*rot;
covariances.emplace_back (rot.transpose()*cov*rot);
}
}

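
The NOTE in the hunk above relies on the fact that the un-normalized cross product of two triangle edges has a magnitude equal to twice the triangle's area, so summing raw cross products weights each facet by its area with no explicit weight; only the first three vertices of a polygon are used, which is why quads and larger faces give only approximate normals. A minimal sketch of that accumulation step for a single triangle (hypothetical helper, not the PCL API):

#include <Eigen/Core>

// Accumulate the area-weighted facet normal of triangle (a, b, c) into the
// per-vertex sums; |cross| == 2 * area, so no explicit weight is needed.
inline void accumulateTriangleNormal (const Eigen::Vector3f& a,
                                      const Eigen::Vector3f& b,
                                      const Eigen::Vector3f& c,
                                      Eigen::Vector3f sums[3])
{
  const Eigen::Vector3f n = (a - b).cross (a - c);
  for (int v = 0; v < 3; ++v)
    sums[v] += n;   // every vertex of the facet receives the same contribution
}
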
13 changes: 6 additions & 7 deletions features/include/pcl/features/impl/board.hpp
@@ -138,7 +138,7 @@ pcl::BOARDLocalReferenceFrameEstimation<PointInT, PointNT, PointOutT>::planeFitt
// Plane Fitting using Singular Value Decomposition (SVD)
// -----------------------------------------------------

int n_points = static_cast<int> (points.rows ());
const auto n_points = points.rows ();
if (n_points == 0)
{
return;
@@ -254,9 +254,8 @@ pcl::BOARDLocalReferenceFrameEstimation<PointInT, PointNT, PointOutT>::computePo
Eigen::Vector3f best_margin_point;
bool best_point_found_on_margins = false;

float radius2 = tangent_radius_ * tangent_radius_;

float margin_distance2 = margin_thresh_ * margin_thresh_ * radius2;
const float radius2 = tangent_radius_ * tangent_radius_;
const float margin_distance2 = margin_thresh_ * margin_thresh_ * radius2;

float max_boundary_angle = 0;

@@ -400,9 +399,9 @@ pcl::BOARDLocalReferenceFrameEstimation<PointInT, PointNT, PointOutT>::computePo

//check if there is at least a hole
bool is_hole_present = false;
for (int i = 0; i < check_margin_array_size_; i++)
for (const auto check_margin: check_margin_array_)
{
if (!check_margin_array_[i])
if (!check_margin)
{
is_hole_present = true;
break;
@@ -471,7 +470,7 @@ pcl::BOARDLocalReferenceFrameEstimation<PointInT, PointNT, PointOutT>::computePo
float max_hole_prob = -std::numeric_limits<float>::max ();

//find holes
for (int ch = first_no_border; ch < check_margin_array_size_; ch++)
for (auto ch = first_no_border; ch < check_margin_array_size_; ch++)
{
if (!check_margin_array_[ch])
{
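
The "is there at least one hole" scan in the hunk above is a plain predicate search with early exit. Assuming check_margin_array_ is a standard container (as the range-based loop suggests), the same test can be expressed with std::any_of; a sketch with an illustrative helper name:

#include <algorithm>
#include <vector>

// True when at least one margin sector was not covered by a boundary point,
// i.e. the same condition the explicit break-out loop computes.
bool hasHole (const std::vector<bool>& check_margin_array)
{
  return std::any_of (check_margin_array.begin (), check_margin_array.end (),
                      [] (bool covered) { return !covered; });
}
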
6 changes: 3 additions & 3 deletions features/include/pcl/features/impl/brisk_2d.hpp
@@ -103,11 +103,11 @@ pcl::BRISK2DEstimation<PointInT, PointOutT, KeypointT, IntensityT>::generateKern
d_min_ = d_min;

// get the total number of points
const int rings = int (radius_list.size ());
const auto rings = radius_list.size ();
assert (radius_list.size () != 0 && radius_list.size () == number_list.size ());
points_ = 0; // remember the total number of points
for (int ring = 0; ring < rings; ring++)
points_ += number_list[ring];
for (const auto number: number_list)
points_ += number;

// set up the patterns
pattern_points_ = new BriskPatternPoint[points_*scales_*n_rot_];
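
Summing the per-ring point counts is a simple reduction, so std::accumulate says the same thing as the range-based loop above; a standalone sketch, assuming number_list holds ints:

#include <numeric>
#include <vector>

// Total number of pattern points over all rings.
int totalPoints (const std::vector<int>& number_list)
{
  return std::accumulate (number_list.begin (), number_list.end (), 0);
}
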
19 changes: 9 additions & 10 deletions features/include/pcl/features/impl/cppf.hpp
@@ -60,19 +60,17 @@ pcl::CPPFEstimation<PointInT, PointNT, PointOutT>::CPPFEstimation ()
template <typename PointInT, typename PointNT, typename PointOutT> void
pcl::CPPFEstimation<PointInT, PointNT, PointOutT>::computeFeature (PointCloudOut &output)
{
// Initialize output container - overwrite the sizes done by Feature::initCompute ()
output.points.resize (indices_->size () * input_->points.size ());
output.height = 1;
output.width = static_cast<std::uint32_t> (output.points.size ());
// Initialize output container
output.points.clear ();
output.points.reserve (indices_->size () * input_->points.size ());
output.is_dense = true;

// Compute point pair features for every pair of points in the cloud
for (std::size_t index_i = 0; index_i < indices_->size (); ++index_i)
for (const auto& i: *indices_)
{
std::size_t i = (*indices_)[index_i];
for (std::size_t j = 0 ; j < input_->points.size (); ++j)
{
PointOutT p;
// No need to calculate feature for identity pair (i, j) as they aren't used in future calculations
if (i != j)
{
if (
@@ -105,17 +103,18 @@ pcl::CPPFEstimation<PointInT, PointNT, PointOutT>::computeFeature (PointCloudOut
output.is_dense = false;
}
}
// Do not calculate the feature for identity pairs (i, i) as they are not used
// in the following computations
else
{
p.f1 = p.f2 = p.f3 = p.f4 = p.f5 = p.f6 = p.f7 = p.f8 = p.f9 = p.f10 = p.alpha_m = std::numeric_limits<float>::quiet_NaN ();
output.is_dense = false;
}

output.points[index_i*input_->points.size () + j] = p;
output.points.push_back (p);
}
}
// overwrite the sizes done by Feature::initCompute ()
output.height = 1;
output.width = static_cast<std::uint32_t> (output.points.size ());
}

#define PCL_INSTANTIATE_CPPFEstimation(T,NT,OutT) template class PCL_EXPORTS pcl::CPPFEstimation<T,NT,OutT>;
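
The hunk above swaps "resize up front and write by computed index" for "reserve up front and push_back in order". A minimal sketch of the two filling strategies with a hypothetical element type, not the PCL feature type:

#include <cstddef>
#include <vector>

struct Feature { float f1; };

// resize + indexed write: every slot is value-initialized first, then overwritten.
std::vector<Feature> fillByIndex (std::size_t n)
{
  std::vector<Feature> out (n);
  for (std::size_t i = 0; i < n; ++i)
    out[i] = Feature{static_cast<float> (i)};
  return out;
}

// reserve + push_back: capacity is allocated once, elements are constructed only
// when appended, and out.size () always reflects what has actually been produced.
std::vector<Feature> fillByAppend (std::size_t n)
{
  std::vector<Feature> out;
  out.reserve (n);
  for (std::size_t i = 0; i < n; ++i)
    out.push_back (Feature{static_cast<float> (i)});
  return out;
}
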
29 changes: 15 additions & 14 deletions features/include/pcl/features/impl/crh.hpp
@@ -96,8 +96,7 @@ pcl::CRHEstimation<PointInT, PointNT, PointOutT>::computeFeature (PointCloudOut

//fill the spatial data vector; in C++ the () after the [nbins] zero-initializes
// ("value-initializes") the array
kiss_fft_scalar * spatial_data = new kiss_fft_scalar[nbins]();

std::vector<kiss_fft_scalar> spatial_data(nbins);

float sum_w = 0, w = 0;
int bin = 0;
@@ -109,29 +108,31 @@ pcl::CRHEstimation<PointInT, PointNT, PointOutT>::computeFeature (PointCloudOut
spatial_data[bin] += w;
}

for (int i = 0; i < nbins; ++i)
spatial_data[i] /= sum_w;
for (auto& data: spatial_data)
data /= sum_w;

kiss_fft_cpx * freq_data = new kiss_fft_cpx[nbins / 2 + 1];
std::vector<kiss_fft_cpx> freq_data(nbins / 2 + 1);
kiss_fftr_cfg mycfg = kiss_fftr_alloc (nbins, 0, nullptr, nullptr);
kiss_fftr (mycfg, spatial_data, freq_data);
kiss_fftr (mycfg, spatial_data.data (), freq_data.data ());

for (auto& data: freq_data)
{
data.r /= freq_data[0].r;
data.i /= freq_data[0].r;
}

output.points.resize (1);
output.width = output.height = 1;

output.points[0].histogram[0] = freq_data[0].r / freq_data[0].r; //dc
output.points[0].histogram[0] = freq_data[0].r; //dc
int k = 1;
for (int i = 1; i < (nbins / 2); i++, k += 2)
{
output.points[0].histogram[k] = freq_data[i].r / freq_data[0].r;
output.points[0].histogram[k + 1] = freq_data[i].i / freq_data[0].r;
output.points[0].histogram[k] = freq_data[i].r;
output.points[0].histogram[k + 1] = freq_data[i].i;
}

output.points[0].histogram[nbins - 1] = freq_data[nbins / 2].r / freq_data[0].r; //nyquist

delete[] spatial_data;
delete[] freq_data;

output.points[0].histogram[nbins - 1] = freq_data[nbins / 2].r; //nyquist
}

#define PCL_INSTANTIATE_CRHEstimation(T,NT,OutT) template class PCL_EXPORTS pcl::CRHEstimation<T,NT,OutT>;
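
The spectrum here is normalized by its DC component, freq_data[0].r. Because the loop writes through freq_data[0] on its first pass, a variant that caches the divisor beforehand keeps every bin divided by the original DC value. A standalone sketch with a stand-in complex type, not the kiss_fft API:

#include <vector>

struct Complex { float r; float i; };   // stand-in for kiss_fft_cpx

// Divide every bin by the original DC magnitude; caching it first ensures the
// divisor is not affected by the in-place update of the first element.
void normalizeByDC (std::vector<Complex>& freq_data)
{
  if (freq_data.empty ())
    return;
  const float dc = freq_data[0].r;
  for (auto& data : freq_data)
  {
    data.r /= dc;
    data.i /= dc;
  }
}
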
41 changes: 17 additions & 24 deletions features/include/pcl/features/impl/cvfh.hpp
@@ -102,57 +102,50 @@ pcl::CVFHEstimation<PointInT, PointNT, PointOutT>::extractEuclideanClustersSmoot
{
if (processed[i])
continue;
processed[i] = true;

std::vector<std::size_t> seed_queue;
std::size_t sq_idx = 0;
seed_queue.push_back (i);
pcl::PointIndices r;
r.header = cloud.header;
auto& seed_queue = r.indices;

processed[i] = true;
seed_queue.push_back (i);

while (sq_idx < seed_queue.size ())
// loop has an emplace_back, making it difficult to use modern loops
for (std::size_t idx = 0; idx != seed_queue.size (); ++idx)
{
// Search for sq_idx
if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
// Search for seed_queue[index]
if (!tree->radiusSearch (seed_queue[idx], tolerance, nn_indices, nn_distances))
{
sq_idx++;
continue;
}

for (std::size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] should be sq_idx
// skip index 0, since nn_indices[0] == idx, worth it?
for (std::size_t j = 1; j < nn_indices.size (); ++j)
{
if (processed[nn_indices[j]]) // Has this point been processed before ?
continue;

//processed[nn_indices[j]] = true;
// [-1;1]
const double dot_p = normals.points[seed_queue[idx]].getNormalVector3fMap().dot(
normals.points[nn_indices[j]].getNormalVector3fMap());

double dot_p = normals.points[seed_queue[sq_idx]].normal[0] * normals.points[nn_indices[j]].normal[0]
+ normals.points[seed_queue[sq_idx]].normal[1] * normals.points[nn_indices[j]].normal[1]
+ normals.points[seed_queue[sq_idx]].normal[2] * normals.points[nn_indices[j]].normal[2];

if (std::abs (std::acos (dot_p)) < eps_angle)
if (std::acos (dot_p) < eps_angle)
{
processed[nn_indices[j]] = true;
seed_queue.push_back (nn_indices[j]);
seed_queue.emplace_back (nn_indices[j]);
}
}

sq_idx++;
}

// If this queue is satisfactory, add to the clusters
if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
{
pcl::PointIndices r;
r.indices.resize (seed_queue.size ());
for (std::size_t j = 0; j < seed_queue.size (); ++j)
r.indices[j] = seed_queue[j];

std::sort (r.indices.begin (), r.indices.end ());
r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());

r.header = cloud.header;
clusters.push_back (r); // We could avoid a copy by working directly in the vector
// Might be better to work directly in the cluster somehow
clusters.emplace_back (std::move(r)); // Trying to avoid a copy by moving
}
}
}
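
The comment in the hunk above explains why an index-based loop is kept: seed_queue grows via emplace_back while it is being walked, and a range-based for holds iterators that push_back can invalidate when the vector reallocates, whereas indices stay valid. A minimal sketch of the same breadth-first region-growing pattern (growRegion and the neighbors callback are hypothetical, not the PCL API):

#include <cstddef>
#include <functional>
#include <vector>

// Breadth-first region growing over indices: queue.size () is re-read on every
// pass, so elements appended during the walk are also visited.
std::vector<int> growRegion (int seed,
                             const std::function<std::vector<int> (int)>& neighbors,
                             std::vector<bool>& processed)
{
  std::vector<int> queue{seed};
  processed[seed] = true;
  for (std::size_t idx = 0; idx != queue.size (); ++idx)
    for (const int n : neighbors (queue[idx]))
      if (!processed[n])
      {
        processed[n] = true;
        queue.push_back (n);   // growing the container we are indexing into
      }
  return queue;
}
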
1 change: 1 addition & 0 deletions features/include/pcl/features/impl/esf.hpp
@@ -54,6 +54,7 @@ pcl::ESFEstimation<PointInT, PointOutT>::computeESF (
{
const int binsize = 64;
unsigned int sample_size = 20000;
// @TODO: Replace with c++ stdlib uniform_random_generator
srand (static_cast<unsigned int> (time (nullptr)));
int maxindex = static_cast<int> (pc.points.size ());

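
The @TODO in the hunk above asks for the C++ standard library random facilities in place of srand/rand. One possible replacement (names are illustrative, not the PCL API) draws uniform point indices with std::mt19937 and std::uniform_int_distribution:

#include <random>

// Draw a uniformly distributed point index in [0, maxindex - 1].
// The engine should be created once and reused across samples.
inline int uniformIndex (std::mt19937& rng, int maxindex)
{
  std::uniform_int_distribution<int> dist (0, maxindex - 1);
  return dist (rng);
}

// usage sketch:
//   std::mt19937 rng (std::random_device{} ());
//   const int index1 = uniformIndex (rng, maxindex);
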
