Transform classic loops to range-based for loops in module segmentation (#2859)

Changes are based on the result of run-clang-tidy -header-filter='.*' -checks='-*,modernize-loop-convert' -fix
Always use a const reference in range-based for loops instead of copying primitive data types
SunBlack authored and SergioRAgostinho committed Mar 13, 2019
1 parent 4b36645 commit 4b7f4de
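As an illustration (not part of the commit itself), the following minimal, self-contained C++ sketch shows the before/after pattern that the modernize-loop-convert check applies throughout this diff; the container and variable names are hypothetical and chosen only for the example.

#include <iostream>
#include <map>
#include <vector>

int main ()
{
  std::map<int, std::vector<int> > label_to_indices = {{1, {0, 2, 4}}, {2, {1, 3}}};

  // Before: explicit iterator loop over the map.
  for (std::map<int, std::vector<int> >::const_iterator itr = label_to_indices.begin ();
       itr != label_to_indices.end (); ++itr)
    std::cout << itr->first << " -> " << itr->second.size () << " indices\n";

  // After: range-based for loop; const auto& avoids copying each key/value pair.
  for (const auto &label_and_indices : label_to_indices)
    std::cout << label_and_indices.first << " -> " << label_and_indices.second.size () << " indices\n";

  std::vector<int> indices = {5, 7, 9};

  // Before: index-based loop over a vector of primitives.
  for (std::size_t i = 0; i < indices.size (); ++i)
    std::cout << indices[i] << "\n";

  // After: the element is bound as a const reference, matching the style used in
  // this commit, even though copying an int would be just as cheap.
  for (const int &index : indices)
    std::cout << index << "\n";

  return 0;
}

The const-reference form is applied uniformly for consistency; for non-primitive element types (pairs, indices structs, point types) it also avoids a per-iteration copy.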
Showing 11 changed files with 77 additions and 88 deletions.
52 changes: 22 additions & 30 deletions segmentation/include/pcl/segmentation/impl/cpc_segmentation.hpp
@@ -136,24 +136,24 @@ pcl::CPCSegmentation<PointT>::applyCuttingPlane (uint32_t depth_levels_left)
}
bool cut_found = false;
// do the following processing for each segment separately
for (SegLabel2ClusterMap::iterator itr = seg_to_edge_points_map.begin (); itr != seg_to_edge_points_map.end (); ++itr)
for (const auto &seg_to_edge_points : seg_to_edge_points_map)
{
// if too small do not process
if (itr->second->size () < min_segment_size_for_cutting_)
if (seg_to_edge_points.second->size () < min_segment_size_for_cutting_)
{
continue;
}

std::vector<double> weights;
weights.resize (itr->second->size ());
for (std::size_t cp = 0; cp < itr->second->size (); ++cp)
weights.resize (seg_to_edge_points.second->size ());
for (std::size_t cp = 0; cp < seg_to_edge_points.second->size (); ++cp)
{
float& cur_weight = itr->second->points[cp].intensity;
float& cur_weight = seg_to_edge_points.second->points[cp].intensity;
cur_weight = cur_weight < concavity_tolerance_threshold_ ? 0 : 1;
weights[cp] = cur_weight;
}

pcl::PointCloud<WeightSACPointType>::Ptr edge_cloud_cluster = itr->second;
pcl::PointCloud<WeightSACPointType>::Ptr edge_cloud_cluster = seg_to_edge_points.second;
pcl::SampleConsensusModelPlane<WeightSACPointType>::Ptr model_p (new pcl::SampleConsensusModelPlane<WeightSACPointType> (edge_cloud_cluster));

WeightedRandomSampleConsensus weight_sac (model_p, seed_resolution_, true);
@@ -195,32 +195,29 @@ pcl::CPCSegmentation<PointT>::applyCuttingPlane (uint32_t depth_levels_left)
euclidean_clusterer.setInputCloud (edge_cloud_cluster);
euclidean_clusterer.setIndices (boost::make_shared <std::vector <int> > (support_indices));
euclidean_clusterer.extract (cluster_indices);
// sv_adjacency_list_[seg_to_edgeID_map[itr->first][point_index]].used_for_cutting = true;
// sv_adjacency_list_[seg_to_edgeID_map[seg_to_edge_points.first][point_index]].used_for_cutting = true;

for (size_t cc = 0; cc < cluster_indices.size (); ++cc)
for (const auto &cluster_index : cluster_indices)
{
// get centroids of vertices
int cluster_concave_pts = 0;
float cluster_score = 0;
// std::cout << "Cluster has " << cluster_indices[cc].indices.size () << " points" << std::endl;
for (size_t cp = 0; cp < cluster_indices[cc].indices.size (); ++cp)
for (const int &current_index : cluster_index.indices)
{
int current_index = cluster_indices[cc].indices[cp];
double index_score;
double index_score = weights[current_index];
if (use_directed_weights_)
index_score = weights[current_index] * 1.414 * (fabsf (plane_normal.dot (edge_cloud_cluster->at (current_index).getNormalVector3fMap ())));
else
index_score = weights[current_index];
index_score *= 1.414 * (fabsf (plane_normal.dot (edge_cloud_cluster->at (current_index).getNormalVector3fMap ())));
cluster_score += index_score;
if (weights[current_index] > 0)
++cluster_concave_pts;
}
// check if the score is below the threshold. If that is the case this segment should not be split
cluster_score = cluster_score * 1.0 / cluster_indices[cc].indices.size ();
cluster_score = cluster_score * 1.0 / cluster_index.indices.size ();
// std::cout << "Cluster score: " << cluster_score << std::endl;
if (cluster_score >= min_cut_score_)
{
cut_support_indices.insert (cut_support_indices.end (), cluster_indices[cc].indices.begin (), cluster_indices[cc].indices.end ());
cut_support_indices.insert (cut_support_indices.end (), cluster_index.indices.begin (), cluster_index.indices.end ());
}
}
if (cut_support_indices.size () == 0)
@@ -242,15 +239,13 @@ pcl::CPCSegmentation<PointT>::applyCuttingPlane (uint32_t depth_levels_left)
}

int number_connections_cut = 0;
for (size_t cs = 0; cs < cut_support_indices.size (); ++cs)
for (const int &point_index : cut_support_indices)
{
const int point_index = cut_support_indices[cs];

if (use_clean_cutting_)
{
// skip edges where both centroids are on one side of the cutting plane
uint32_t source_sv_label = sv_adjacency_list_[boost::source (seg_to_edgeIDs_map[itr->first][point_index], sv_adjacency_list_)];
uint32_t target_sv_label = sv_adjacency_list_[boost::target (seg_to_edgeIDs_map[itr->first][point_index], sv_adjacency_list_)];
uint32_t source_sv_label = sv_adjacency_list_[boost::source (seg_to_edgeIDs_map[seg_to_edge_points.first][point_index], sv_adjacency_list_)];
uint32_t target_sv_label = sv_adjacency_list_[boost::target (seg_to_edgeIDs_map[seg_to_edge_points.first][point_index], sv_adjacency_list_)];
// get centroids of vertices
const pcl::PointXYZRGBA source_centroid = sv_label_to_supervoxel_map_[source_sv_label]->centroid_;
const pcl::PointXYZRGBA target_centroid = sv_label_to_supervoxel_map_[target_sv_label]->centroid_;
@@ -260,11 +255,11 @@ pcl::CPCSegmentation<PointT>::applyCuttingPlane (uint32_t depth_levels_left)
continue;
}
}
sv_adjacency_list_[seg_to_edgeIDs_map[itr->first][point_index]].used_for_cutting = true;
if (sv_adjacency_list_[seg_to_edgeIDs_map[itr->first][point_index]].is_valid)
sv_adjacency_list_[seg_to_edgeIDs_map[seg_to_edge_points.first][point_index]].used_for_cutting = true;
if (sv_adjacency_list_[seg_to_edgeIDs_map[seg_to_edge_points.first][point_index]].is_valid)
{
++number_connections_cut;
sv_adjacency_list_[seg_to_edgeIDs_map[itr->first][point_index]].is_valid = false;
sv_adjacency_list_[seg_to_edgeIDs_map[seg_to_edge_points.first][point_index]].is_valid = false;
}
}
// std::cout << "We cut " << number_connections_cut << " connections" << std::endl;
@@ -333,15 +328,12 @@ pcl::CPCSegmentation<PointT>::WeightedRandomSampleConsensus::computeModel (int)
sac_model_->selectWithinDistance (model_coefficients, threshold_, *current_inliers);
double current_score = 0;
Eigen::Vector3f plane_normal (model_coefficients[0], model_coefficients[1], model_coefficients[2]);
for (size_t i = 0; i < current_inliers->size (); ++i)
for (const int &current_index : *current_inliers)
{
int current_index = current_inliers->at (i);
double index_score;
double index_score = weights_[current_index];
if (use_directed_weights_)
// the sqrt(2) factor was used in the paper and was meant for making the scores better comparable between directed and undirected weights
index_score = weights_[current_index] * 1.414 * (fabsf (plane_normal.dot (point_cloud_ptr_->at (current_index).getNormalVector3fMap ())));
else
index_score = weights_[current_index];
index_score *= 1.414 * (fabsf (plane_normal.dot (point_cloud_ptr_->at (current_index).getNormalVector3fMap ())));

current_score += index_score;
}
@@ -415,9 +415,9 @@ pcl::CrfSegmentation<PointT>::segmentPoints (pcl::PointCloud<pcl::PointXYZRGBL>


std::cout << "labels size: " << labels.size () << std::endl;
for (size_t i = 0; i < labels.size (); i++)
for (const int &label : labels)
{
std::cout << labels[i] << std::endl;
std::cout << label << std::endl;
}

}
@@ -143,16 +143,16 @@ pcl::extractEuclideanClusters (const PointCloud<PointT> &cloud,
std::vector<int> nn_indices;
std::vector<float> nn_distances;
// Process all points in the indices vector
for (int i = 0; i < static_cast<int> (indices.size ()); ++i)
for (const int &index : indices)
{
if (processed[indices[i]])
if (processed[index])
continue;

std::vector<int> seed_queue;
int sq_idx = 0;
seed_queue.push_back (indices[i]);
seed_queue.push_back (index);

processed[indices[i]] = true;
processed[index] = true;

while (sq_idx < static_cast<int> (seed_queue.size ()))
{
@@ -144,8 +144,8 @@ pcl::LabeledEuclideanClusterExtraction<PointT>::extract (std::vector<std::vector
extractLabeledEuclideanClusters (*input_, tree_, static_cast<float> (cluster_tolerance_), labeled_clusters, min_pts_per_cluster_, max_pts_per_cluster_, max_label_);

// Sort the clusters based on their size (largest one first)
for (int i = 0; i < static_cast<int> (labeled_clusters.size ()); i++)
std::sort (labeled_clusters[i].rbegin (), labeled_clusters[i].rend (), comparePointClusters);
for (auto &labeled_cluster : labeled_clusters)
std::sort (labeled_cluster.rbegin (), labeled_cluster.rend (), comparePointClusters);

deinitCompute ();
}
@@ -161,10 +161,10 @@ pcl::GrabCut<PointT>::setBackgroundPointsIndices (const PointIndicesConstPtr &in

std::fill (trimap_.begin (), trimap_.end (), TrimapBackground);
std::fill (hard_segmentation_.begin (), hard_segmentation_.end (), SegmentationBackground);
for (std::vector<int>::const_iterator idx = indices->indices.begin (); idx != indices->indices.end (); ++idx)
for (const int &index : indices->indices)
{
trimap_[*idx] = TrimapUnknown;
hard_segmentation_[*idx] = SegmentationForeground;
trimap_[index] = TrimapUnknown;
hard_segmentation_[index] = SegmentationForeground;
}

if (!initialized_)
@@ -245,17 +245,17 @@ template <typename PointT> void
pcl::GrabCut<PointT>::setTrimap (const PointIndicesConstPtr &indices, segmentation::grabcut::TrimapValue t)
{
using namespace pcl::segmentation::grabcut;
for (auto idx = indices->indices.cbegin (); idx != indices->indices.cend (); ++idx)
trimap_[*idx] = t;
for (const int &index : indices->indices)
trimap_[index] = t;

// Immediately set the hard segmentation as well so that the display will update.
if (t == TrimapForeground)
for (auto idx = indices->indices.cbegin (); idx != indices->indices.cend (); ++idx)
hard_segmentation_[*idx] = SegmentationForeground;
for (const int &index : indices->indices)
hard_segmentation_[index] = SegmentationForeground;
else
if (t == TrimapBackground)
for (auto idx = indices->indices.cbegin (); idx != indices->indices.cend (); ++idx)
hard_segmentation_[*idx] = SegmentationBackground;
for (const int &index : indices->indices)
hard_segmentation_[index] = SegmentationBackground;
}

template <typename PointT> void
15 changes: 7 additions & 8 deletions segmentation/include/pcl/segmentation/impl/lccp_segmentation.hpp
@@ -114,9 +114,9 @@ pcl::LCCPSegmentation<PointT>::relabelCloud (pcl::PointCloud<pcl::PointXYZL> &la
if (grouping_data_valid_)
{
// Relabel all Points in cloud with new labels
for (auto voxel_itr = labeled_cloud_arg.begin (); voxel_itr != labeled_cloud_arg.end (); ++voxel_itr)
for (auto &voxel : labeled_cloud_arg)
{
voxel_itr->label = sv_label_to_seg_label_map_[voxel_itr->label];
voxel.label = sv_label_to_seg_label_map_[voxel.label];
}
}
else
@@ -236,9 +236,9 @@ pcl::LCCPSegmentation<PointT>::mergeSmallSegments ()
}

// Erase filtered Segments from segment map
for (auto filtered_ID_itr = filteredSegLabels.cbegin (); filtered_ID_itr != filteredSegLabels.cend (); ++filtered_ID_itr)
for (const unsigned int &filteredSegLabel : filteredSegLabels)
{
seg_label_to_sv_list_map_.erase (*filtered_ID_itr);
seg_label_to_sv_list_map_.erase (filteredSegLabel);
}

// After filtered Segments are deleted, compute completely new adjacency map
@@ -272,11 +272,10 @@ pcl::LCCPSegmentation<PointT>::prepareSegmentation (const std::map<uint32_t, typ
}

// Add all edges
for (std::multimap<uint32_t, uint32_t>::const_iterator sv_neighbors_itr = label_adjaceny_arg.begin (); sv_neighbors_itr != label_adjaceny_arg.end ();
++sv_neighbors_itr)
for (const auto &sv_neighbors_itr : label_adjaceny_arg)
{
const uint32_t& sv_label = sv_neighbors_itr->first;
const uint32_t& neighbor_label = sv_neighbors_itr->second;
const uint32_t& sv_label = sv_neighbors_itr.first;
const uint32_t& neighbor_label = sv_neighbors_itr.second;

VertexID u = label_ID_map[sv_label];
VertexID v = label_ID_map[neighbor_label];
@@ -134,11 +134,11 @@ pcl::OrganizedMultiPlaneSegmentation<PointT, PointNT, PointLT>::segment (std::ve
model.values.resize (4);

// Fit Planes to each cluster
for (size_t i = 0; i < label_indices.size (); i++)
for (const auto &label_index : label_indices)
{
if (static_cast<unsigned> (label_indices[i].indices.size ()) > min_inliers_)
if (static_cast<unsigned> (label_index.indices.size ()) > min_inliers_)
{
pcl::computeMeanAndCovarianceMatrix (*input_, label_indices[i].indices, clust_cov, clust_centroid);
pcl::computeMeanAndCovarianceMatrix (*input_, label_index.indices, clust_cov, clust_centroid);
Eigen::Vector4f plane_params;

EIGEN_ALIGN16 Eigen::Vector3f::Scalar eigen_value;
@@ -174,7 +174,7 @@ pcl::OrganizedMultiPlaneSegmentation<PointT, PointNT, PointLT>::segment (std::ve
model.values[2] = plane_params[2];
model.values[3] = plane_params[3];
model_coefficients.push_back (model);
inlier_indices.push_back (label_indices[i]);
inlier_indices.push_back (label_index);
centroids.push_back (clust_centroid);
covariances.push_back (clust_cov);
}
@@ -62,9 +62,8 @@ pcl::seededHueSegmentation (const PointCloud<PointXYZRGB> &cloud,
std::vector<float> nn_distances;

// Process all points in the indices vector
for (size_t k = 0; k < indices_in.indices.size (); ++k)
for (const int &i : indices_in.indices)
{
int i = indices_in.indices[k];
if (processed[i])
continue;

@@ -111,8 +110,8 @@ pcl::seededHueSegmentation (const PointCloud<PointXYZRGB> &cloud,
sq_idx++;
}
// Copy the seed queue into the output indices
for (size_t l = 0; l < seed_queue.size (); ++l)
indices_out.indices.push_back(seed_queue[l]);
for (const int &l : seed_queue)
indices_out.indices.push_back(l);
}
// This is purely esthetical, can be removed for speed purposes
std::sort (indices_out.indices.begin (), indices_out.indices.end ());
@@ -138,9 +137,8 @@ pcl::seededHueSegmentation (const PointCloud<PointXYZRGB> &cloud,
std::vector<float> nn_distances;

// Process all points in the indices vector
for (size_t k = 0; k < indices_in.indices.size (); ++k)
for (const int &i : indices_in.indices)
{
int i = indices_in.indices[k];
if (processed[i])
continue;

@@ -186,8 +184,8 @@ pcl::seededHueSegmentation (const PointCloud<PointXYZRGB> &cloud,
sq_idx++;
}
// Copy the seed queue into the output indices
for (size_t l = 0; l < seed_queue.size (); ++l)
indices_out.indices.push_back(seed_queue[l]);
for (const int &l : seed_queue)
indices_out.indices.push_back(l);
}
// This is purely esthetical, can be removed for speed purposes
std::sort (indices_out.indices.begin (), indices_out.indices.end ());
@@ -403,10 +403,10 @@ pcl::SupervoxelClustering<PointT>::selectInitialSupervoxelSeeds (std::vector<int
// This is 1/20th of the number of voxels which fit in a planar slice through search volume
// Area of planar slice / area of voxel side. (Note: This is smaller than the value mentioned in the original paper)
float min_points = 0.05f * (search_radius)*(search_radius) * 3.1415926536f / (resolution_*resolution_);
for (size_t i = 0; i < seed_indices_orig.size (); ++i)
for (const int &index_orig : seed_indices_orig)
{
int num = voxel_kdtree_->radiusSearch (seed_indices_orig[i], search_radius , neighbors, sqr_distances);
int min_index = seed_indices_orig[i];
int num = voxel_kdtree_->radiusSearch (index_orig, search_radius , neighbors, sqr_distances);
int min_index = index_orig;
if ( num > min_points)
{
seed_indices.push_back (min_index);
@@ -497,12 +497,12 @@ pcl::SupervoxelClustering<PointT>::getSupervoxelAdjacencyList (VoxelAdjacencyLis
uint32_t label = sv_itr->getLabel ();
std::set<uint32_t> neighbor_labels;
sv_itr->getNeighborLabels (neighbor_labels);
for (std::set<uint32_t>::iterator label_itr = neighbor_labels.begin (); label_itr != neighbor_labels.end (); ++label_itr)
for (const unsigned int &neighbor_label : neighbor_labels)
{
bool edge_added;
EdgeID edge;
VoxelID u = (label_ID_map.find (label))->second;
VoxelID v = (label_ID_map.find (*label_itr))->second;
VoxelID v = (label_ID_map.find (neighbor_label))->second;
boost::tie (edge, edge_added) = add_edge (u,v,adjacency_list_arg);
//Calc distance between centers, set as edge weight
if (edge_added)
@@ -513,7 +513,7 @@ pcl::SupervoxelClustering<PointT>::getSupervoxelAdjacency (std::multimap<uint32_

for (typename HelperListT::const_iterator neighb_itr = supervoxel_helpers_.cbegin (); neighb_itr != supervoxel_helpers_.cend (); ++neighb_itr)
{
if (neighb_itr->getLabel () == (*label_itr))
if (neighb_itr->getLabel () == neighbor_label)
{
neighb_centroid_data = neighb_itr->getCentroid ();
break;
@@ -539,8 +539,8 @@ pcl::SupervoxelClustering<PointT>::getSupervoxelAdjacency (std::multimap<uint32_
uint32_t label = sv_itr->getLabel ();
std::set<uint32_t> neighbor_labels;
sv_itr->getNeighborLabels (neighbor_labels);
for (std::set<uint32_t>::iterator label_itr = neighbor_labels.begin (); label_itr != neighbor_labels.end (); ++label_itr)
label_adjacency.insert (std::pair<uint32_t,uint32_t> (label, *label_itr) );
for (const unsigned int &neighbor_label : neighbor_labels)
label_adjacency.insert (std::pair<uint32_t,uint32_t> (label, neighbor_label) );
//if (neighbor_labels.size () == 0)
// std::cout << label<<"(size="<<sv_itr->size () << ") has "<<neighbor_labels.size () << "\n";
}