Point Cloud Library (PCL)  1.9.0-dev
our_cvfh.hpp
/*
 * Software License Agreement (BSD License)
 *
 * Point Cloud Library (PCL) - www.pointclouds.org
 * Copyright (c) 2010-2011, Willow Garage, Inc.
 * Copyright (c) 2012-, Open Perception, Inc.
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above
 *    copyright notice, this list of conditions and the following
 *    disclaimer in the documentation and/or other materials provided
 *    with the distribution.
 *  * Neither the name of the copyright holder(s) nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 * $Id: cvfh.hpp 5311 2012-03-26 22:02:04Z aaldoma $
 *
 */

#ifndef PCL_FEATURES_IMPL_OURCVFH_H_
#define PCL_FEATURES_IMPL_OURCVFH_H_

#include <pcl/features/our_cvfh.h>
#include <pcl/features/vfh.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/pfh_tools.h>
#include <pcl/common/transforms.h>

//////////////////////////////////////////////////////////////////////////////////////////////
template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::OURCVFHEstimation<PointInT, PointNT, PointOutT>::compute (PointCloudOut &output)
{
  if (!Feature<PointInT, PointOutT>::initCompute ())
  {
    output.width = output.height = 0;
    output.points.clear ();
    return;
  }
  // Resize the output dataset
  // Important! We should only allocate precisely how many elements we will need, otherwise
  // we risk at pre-allocating too much memory which could lead to bad_alloc
  // (see http://dev.pointclouds.org/issues/657)
  output.width = output.height = 1;
  output.points.resize (1);

  // Perform the actual feature computation
  computeFeature (output);

  Feature<PointInT, PointOutT>::deinitCompute ();
}

//////////////////////////////////////////////////////////////////////////////////////////////
template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::OURCVFHEstimation<PointInT, PointNT, PointOutT>::extractEuclideanClustersSmooth (const pcl::PointCloud<pcl::PointNormal> &cloud,
                                                                                      const pcl::PointCloud<pcl::PointNormal> &normals,
                                                                                      float tolerance,
                                                                                      const pcl::search::Search<pcl::PointNormal>::Ptr &tree,
                                                                                      std::vector<pcl::PointIndices> &clusters, double eps_angle,
                                                                                      unsigned int min_pts_per_cluster,
                                                                                      unsigned int max_pts_per_cluster)
{
  if (tree->getInputCloud ()->points.size () != cloud.points.size ())
  {
    PCL_ERROR ("[pcl::extractEuclideanClusters] Tree built for a different point cloud dataset (%lu) than the input cloud (%lu)!\n", tree->getInputCloud ()->points.size (), cloud.points.size ());
    return;
  }
  if (cloud.points.size () != normals.points.size ())
  {
    PCL_ERROR ("[pcl::extractEuclideanClusters] Number of points in the input point cloud (%lu) different than normals (%lu)!\n", cloud.points.size (), normals.points.size ());
    return;
  }

  // Create a bool vector of processed point indices, and initialize it to false
  std::vector<bool> processed (cloud.points.size (), false);

  std::vector<int> nn_indices;
  std::vector<float> nn_distances;
  // Process all points in the indices vector
  for (int i = 0; i < static_cast<int> (cloud.points.size ()); ++i)
  {
    if (processed[i])
      continue;

    std::vector<unsigned int> seed_queue;
    int sq_idx = 0;
    seed_queue.push_back (i);

    processed[i] = true;

    while (sq_idx < static_cast<int> (seed_queue.size ()))
    {
      // Search for sq_idx
      if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
      {
        sq_idx++;
        continue;
      }

      for (size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] should be sq_idx
      {
        if (processed[nn_indices[j]]) // Has this point been processed before ?
          continue;

        //processed[nn_indices[j]] = true;
        // [-1;1]

        double dot_p = normals.points[seed_queue[sq_idx]].normal[0] * normals.points[nn_indices[j]].normal[0]
                     + normals.points[seed_queue[sq_idx]].normal[1] * normals.points[nn_indices[j]].normal[1]
                     + normals.points[seed_queue[sq_idx]].normal[2] * normals.points[nn_indices[j]].normal[2];

        if (fabs (acos (dot_p)) < eps_angle)
        {
          processed[nn_indices[j]] = true;
          seed_queue.push_back (nn_indices[j]);
        }
      }

      sq_idx++;
    }

    // If this queue is satisfactory, add to the clusters
    if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
    {
      pcl::PointIndices r;
      r.indices.resize (seed_queue.size ());
      for (size_t j = 0; j < seed_queue.size (); ++j)
        r.indices[j] = seed_queue[j];

      std::sort (r.indices.begin (), r.indices.end ());
      r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());

      r.header = cloud.header;
      clusters.push_back (r); // We could avoid a copy by working directly in the vector
    }
  }
}

//////////////////////////////////////////////////////////////////////////////////////////////
template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::OURCVFHEstimation<PointInT, PointNT, PointOutT>::filterNormalsWithHighCurvature (const pcl::PointCloud<PointNT> &cloud,
                                                                                      std::vector<int> &indices_to_use,
                                                                                      std::vector<int> &indices_out, std::vector<int> &indices_in,
                                                                                      float threshold)
{
  indices_out.resize (cloud.points.size ());
  indices_in.resize (cloud.points.size ());

  size_t in, out;
  in = out = 0;

  for (int i = 0; i < static_cast<int> (indices_to_use.size ()); i++)
  {
    if (cloud.points[indices_to_use[i]].curvature > threshold)
    {
      indices_out[out] = indices_to_use[i];
      out++;
    }
    else
    {
      indices_in[in] = indices_to_use[i];
      in++;
    }
  }

  indices_out.resize (out);
  indices_in.resize (in);
}

template<typename PointInT, typename PointNT, typename PointOutT> bool
pcl::OURCVFHEstimation<PointInT, PointNT, PointOutT>::sgurf (Eigen::Vector3f & centroid, Eigen::Vector3f & normal_centroid,
                                                             PointInTPtr & processed, std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > & transformations,
                                                             PointInTPtr & grid, pcl::PointIndices & indices)
{

  Eigen::Vector3f plane_normal;
  plane_normal[0] = -centroid[0];
  plane_normal[1] = -centroid[1];
  plane_normal[2] = -centroid[2];
  Eigen::Vector3f z_vector = Eigen::Vector3f::UnitZ ();
  plane_normal.normalize ();
  Eigen::Vector3f axis = plane_normal.cross (z_vector);
  double rotation = -asin (axis.norm ());
  axis.normalize ();

  Eigen::Affine3f transformPC (Eigen::AngleAxisf (static_cast<float> (rotation), axis));

  grid->points.resize (processed->points.size ());
  for (size_t k = 0; k < processed->points.size (); k++)
    grid->points[k].getVector4fMap () = processed->points[k].getVector4fMap ();

  pcl::transformPointCloud (*grid, *grid, transformPC);

  Eigen::Vector4f centroid4f (centroid[0], centroid[1], centroid[2], 0);
  Eigen::Vector4f normal_centroid4f (normal_centroid[0], normal_centroid[1], normal_centroid[2], 0);

  centroid4f = transformPC * centroid4f;
  normal_centroid4f = transformPC * normal_centroid4f;

  Eigen::Vector3f centroid3f (centroid4f[0], centroid4f[1], centroid4f[2]);

  Eigen::Vector4f farthest_away;
  pcl::getMaxDistance (*grid, indices.indices, centroid4f, farthest_away);
  farthest_away[3] = 0;

  float max_dist = (farthest_away - centroid4f).norm ();

  pcl::demeanPointCloud (*grid, centroid4f, *grid);

  Eigen::Matrix4f center_mat;
  center_mat.setIdentity (4, 4);
  center_mat (0, 3) = -centroid4f[0];
  center_mat (1, 3) = -centroid4f[1];
  center_mat (2, 3) = -centroid4f[2];

  Eigen::Matrix3f scatter;
  scatter.setZero ();
  float sum_w = 0.f;

  //for (int k = 0; k < static_cast<int> (grid->points.size ()); k++)
  for (int k = 0; k < static_cast<int> (indices.indices.size ()); k++)
  {
    Eigen::Vector3f pvector = grid->points[indices.indices[k]].getVector3fMap ();
    float d_k = (pvector).norm ();
    float w = (max_dist - d_k);
    Eigen::Vector3f diff = (pvector);
    Eigen::Matrix3f mat = diff * diff.transpose ();
    scatter = scatter + mat * w;
    sum_w += w;
  }

  scatter /= sum_w;

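  // The columns of V from the SVD of the weighted scatter matrix are the candidate x, y and z
  // axes of the semi-global reference frame; their signs are disambiguated below from the
  // distribution of the points along each axis.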
  Eigen::JacobiSVD <Eigen::MatrixXf> svd (scatter, Eigen::ComputeFullV);
  Eigen::Vector3f evx = svd.matrixV ().col (0);
  Eigen::Vector3f evy = svd.matrixV ().col (1);
  Eigen::Vector3f evz = svd.matrixV ().col (2);
  Eigen::Vector3f evxminus = evx * -1;
  Eigen::Vector3f evyminus = evy * -1;
  Eigen::Vector3f evzminus = evz * -1;

  float s_xplus, s_xminus, s_yplus, s_yminus;
  s_xplus = s_xminus = s_yplus = s_yminus = 0.f;

  //disambiguate rf using all points
  for (int k = 0; k < static_cast<int> (grid->points.size ()); k++)
  {
    Eigen::Vector3f pvector = grid->points[k].getVector3fMap ();
    float dist_x, dist_y;
    dist_x = std::abs (evx.dot (pvector));
    dist_y = std::abs (evy.dot (pvector));

    if ((pvector).dot (evx) >= 0)
      s_xplus += dist_x;
    else
      s_xminus += dist_x;

    if ((pvector).dot (evy) >= 0)
      s_yplus += dist_y;
    else
      s_yminus += dist_y;

  }

  if (s_xplus < s_xminus)
    evx = evxminus;

  if (s_yplus < s_yminus)
    evy = evyminus;

  //select the axis that could be disambiguated more easily
  float fx, fy;
  float max_x = static_cast<float> (std::max (s_xplus, s_xminus));
  float min_x = static_cast<float> (std::min (s_xplus, s_xminus));
  float max_y = static_cast<float> (std::max (s_yplus, s_yminus));
  float min_y = static_cast<float> (std::min (s_yplus, s_yminus));

  fx = (min_x / max_x);
  fy = (min_y / max_y);

  Eigen::Vector3f normal3f = Eigen::Vector3f (normal_centroid4f[0], normal_centroid4f[1], normal_centroid4f[2]);
  if (normal3f.dot (evz) < 0)
    evz = evzminus;

  //if fx/y close to 1, it was hard to disambiguate
  //what if both are equally easy or difficult to disambiguate, namely fy == fx or very close

  float max_axis = std::max (fx, fy);
  float min_axis = std::min (fx, fy);

  if ((min_axis / max_axis) > axis_ratio_)
  {
    PCL_WARN ("Both axes are equally easy/difficult to disambiguate\n");

    Eigen::Vector3f evy_copy = evy;
    Eigen::Vector3f evxminus = evx * -1;
    Eigen::Vector3f evyminus = evy * -1;

    if (min_axis > min_axis_value_)
    {
      //combination of all possibilities
      evy = evx.cross (evz);
      Eigen::Matrix4f trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

      evx = evxminus;
      evy = evx.cross (evz);
      trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

      evx = evy_copy;
      evy = evx.cross (evz);
      trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

      evx = evyminus;
      evy = evx.cross (evz);
      trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

    }
    else
    {
      //1-st case (evx selected)
      evy = evx.cross (evz);
      Eigen::Matrix4f trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

      //2-nd case (evy selected)
      evx = evy_copy;
      evy = evx.cross (evz);
      trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);
    }
  }
  else
  {
    if (fy < fx)
    {
      evx = evy;
      fx = fy;
    }

    evy = evx.cross (evz);
    Eigen::Matrix4f trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
    transformations.push_back (trans);

  }

  return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////
template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::OURCVFHEstimation<PointInT, PointNT, PointOutT>::computeRFAndShapeDistribution (PointInTPtr & processed, PointCloudOut & output,
                                                                                     std::vector<pcl::PointIndices> & cluster_indices)
{
  PointCloudOut ourcvfh_output;

  cluster_axes_.clear ();
  cluster_axes_.resize (centroids_dominant_orientations_.size ());

  for (size_t i = 0; i < centroids_dominant_orientations_.size (); i++)
  {

    std::vector < Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > transformations;
    PointInTPtr grid (new pcl::PointCloud<PointInT>);
    sgurf (centroids_dominant_orientations_[i], dominant_normals_[i], processed, transformations, grid, cluster_indices[i]);

    // Make a note of how many transformations correspond to each cluster
    cluster_axes_[i] = transformations.size ();

    for (size_t t = 0; t < transformations.size (); t++)
    {

      pcl::transformPointCloud (*processed, *grid, transformations[t]);
      transforms_.push_back (transformations[t]);
      valid_transforms_.push_back (true);

      std::vector < Eigen::VectorXf > quadrants (8);
      int size_hists = 13;
      int num_hists = 8;
      for (int k = 0; k < num_hists; k++)
        quadrants[k].setZero (size_hists);

      Eigen::Vector4f centroid_p;
      centroid_p.setZero ();
      Eigen::Vector4f max_pt;
      pcl::getMaxDistance (*grid, centroid_p, max_pt);
      max_pt[3] = 0;
      double distance_normalization_factor = (centroid_p - max_pt).norm ();

      float hist_incr;
      if (normalize_bins_)
        hist_incr = 100.0f / static_cast<float> (grid->points.size () - 1);
      else
        hist_incr = 1.0f;

      float * weights = new float[num_hists];
      float sigma = 0.01f; //1cm
      float sigma_sq = sigma * sigma;

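      // Soft-assign each point's distance-histogram contribution to the eight octants of the
      // reference frame: wx, wy and wz below blend the weight between the two half-spaces along
      // each axis, so points close to a coordinate plane contribute to both sides.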
      for (int k = 0; k < static_cast<int> (grid->points.size ()); k++)
      {
        Eigen::Vector4f p = grid->points[k].getVector4fMap ();
        p[3] = 0.f;
        float d = p.norm ();

        //compute weight for all octants
        float wx = 1.f - std::exp (-((p[0] * p[0]) / (2.f * sigma_sq))); //how is the weight distributed among two semi-cubes
        float wy = 1.f - std::exp (-((p[1] * p[1]) / (2.f * sigma_sq)));
        float wz = 1.f - std::exp (-((p[2] * p[2]) / (2.f * sigma_sq)));

        //distribute the weights using the x-coordinate
        if (p[0] >= 0)
        {
          for (size_t ii = 0; ii <= 3; ii++)
            weights[ii] = 0.5f - wx * 0.5f;

          for (size_t ii = 4; ii <= 7; ii++)
            weights[ii] = 0.5f + wx * 0.5f;
        }
        else
        {
          for (size_t ii = 0; ii <= 3; ii++)
            weights[ii] = 0.5f + wx * 0.5f;

          for (size_t ii = 4; ii <= 7; ii++)
            weights[ii] = 0.5f - wx * 0.5f;
        }

        //distribute the weights using the y-coordinate
        if (p[1] >= 0)
        {
          for (size_t ii = 0; ii <= 1; ii++)
            weights[ii] *= 0.5f - wy * 0.5f;
          for (size_t ii = 4; ii <= 5; ii++)
            weights[ii] *= 0.5f - wy * 0.5f;

          for (size_t ii = 2; ii <= 3; ii++)
            weights[ii] *= 0.5f + wy * 0.5f;

          for (size_t ii = 6; ii <= 7; ii++)
            weights[ii] *= 0.5f + wy * 0.5f;
        }
        else
        {
          for (size_t ii = 0; ii <= 1; ii++)
            weights[ii] *= 0.5f + wy * 0.5f;
          for (size_t ii = 4; ii <= 5; ii++)
            weights[ii] *= 0.5f + wy * 0.5f;

          for (size_t ii = 2; ii <= 3; ii++)
            weights[ii] *= 0.5f - wy * 0.5f;

          for (size_t ii = 6; ii <= 7; ii++)
            weights[ii] *= 0.5f - wy * 0.5f;
        }

        //distribute the weights using the z-coordinate
        if (p[2] >= 0)
        {
          for (size_t ii = 0; ii <= 7; ii += 2)
            weights[ii] *= 0.5f - wz * 0.5f;

          for (size_t ii = 1; ii <= 7; ii += 2)
            weights[ii] *= 0.5f + wz * 0.5f;

        }
        else
        {
          for (size_t ii = 0; ii <= 7; ii += 2)
            weights[ii] *= 0.5f + wz * 0.5f;

          for (size_t ii = 1; ii <= 7; ii += 2)
            weights[ii] *= 0.5f - wz * 0.5f;
        }

        int h_index = (d <= 0) ? 0 : std::ceil (size_hists * (d / distance_normalization_factor)) - 1;
        /* from http://www.pcl-users.org/OUR-CVFH-problem-td4028436.html
           h_index will be 13 when d is computed on the farthest away point.

           adding the following after computing h_index fixes the problem:
         */
        if (h_index > 12)
          h_index = 12;
        for (int j = 0; j < num_hists; j++)
          quadrants[j][h_index] += hist_incr * weights[j];

      }

      //copy to the cvfh signature
      PointCloudOut vfh_signature;
      vfh_signature.points.resize (1);
      vfh_signature.width = vfh_signature.height = 1;
      for (int d = 0; d < 308; ++d)
        vfh_signature.points[0].histogram[d] = output.points[i].histogram[d];

      int pos = 45 * 3;
      for (int k = 0; k < num_hists; k++)
      {
        for (int ii = 0; ii < size_hists; ii++, pos++)
        {
          vfh_signature.points[0].histogram[pos] = quadrants[k][ii];
        }
      }

      ourcvfh_output.points.push_back (vfh_signature.points[0]);
      ourcvfh_output.width = ourcvfh_output.points.size ();
      delete[] weights;
    }
  }

  if (ourcvfh_output.points.size ())
  {
    ourcvfh_output.height = 1;
  }
  output = ourcvfh_output;
}

//////////////////////////////////////////////////////////////////////////////////////////////
template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::OURCVFHEstimation<PointInT, PointNT, PointOutT>::computeFeature (PointCloudOut &output)
{
  if (refine_clusters_ <= 0.f)
    refine_clusters_ = 1.f;

  // Check if input was set
  if (!normals_)
  {
    PCL_ERROR ("[pcl::%s::computeFeature] No input dataset containing normals was given!\n", getClassName ().c_str ());
    output.width = output.height = 0;
    output.points.clear ();
    return;
  }
  if (normals_->points.size () != surface_->points.size ())
  {
    PCL_ERROR ("[pcl::%s::computeFeature] The number of points in the input dataset differs from the number of points in the dataset containing the normals!\n", getClassName ().c_str ());
    output.width = output.height = 0;
    output.points.clear ();
    return;
  }

  centroids_dominant_orientations_.clear ();
  clusters_.clear ();
  transforms_.clear ();
  dominant_normals_.clear ();

  // ---[ Step 0: remove normals with high curvature
  std::vector<int> indices_out;
  std::vector<int> indices_in;
  filterNormalsWithHighCurvature (*normals_, *indices_, indices_out, indices_in, curv_threshold_);

  pcl::PointCloud<pcl::PointNormal>::Ptr normals_filtered_cloud (new pcl::PointCloud<pcl::PointNormal> ());
  normals_filtered_cloud->width = static_cast<uint32_t> (indices_in.size ());
  normals_filtered_cloud->height = 1;
  normals_filtered_cloud->points.resize (normals_filtered_cloud->width);

  std::vector<int> indices_from_nfc_to_indices;
  indices_from_nfc_to_indices.resize (indices_in.size ());

  for (size_t i = 0; i < indices_in.size (); ++i)
  {
    normals_filtered_cloud->points[i].x = surface_->points[indices_in[i]].x;
    normals_filtered_cloud->points[i].y = surface_->points[indices_in[i]].y;
    normals_filtered_cloud->points[i].z = surface_->points[indices_in[i]].z;
    //normals_filtered_cloud->points[i].getNormalVector4fMap() = normals_->points[indices_in[i]].getNormalVector4fMap();
    indices_from_nfc_to_indices[i] = indices_in[i];
  }

  std::vector<pcl::PointIndices> clusters;

  if (normals_filtered_cloud->points.size () >= min_points_)
  {
    //recompute normals and use them for clustering
    {
      KdTreePtr normals_tree_filtered (new pcl::search::KdTree<pcl::PointNormal> (false));
      normals_tree_filtered->setInputCloud (normals_filtered_cloud);
      pcl::NormalEstimation<pcl::PointNormal, pcl::PointNormal> n3d;
      n3d.setRadiusSearch (radius_normals_);
      n3d.setSearchMethod (normals_tree_filtered);
      n3d.setInputCloud (normals_filtered_cloud);
      n3d.compute (*normals_filtered_cloud);
    }

    KdTreePtr normals_tree (new pcl::search::KdTree<pcl::PointNormal> (false));
    normals_tree->setInputCloud (normals_filtered_cloud);

    extractEuclideanClustersSmooth (*normals_filtered_cloud, *normals_filtered_cloud, cluster_tolerance_, normals_tree, clusters,
                                    eps_angle_threshold_, static_cast<unsigned int> (min_points_));

    std::vector<pcl::PointIndices> clusters_filtered;
    int cluster_filtered_idx = 0;
    for (size_t i = 0; i < clusters.size (); i++)
    {

      pcl::PointIndices pi;
      pcl::PointIndices pi_cvfh;
      pcl::PointIndices pi_filtered;

      clusters_.push_back (pi);
      clusters_filtered.push_back (pi_filtered);

      Eigen::Vector4f avg_normal = Eigen::Vector4f::Zero ();
      Eigen::Vector4f avg_centroid = Eigen::Vector4f::Zero ();

      for (size_t j = 0; j < clusters[i].indices.size (); j++)
      {
        avg_normal += normals_filtered_cloud->points[clusters[i].indices[j]].getNormalVector4fMap ();
        avg_centroid += normals_filtered_cloud->points[clusters[i].indices[j]].getVector4fMap ();
      }

      avg_normal /= static_cast<float> (clusters[i].indices.size ());
      avg_centroid /= static_cast<float> (clusters[i].indices.size ());
      avg_normal.normalize ();

      Eigen::Vector3f avg_norm (avg_normal[0], avg_normal[1], avg_normal[2]);
      Eigen::Vector3f avg_dominant_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);

      for (size_t j = 0; j < clusters[i].indices.size (); j++)
      {
        //decide if normal should be added
        double dot_p = avg_normal.dot (normals_filtered_cloud->points[clusters[i].indices[j]].getNormalVector4fMap ());
        if (fabs (acos (dot_p)) < (eps_angle_threshold_ * refine_clusters_))
        {
          clusters_[cluster_filtered_idx].indices.push_back (indices_from_nfc_to_indices[clusters[i].indices[j]]);
          clusters_filtered[cluster_filtered_idx].indices.push_back (clusters[i].indices[j]);
        }
      }

      //remove last cluster if no points found...
      if (clusters_[cluster_filtered_idx].indices.size () == 0)
      {
        clusters_.pop_back ();
        clusters_filtered.pop_back ();
      }
      else
        cluster_filtered_idx++;
    }

    clusters = clusters_filtered;

  }

  pcl::VFHEstimation<PointInT, PointNT, pcl::VFHSignature308> vfh;
  vfh.setInputCloud (surface_);
  vfh.setInputNormals (normals_);
  vfh.setIndices (indices_);
  vfh.setSearchMethod (this->tree_);
  vfh.setUseGivenNormal (true);
  vfh.setUseGivenCentroid (true);
  vfh.setNormalizeBins (normalize_bins_);
  output.height = 1;

  // ---[ Step 1b : check if any dominant cluster was found
  if (clusters.size () > 0)
  { // ---[ Step 1b.1 : If yes, compute CVFH using the cluster information

    for (size_t i = 0; i < clusters.size (); ++i) //for each cluster

    {
      Eigen::Vector4f avg_normal = Eigen::Vector4f::Zero ();
      Eigen::Vector4f avg_centroid = Eigen::Vector4f::Zero ();

      for (size_t j = 0; j < clusters[i].indices.size (); j++)
      {
        avg_normal += normals_filtered_cloud->points[clusters[i].indices[j]].getNormalVector4fMap ();
        avg_centroid += normals_filtered_cloud->points[clusters[i].indices[j]].getVector4fMap ();
      }

      avg_normal /= static_cast<float> (clusters[i].indices.size ());
      avg_centroid /= static_cast<float> (clusters[i].indices.size ());
      avg_normal.normalize ();

      Eigen::Vector3f avg_norm (avg_normal[0], avg_normal[1], avg_normal[2]);
      Eigen::Vector3f avg_dominant_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);

      //append normal and centroid for the clusters
      dominant_normals_.push_back (avg_norm);
      centroids_dominant_orientations_.push_back (avg_dominant_centroid);
    }

    //compute modified VFH for all dominant clusters and add them to the list!
    output.points.resize (dominant_normals_.size ());
    output.width = static_cast<uint32_t> (dominant_normals_.size ());

    for (size_t i = 0; i < dominant_normals_.size (); ++i)
    {
      //configure VFH computation for CVFH
      vfh.setNormalToUse (dominant_normals_[i]);
      vfh.setCentroidToUse (centroids_dominant_orientations_[i]);
      pcl::PointCloud<pcl::VFHSignature308> vfh_signature;
      vfh.compute (vfh_signature);
      output.points[i] = vfh_signature.points[0];
    }

    //finish filling the descriptor with the shape distribution
    PointInTPtr cloud_input (new pcl::PointCloud<PointInT>);
    pcl::copyPointCloud (*surface_, *indices_, *cloud_input);
    computeRFAndShapeDistribution (cloud_input, output, clusters_); //this will set transforms_
  }
  else
  { // ---[ Step 1b.1 : If no, compute a VFH using all the object points

    PCL_WARN ("No clusters were found in the surface... using VFH...\n");
    Eigen::Vector4f avg_centroid;
    pcl::compute3DCentroid (*surface_, avg_centroid);
    Eigen::Vector3f cloud_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);
    centroids_dominant_orientations_.push_back (cloud_centroid);

    //configure VFH computation using all object points
    vfh.setCentroidToUse (cloud_centroid);
    vfh.setUseGivenNormal (false);

    pcl::PointCloud<pcl::VFHSignature308> vfh_signature;
    vfh.compute (vfh_signature);

    output.points.resize (1);
    output.width = 1;

    output.points[0] = vfh_signature.points[0];
    Eigen::Matrix4f id = Eigen::Matrix4f::Identity ();
    transforms_.push_back (id);
    valid_transforms_.push_back (false);
  }
}

#define PCL_INSTANTIATE_OURCVFHEstimation(T,NT,OutT) template class PCL_EXPORTS pcl::OURCVFHEstimation<T,NT,OutT>;

#endif  // PCL_FEATURES_IMPL_OURCVFH_H_
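
For context, a minimal usage sketch of the estimator implemented above (not part of the header itself). It assumes an input file named cloud.pcd, and the search radius, angle and curvature thresholds are illustrative placeholders that would need tuning for real data:

#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/our_cvfh.h>

int
main ()
{
  // Load an XYZ cloud (hypothetical file name)
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile ("cloud.pcd", *cloud) < 0)
    return (-1);

  // OUR-CVFH needs per-point normals as a second input
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
  ne.setInputCloud (cloud);
  ne.setSearchMethod (tree);
  ne.setRadiusSearch (0.03);          // illustrative radius
  ne.compute (*normals);

  // One VFHSignature308 is produced per smooth cluster and SGURF transform
  pcl::OURCVFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::VFHSignature308> ourcvfh;
  pcl::PointCloud<pcl::VFHSignature308> descriptors;
  ourcvfh.setInputCloud (cloud);
  ourcvfh.setInputNormals (normals);
  ourcvfh.setSearchMethod (tree);
  ourcvfh.setEPSAngleThreshold (0.13f);   // ~7.5 degrees, illustrative
  ourcvfh.setCurvatureThreshold (1.0f);   // illustrative
  ourcvfh.setNormalizeBins (false);
  ourcvfh.compute (descriptors);

  return (0);
}

The resulting transforms can then be retrieved with getTransforms () to place matched models into the scene reference frame.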