Point Cloud Library (PCL)  1.7.0
our_cvfh.hpp
1 /*
2  * Software License Agreement (BSD License)
3  *
4  * Point Cloud Library (PCL) - www.pointclouds.org
5  * Copyright (c) 2010-2011, Willow Garage, Inc.
6  * Copyright (c) 2012-, Open Perception, Inc.
7  *
8  * All rights reserved.
9  *
10  * Redistribution and use in source and binary forms, with or without
11  * modification, are permitted provided that the following conditions
12  * are met:
13  *
14  * * Redistributions of source code must retain the above copyright
15  * notice, this list of conditions and the following disclaimer.
16  * * Redistributions in binary form must reproduce the above
17  * copyright notice, this list of conditions and the following
18  * disclaimer in the documentation and/or other materials provided
19  * with the distribution.
20  * * Neither the name of the copyright holder(s) nor the names of its
21  * contributors may be used to endorse or promote products derived
22  * from this software without specific prior written permission.
23  *
24  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
25  * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
26  * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
27  * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
28  * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
29  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
30  * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
31  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
32  * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
33  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
34  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
35  * POSSIBILITY OF SUCH DAMAGE.
36  *
37  * $Id: cvfh.hpp 5311 2012-03-26 22:02:04Z aaldoma $
38  *
39  */
40 
41 #ifndef PCL_FEATURES_IMPL_OURCVFH_H_
42 #define PCL_FEATURES_IMPL_OURCVFH_H_
43 
44 #include <pcl/features/our_cvfh.h>
45 #include <pcl/features/vfh.h>
46 #include <pcl/features/normal_3d.h>
47 #include <pcl/features/pfh_tools.h>
48 #include <pcl/common/transforms.h>
49 
50 //////////////////////////////////////////////////////////////////////////////////////////////
51 template<typename PointInT, typename PointNT, typename PointOutT> void
53 {
55  {
56  output.width = output.height = 0;
57  output.points.clear ();
58  return;
59  }
60  // Resize the output dataset
61  // Important! We should only allocate precisely how many elements we will need, otherwise
62  // we risk at pre-allocating too much memory which could lead to bad_alloc
63  // (see http://dev.pointclouds.org/issues/657)
64  output.width = output.height = 1;
65  output.points.resize (1);
66 
67  // Perform the actual feature computation
68  computeFeature (output);
69 
71 }
72 
73 //////////////////////////////////////////////////////////////////////////////////////////////
74 template<typename PointInT, typename PointNT, typename PointOutT> void
76  const pcl::PointCloud<pcl::PointNormal> &normals,
77  float tolerance,
79  std::vector<pcl::PointIndices> &clusters, double eps_angle,
80  unsigned int min_pts_per_cluster,
81  unsigned int max_pts_per_cluster)
82 {
83  if (tree->getInputCloud ()->points.size () != cloud.points.size ())
84  {
85  PCL_ERROR ("[pcl::extractEuclideanClusters] Tree built for a different point cloud dataset (%zu) than the input cloud (%zu)!\n", tree->getInputCloud ()->points.size (), cloud.points.size ());
86  return;
87  }
88  if (cloud.points.size () != normals.points.size ())
89  {
90  PCL_ERROR ("[pcl::extractEuclideanClusters] Number of points in the input point cloud (%zu) different than normals (%zu)!\n", cloud.points.size (), normals.points.size ());
91  return;
92  }
93 
94  // Create a bool vector of processed point indices, and initialize it to false
95  std::vector<bool> processed (cloud.points.size (), false);
96 
97  std::vector<int> nn_indices;
98  std::vector<float> nn_distances;
99  // Process all points in the indices vector
100  for (int i = 0; i < static_cast<int> (cloud.points.size ()); ++i)
101  {
102  if (processed[i])
103  continue;
104 
105  std::vector<unsigned int> seed_queue;
106  int sq_idx = 0;
107  seed_queue.push_back (i);
108 
109  processed[i] = true;
110 
111  while (sq_idx < static_cast<int> (seed_queue.size ()))
112  {
113  // Search for sq_idx
114  if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
115  {
116  sq_idx++;
117  continue;
118  }
119 
120  for (size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] should be sq_idx
121  {
122  if (processed[nn_indices[j]]) // Has this point been processed before ?
123  continue;
124 
125  //processed[nn_indices[j]] = true;
126  // [-1;1]
127 
128  double dot_p = normals.points[seed_queue[sq_idx]].normal[0] * normals.points[nn_indices[j]].normal[0]
129  + normals.points[seed_queue[sq_idx]].normal[1] * normals.points[nn_indices[j]].normal[1] + normals.points[seed_queue[sq_idx]].normal[2]
130  * normals.points[nn_indices[j]].normal[2];
131 
132  if (fabs (acos (dot_p)) < eps_angle)
133  {
134  processed[nn_indices[j]] = true;
135  seed_queue.push_back (nn_indices[j]);
136  }
137  }
138 
139  sq_idx++;
140  }
141 
142  // If this queue is satisfactory, add to the clusters
143  if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
144  {
146  r.indices.resize (seed_queue.size ());
147  for (size_t j = 0; j < seed_queue.size (); ++j)
148  r.indices[j] = seed_queue[j];
149 
150  std::sort (r.indices.begin (), r.indices.end ());
151  r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());
152 
153  r.header = cloud.header;
154  clusters.push_back (r); // We could avoid a copy by working directly in the vector
155  }
156  }
157 }
158 
159 //////////////////////////////////////////////////////////////////////////////////////////////
160 template<typename PointInT, typename PointNT, typename PointOutT> void
162  std::vector<int> &indices_to_use,
163  std::vector<int> &indices_out, std::vector<int> &indices_in,
164  float threshold)
165 {
166  indices_out.resize (cloud.points.size ());
167  indices_in.resize (cloud.points.size ());
168 
169  size_t in, out;
170  in = out = 0;
171 
172  for (int i = 0; i < static_cast<int> (indices_to_use.size ()); i++)
173  {
174  if (cloud.points[indices_to_use[i]].curvature > threshold)
175  {
176  indices_out[out] = indices_to_use[i];
177  out++;
178  }
179  else
180  {
181  indices_in[in] = indices_to_use[i];
182  in++;
183  }
184  }
185 
186  indices_out.resize (out);
187  indices_in.resize (in);
188 }
189 
/** \brief Computes the Semi-Global Unique Reference Frame (SGURF) for a cluster and
  * returns one or more candidate 4x4 transformations that bring the cloud into that frame.
  * Several transformations are returned when the in-plane axes cannot be disambiguated
  * reliably (ratio of the two axis scores above axis_ratio_).
  * \param[in] centroid the cluster centroid (also defines the viewing direction)
  * \param[in] normal_centroid the average normal of the cluster
  * \param[in] processed the full input cloud
  * \param[out] transformations the candidate pose-normalizing transforms
  * \param[out] grid work cloud; on return holds the demeaned, z-aligned copy of \a processed
  * \param[in] indices the point indices belonging to this cluster
  * \return true (always; kept for interface symmetry)
  */
template<typename PointInT, typename PointNT, typename PointOutT> bool
pcl::OURCVFHEstimation<PointInT, PointNT, PointOutT>::sgurf (Eigen::Vector3f & centroid, Eigen::Vector3f & normal_centroid,
                                                             PointInTPtr & processed, std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > & transformations,
                                                             PointInTPtr & grid, pcl::PointIndices & indices)
{

  // Rotate the cloud so the (negated) centroid direction maps onto +Z.
  Eigen::Vector3f plane_normal;
  plane_normal[0] = -centroid[0];
  plane_normal[1] = -centroid[1];
  plane_normal[2] = -centroid[2];
  Eigen::Vector3f z_vector = Eigen::Vector3f::UnitZ ();
  plane_normal.normalize ();
  Eigen::Vector3f axis = plane_normal.cross (z_vector);
  // NOTE(review): asin of the cross-product norm only recovers angles up to 90 degrees;
  // presumably the viewpoint geometry keeps the angle below that — confirm upstream.
  double rotation = -asin (axis.norm ());
  axis.normalize ();

  Eigen::Affine3f transformPC (Eigen::AngleAxisf (static_cast<float> (rotation), axis));

  // Copy the xyz coordinates of the input into the work grid and rotate it
  grid->points.resize (processed->points.size ());
  for (size_t k = 0; k < processed->points.size (); k++)
    grid->points[k].getVector4fMap () = processed->points[k].getVector4fMap ();

  pcl::transformPointCloud (*grid, *grid, transformPC);

  // Move centroid and average normal into the rotated frame
  Eigen::Vector4f centroid4f (centroid[0], centroid[1], centroid[2], 0);
  Eigen::Vector4f normal_centroid4f (normal_centroid[0], normal_centroid[1], normal_centroid[2], 0);

  centroid4f = transformPC * centroid4f;
  normal_centroid4f = transformPC * normal_centroid4f;

  Eigen::Vector3f centroid3f (centroid4f[0], centroid4f[1], centroid4f[2]);

  // Farthest cluster point from the centroid gives the weighting scale below
  Eigen::Vector4f farthest_away;
  pcl::getMaxDistance (*grid, indices.indices, centroid4f, farthest_away);
  farthest_away[3] = 0;

  float max_dist = (farthest_away - centroid4f).norm ();

  // Center the grid on the (rotated) centroid
  pcl::demeanPointCloud (*grid, centroid4f, *grid);

  // Translation part of the final transform (applied before the rotation transformPC)
  Eigen::Matrix4f center_mat;
  center_mat.setIdentity (4, 4);
  center_mat (0, 3) = -centroid4f[0];
  center_mat (1, 3) = -centroid4f[1];
  center_mat (2, 3) = -centroid4f[2];

  // Build a distance-weighted scatter matrix over the cluster points:
  // points near the centroid get larger weight (max_dist - d_k)
  Eigen::Matrix3f scatter;
  scatter.setZero ();
  float sum_w = 0.f;

  //for (int k = 0; k < static_cast<intgrid->points[k].getVector3fMap ();> (grid->points.size ()); k++)
  for (int k = 0; k < static_cast<int> (indices.indices.size ()); k++)
  {
    Eigen::Vector3f pvector = grid->points[indices.indices[k]].getVector3fMap ();
    float d_k = (pvector).norm ();
    float w = (max_dist - d_k);
    Eigen::Vector3f diff = (pvector);
    Eigen::Matrix3f mat = diff * diff.transpose ();
    scatter = scatter + mat * w;
    sum_w += w;
  }

  scatter /= sum_w;

  // Principal directions of the weighted scatter; evx/evy/evz ordered by decreasing
  // singular value. Signs are ambiguous and disambiguated below.
  Eigen::JacobiSVD <Eigen::MatrixXf> svd (scatter, Eigen::ComputeFullV);
  Eigen::Vector3f evx = svd.matrixV ().col (0);
  Eigen::Vector3f evy = svd.matrixV ().col (1);
  Eigen::Vector3f evz = svd.matrixV ().col (2);
  Eigen::Vector3f evxminus = evx * -1;
  Eigen::Vector3f evyminus = evy * -1;
  Eigen::Vector3f evzminus = evz * -1;

  // Accumulated projection mass on each side of the x and y axes
  float s_xplus, s_xminus, s_yplus, s_yminus;
  s_xplus = s_xminus = s_yplus = s_yminus = 0.f;

  //disambiguate rf using all points
  for (int k = 0; k < static_cast<int> (grid->points.size ()); k++)
  {
    Eigen::Vector3f pvector = grid->points[k].getVector3fMap ();
    float dist_x, dist_y;
    dist_x = std::abs (evx.dot (pvector));
    dist_y = std::abs (evy.dot (pvector));

    if ((pvector).dot (evx) >= 0)
      s_xplus += dist_x;
    else
      s_xminus += dist_x;

    if ((pvector).dot (evy) >= 0)
      s_yplus += dist_y;
    else
      s_yminus += dist_y;

  }

  // Flip each axis toward the side with more projection mass
  if (s_xplus < s_xminus)
    evx = evxminus;

  if (s_yplus < s_yminus)
    evy = evyminus;

  //select the axis that could be disambiguated more easily
  float fx, fy;
  float max_x = static_cast<float> (std::max (s_xplus, s_xminus));
  float min_x = static_cast<float> (std::min (s_xplus, s_xminus));
  float max_y = static_cast<float> (std::max (s_yplus, s_yminus));
  float min_y = static_cast<float> (std::min (s_yplus, s_yminus));

  // Ratio close to 1 means the two sides are balanced -> ambiguous axis
  fx = (min_x / max_x);
  fy = (min_y / max_y);

  // z axis points along the average cluster normal
  Eigen::Vector3f normal3f = Eigen::Vector3f (normal_centroid4f[0], normal_centroid4f[1], normal_centroid4f[2]);
  if (normal3f.dot (evz) < 0)
    evz = evzminus;

  //if fx/y close to 1, it was hard to disambiguate
  //what if both are equally easy or difficult to disambiguate, namely fy == fx or very close

  float max_axis = std::max (fx, fy);
  float min_axis = std::min (fx, fy);

  if ((min_axis / max_axis) > axis_ratio_)
  {
    PCL_WARN("Both axis are equally easy/difficult to disambiguate\n");

    Eigen::Vector3f evy_copy = evy;
    // NOTE: these intentionally shadow the outer evxminus/evyminus with the
    // possibly-flipped axes computed above
    Eigen::Vector3f evxminus = evx * -1;
    Eigen::Vector3f evyminus = evy * -1;

    if (min_axis > min_axis_value_)
    {
      //combination of all possibilities: emit four candidate frames
      evy = evx.cross (evz);
      Eigen::Matrix4f trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

      evx = evxminus;
      evy = evx.cross (evz);
      trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

      evx = evy_copy;
      evy = evx.cross (evz);
      trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

      evx = evyminus;
      evy = evx.cross (evz);
      trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

    }
    else
    {
      //1-st case (evx selected)
      evy = evx.cross (evz);
      Eigen::Matrix4f trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);

      //2-nd case (evy selected)
      evx = evy_copy;
      evy = evx.cross (evz);
      trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
      transformations.push_back (trans);
    }
  }
  else
  {
    // One axis dominates: keep the better-disambiguated one as x
    if (fy < fx)
    {
      evx = evy;
      fx = fy;
    }

    evy = evx.cross (evz);
    Eigen::Matrix4f trans = createTransFromAxes (evx, evy, evz, transformPC, center_mat);
    transformations.push_back (trans);

  }

  return true;
}
372 
373 //////////////////////////////////////////////////////////////////////////////////////////////
374 template<typename PointInT, typename PointNT, typename PointOutT> void
376  std::vector<pcl::PointIndices> & cluster_indices)
377 {
378  PointCloudOut ourcvfh_output;
379  for (size_t i = 0; i < centroids_dominant_orientations_.size (); i++)
380  {
381 
382  std::vector < Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > transformations;
384  sgurf (centroids_dominant_orientations_[i], dominant_normals_[i], processed, transformations, grid, cluster_indices[i]);
385 
386  for (size_t t = 0; t < transformations.size (); t++)
387  {
388 
389  pcl::transformPointCloud (*processed, *grid, transformations[t]);
390  transforms_.push_back (transformations[t]);
391  valid_transforms_.push_back (true);
392 
393  std::vector < Eigen::VectorXf > quadrants (8);
394  int size_hists = 13;
395  int num_hists = 8;
396  for (int k = 0; k < num_hists; k++)
397  quadrants[k].setZero (size_hists);
398 
399  Eigen::Vector4f centroid_p;
400  centroid_p.setZero ();
401  Eigen::Vector4f max_pt;
402  pcl::getMaxDistance (*grid, centroid_p, max_pt);
403  max_pt[3] = 0;
404  double distance_normalization_factor = (centroid_p - max_pt).norm ();
405 
406  float hist_incr;
407  if (normalize_bins_)
408  hist_incr = 100.0f / static_cast<float> (grid->points.size () - 1);
409  else
410  hist_incr = 1.0f;
411 
412  float * weights = new float[num_hists];
413  float sigma = 0.01f; //1cm
414  float sigma_sq = sigma * sigma;
415 
416  for (int k = 0; k < static_cast<int> (grid->points.size ()); k++)
417  {
418  Eigen::Vector4f p = grid->points[k].getVector4fMap ();
419  p[3] = 0.f;
420  float d = p.norm ();
421 
422  //compute weight for all octants
423  float wx = 1.f - std::exp (-((p[0] * p[0]) / (2.f * sigma_sq))); //how is the weight distributed among two semi-cubes
424  float wy = 1.f - std::exp (-((p[1] * p[1]) / (2.f * sigma_sq)));
425  float wz = 1.f - std::exp (-((p[2] * p[2]) / (2.f * sigma_sq)));
426 
427  //distribute the weights using the x-coordinate
428  if (p[0] >= 0)
429  {
430  for (size_t ii = 0; ii <= 3; ii++)
431  weights[ii] = 0.5f - wx * 0.5f;
432 
433  for (size_t ii = 4; ii <= 7; ii++)
434  weights[ii] = 0.5f + wx * 0.5f;
435  }
436  else
437  {
438  for (size_t ii = 0; ii <= 3; ii++)
439  weights[ii] = 0.5f + wx * 0.5f;
440 
441  for (size_t ii = 4; ii <= 7; ii++)
442  weights[ii] = 0.5f - wx * 0.5f;
443  }
444 
445  //distribute the weights using the y-coordinate
446  if (p[1] >= 0)
447  {
448  for (size_t ii = 0; ii <= 1; ii++)
449  weights[ii] *= 0.5f - wy * 0.5f;
450  for (size_t ii = 4; ii <= 5; ii++)
451  weights[ii] *= 0.5f - wy * 0.5f;
452 
453  for (size_t ii = 2; ii <= 3; ii++)
454  weights[ii] *= 0.5f + wy * 0.5f;
455 
456  for (size_t ii = 6; ii <= 7; ii++)
457  weights[ii] *= 0.5f + wy * 0.5f;
458  }
459  else
460  {
461  for (size_t ii = 0; ii <= 1; ii++)
462  weights[ii] *= 0.5f + wy * 0.5f;
463  for (size_t ii = 4; ii <= 5; ii++)
464  weights[ii] *= 0.5f + wy * 0.5f;
465 
466  for (size_t ii = 2; ii <= 3; ii++)
467  weights[ii] *= 0.5f - wy * 0.5f;
468 
469  for (size_t ii = 6; ii <= 7; ii++)
470  weights[ii] *= 0.5f - wy * 0.5f;
471  }
472 
473  //distribute the weights using the z-coordinate
474  if (p[2] >= 0)
475  {
476  for (size_t ii = 0; ii <= 7; ii += 2)
477  weights[ii] *= 0.5f - wz * 0.5f;
478 
479  for (size_t ii = 1; ii <= 7; ii += 2)
480  weights[ii] *= 0.5f + wz * 0.5f;
481 
482  }
483  else
484  {
485  for (size_t ii = 0; ii <= 7; ii += 2)
486  weights[ii] *= 0.5f + wz * 0.5f;
487 
488  for (size_t ii = 1; ii <= 7; ii += 2)
489  weights[ii] *= 0.5f - wz * 0.5f;
490  }
491 
492  int h_index = static_cast<int> (std::floor (size_hists * (d / distance_normalization_factor)));
493  for (int j = 0; j < num_hists; j++)
494  quadrants[j][h_index] += hist_incr * weights[j];
495 
496  }
497 
498  //copy to the cvfh signature
499  PointCloudOut vfh_signature;
500  vfh_signature.points.resize (1);
501  vfh_signature.width = vfh_signature.height = 1;
502  for (int d = 0; d < 308; ++d)
503  vfh_signature.points[0].histogram[d] = output.points[i].histogram[d];
504 
505  int pos = 45 * 3;
506  for (int k = 0; k < num_hists; k++)
507  {
508  for (int ii = 0; ii < size_hists; ii++, pos++)
509  {
510  vfh_signature.points[0].histogram[pos] = quadrants[k][ii];
511  }
512  }
513 
514  ourcvfh_output.points.push_back (vfh_signature.points[0]);
515 
516  delete[] weights;
517  }
518  }
519 
520  output = ourcvfh_output;
521 }
522 
523 //////////////////////////////////////////////////////////////////////////////////////////////
524 template<typename PointInT, typename PointNT, typename PointOutT> void
526 {
527  if (refine_clusters_ <= 0.f)
528  refine_clusters_ = 1.f;
529 
530  // Check if input was set
531  if (!normals_)
532  {
533  PCL_ERROR ("[pcl::%s::computeFeature] No input dataset containing normals was given!\n", getClassName ().c_str ());
534  output.width = output.height = 0;
535  output.points.clear ();
536  return;
537  }
538  if (normals_->points.size () != surface_->points.size ())
539  {
540  PCL_ERROR ("[pcl::%s::computeFeature] The number of points in the input dataset differs from the number of points in the dataset containing the normals!\n", getClassName ().c_str ());
541  output.width = output.height = 0;
542  output.points.clear ();
543  return;
544  }
545 
546  centroids_dominant_orientations_.clear ();
547  clusters_.clear ();
548  transforms_.clear ();
549  dominant_normals_.clear ();
550 
551  // ---[ Step 0: remove normals with high curvature
552  std::vector<int> indices_out;
553  std::vector<int> indices_in;
554  filterNormalsWithHighCurvature (*normals_, *indices_, indices_out, indices_in, curv_threshold_);
555 
557  normals_filtered_cloud->width = static_cast<uint32_t> (indices_in.size ());
558  normals_filtered_cloud->height = 1;
559  normals_filtered_cloud->points.resize (normals_filtered_cloud->width);
560 
561  std::vector<int> indices_from_nfc_to_indices;
562  indices_from_nfc_to_indices.resize (indices_in.size ());
563 
564  for (size_t i = 0; i < indices_in.size (); ++i)
565  {
566  normals_filtered_cloud->points[i].x = surface_->points[indices_in[i]].x;
567  normals_filtered_cloud->points[i].y = surface_->points[indices_in[i]].y;
568  normals_filtered_cloud->points[i].z = surface_->points[indices_in[i]].z;
569  //normals_filtered_cloud->points[i].getNormalVector4fMap() = normals_->points[indices_in[i]].getNormalVector4fMap();
570  indices_from_nfc_to_indices[i] = indices_in[i];
571  }
572 
573  std::vector<pcl::PointIndices> clusters;
574 
575  if (normals_filtered_cloud->points.size () >= min_points_)
576  {
577  //recompute normals and use them for clustering
578  {
579  KdTreePtr normals_tree_filtered (new pcl::search::KdTree<pcl::PointNormal> (false));
580  normals_tree_filtered->setInputCloud (normals_filtered_cloud);
582  n3d.setRadiusSearch (radius_normals_);
583  n3d.setSearchMethod (normals_tree_filtered);
584  n3d.setInputCloud (normals_filtered_cloud);
585  n3d.compute (*normals_filtered_cloud);
586  }
587 
588  KdTreePtr normals_tree (new pcl::search::KdTree<pcl::PointNormal> (false));
589  normals_tree->setInputCloud (normals_filtered_cloud);
590 
591  extractEuclideanClustersSmooth (*normals_filtered_cloud, *normals_filtered_cloud, cluster_tolerance_, normals_tree, clusters,
592  eps_angle_threshold_, static_cast<unsigned int> (min_points_));
593 
594  std::vector<pcl::PointIndices> clusters_filtered;
595  int cluster_filtered_idx = 0;
596  for (size_t i = 0; i < clusters.size (); i++)
597  {
598 
600  pcl::PointIndices pi_cvfh;
601  pcl::PointIndices pi_filtered;
602 
603  clusters_.push_back (pi);
604  clusters_filtered.push_back (pi_filtered);
605 
606  Eigen::Vector4f avg_normal = Eigen::Vector4f::Zero ();
607  Eigen::Vector4f avg_centroid = Eigen::Vector4f::Zero ();
608 
609  for (size_t j = 0; j < clusters[i].indices.size (); j++)
610  {
611  avg_normal += normals_filtered_cloud->points[clusters[i].indices[j]].getNormalVector4fMap ();
612  avg_centroid += normals_filtered_cloud->points[clusters[i].indices[j]].getVector4fMap ();
613  }
614 
615  avg_normal /= static_cast<float> (clusters[i].indices.size ());
616  avg_centroid /= static_cast<float> (clusters[i].indices.size ());
617  avg_normal.normalize ();
618 
619  Eigen::Vector3f avg_norm (avg_normal[0], avg_normal[1], avg_normal[2]);
620  Eigen::Vector3f avg_dominant_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);
621 
622  for (size_t j = 0; j < clusters[i].indices.size (); j++)
623  {
624  //decide if normal should be added
625  double dot_p = avg_normal.dot (normals_filtered_cloud->points[clusters[i].indices[j]].getNormalVector4fMap ());
626  if (fabs (acos (dot_p)) < (eps_angle_threshold_ * refine_clusters_))
627  {
628  clusters_[cluster_filtered_idx].indices.push_back (indices_from_nfc_to_indices[clusters[i].indices[j]]);
629  clusters_filtered[cluster_filtered_idx].indices.push_back (clusters[i].indices[j]);
630  }
631  }
632 
633  //remove last cluster if no points found...
634  if (clusters_[cluster_filtered_idx].indices.size () == 0)
635  {
636  clusters_.erase (clusters_.end ());
637  clusters_filtered.erase (clusters_filtered.end ());
638  }
639  else
640  cluster_filtered_idx++;
641  }
642 
643  clusters = clusters_filtered;
644 
645  }
646 
648  vfh.setInputCloud (surface_);
649  vfh.setInputNormals (normals_);
650  vfh.setIndices (indices_);
651  vfh.setSearchMethod (this->tree_);
652  vfh.setUseGivenNormal (true);
653  vfh.setUseGivenCentroid (true);
654  vfh.setNormalizeBins (normalize_bins_);
655  output.height = 1;
656 
657  // ---[ Step 1b : check if any dominant cluster was found
658  if (clusters.size () > 0)
659  { // ---[ Step 1b.1 : If yes, compute CVFH using the cluster information
660 
661  for (size_t i = 0; i < clusters.size (); ++i) //for each cluster
662 
663  {
664  Eigen::Vector4f avg_normal = Eigen::Vector4f::Zero ();
665  Eigen::Vector4f avg_centroid = Eigen::Vector4f::Zero ();
666 
667  for (size_t j = 0; j < clusters[i].indices.size (); j++)
668  {
669  avg_normal += normals_filtered_cloud->points[clusters[i].indices[j]].getNormalVector4fMap ();
670  avg_centroid += normals_filtered_cloud->points[clusters[i].indices[j]].getVector4fMap ();
671  }
672 
673  avg_normal /= static_cast<float> (clusters[i].indices.size ());
674  avg_centroid /= static_cast<float> (clusters[i].indices.size ());
675  avg_normal.normalize ();
676 
677  Eigen::Vector3f avg_norm (avg_normal[0], avg_normal[1], avg_normal[2]);
678  Eigen::Vector3f avg_dominant_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);
679 
680  //append normal and centroid for the clusters
681  dominant_normals_.push_back (avg_norm);
682  centroids_dominant_orientations_.push_back (avg_dominant_centroid);
683  }
684 
685  //compute modified VFH for all dominant clusters and add them to the list!
686  output.points.resize (dominant_normals_.size ());
687  output.width = static_cast<uint32_t> (dominant_normals_.size ());
688 
689  for (size_t i = 0; i < dominant_normals_.size (); ++i)
690  {
691  //configure VFH computation for CVFH
692  vfh.setNormalToUse (dominant_normals_[i]);
693  vfh.setCentroidToUse (centroids_dominant_orientations_[i]);
695  vfh.compute (vfh_signature);
696  output.points[i] = vfh_signature.points[0];
697  }
698 
699  //finish filling the descriptor with the shape distribution
700  PointInTPtr cloud_input (new pcl::PointCloud<PointInT>);
701  pcl::copyPointCloud (*surface_, *indices_, *cloud_input);
702  computeRFAndShapeDistribution (cloud_input, output, clusters_); //this will set transforms_
703  }
704  else
705  { // ---[ Step 1b.1 : If no, compute a VFH using all the object points
706 
707  PCL_WARN("No clusters were found in the surface... using VFH...\n");
708  Eigen::Vector4f avg_centroid;
709  pcl::compute3DCentroid (*surface_, avg_centroid);
710  Eigen::Vector3f cloud_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);
711  centroids_dominant_orientations_.push_back (cloud_centroid);
712 
713  //configure VFH computation using all object points
714  vfh.setCentroidToUse (cloud_centroid);
715  vfh.setUseGivenNormal (false);
716 
718  vfh.compute (vfh_signature);
719 
720  output.points.resize (1);
721  output.width = 1;
722 
723  output.points[0] = vfh_signature.points[0];
724  Eigen::Matrix4f id = Eigen::Matrix4f::Identity ();
725  transforms_.push_back (id);
726  valid_transforms_.push_back (false);
727  }
728 }
729 
730 #define PCL_INSTANTIATE_OURCVFHEstimation(T,NT,OutT) template class PCL_EXPORTS pcl::OURCVFHEstimation<T,NT,OutT>;
731 
732 #endif // PCL_FEATURES_IMPL_OURCVFH_H_