Point Cloud Library (PCL)  1.9.1-dev
hv_go.hpp
1 /*
2  * Software License Agreement (BSD License)
3  *
4  * Point Cloud Library (PCL) - www.pointclouds.org
5  * Copyright (c) 2012 Aitor Aldoma, Federico Tombari
6  *
7  * All rights reserved.
8  *
9  * Redistribution and use in source and binary forms, with or without
10  * modification, are permitted provided that the following conditions
11  * are met:
12  *
13  * * Redistributions of source code must retain the above copyright
14  * notice, this list of conditions and the following disclaimer.
15  * * Redistributions in binary form must reproduce the above
16  * copyright notice, this list of conditions and the following
17  * disclaimer in the documentation and/or other materials provided
18  * with the distribution.
19  * * Neither the name of Willow Garage, Inc. nor the names of its
20  * contributors may be used to endorse or promote products derived
21  * from this software without specific prior written permission.
22  *
23  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
24  * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
25  * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
26  * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
27  * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
28  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
29  * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
30  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
31  * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
32  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
33  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
34  * POSSIBILITY OF SUCH DAMAGE.
35  */
36 
37 #ifndef PCL_RECOGNITION_IMPL_HV_GO_HPP_
38 #define PCL_RECOGNITION_IMPL_HV_GO_HPP_
39 
40 #include <pcl/recognition/hv/hv_go.h>
41 #include <numeric>
42 #include <pcl/common/time.h>
43 #include <pcl/point_types.h>
44 
45 template<typename PointT, typename NormalT>
46 inline void extractEuclideanClustersSmooth(const typename pcl::PointCloud<PointT> &cloud, const typename pcl::PointCloud<NormalT> &normals, float tolerance,
47  const typename pcl::search::Search<PointT>::Ptr &tree, std::vector<pcl::PointIndices> &clusters, double eps_angle, float curvature_threshold,
48  unsigned int min_pts_per_cluster, unsigned int max_pts_per_cluster = (std::numeric_limits<int>::max) ())
49 {
50 
51  if (tree->getInputCloud ()->points.size () != cloud.points.size ())
52  {
53  PCL_ERROR("[pcl::extractEuclideanClusters] Tree built for a different point cloud dataset\n");
54  return;
55  }
56  if (cloud.points.size () != normals.points.size ())
57  {
58  PCL_ERROR("[pcl::extractEuclideanClusters] Number of points in the input point cloud different than normals!\n");
59  return;
60  }
61 
62  // Create a bool vector of processed point indices, and initialize it to false
63  std::vector<bool> processed (cloud.points.size (), false);
64 
65  std::vector<int> nn_indices;
66  std::vector<float> nn_distances;
67  // Process all points in the indices vector
68  int size = static_cast<int> (cloud.points.size ());
69  for (int i = 0; i < size; ++i)
70  {
71  if (processed[i])
72  continue;
73 
74  std::vector<unsigned int> seed_queue;
75  int sq_idx = 0;
76  seed_queue.push_back (i);
77 
78  processed[i] = true;
79 
80  while (sq_idx < static_cast<int> (seed_queue.size ()))
81  {
82 
83  if (normals.points[seed_queue[sq_idx]].curvature > curvature_threshold)
84  {
85  sq_idx++;
86  continue;
87  }
88 
89  // Search for sq_idx
90  if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
91  {
92  sq_idx++;
93  continue;
94  }
95 
96  for (size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] should be sq_idx
97  {
98  if (processed[nn_indices[j]]) // Has this point been processed before ?
99  continue;
100 
101  if (normals.points[nn_indices[j]].curvature > curvature_threshold)
102  {
103  continue;
104  }
105 
106  //processed[nn_indices[j]] = true;
107  // [-1;1]
108 
109  double dot_p = normals.points[seed_queue[sq_idx]].normal[0] * normals.points[nn_indices[j]].normal[0]
110  + normals.points[seed_queue[sq_idx]].normal[1] * normals.points[nn_indices[j]].normal[1]
111  + normals.points[seed_queue[sq_idx]].normal[2] * normals.points[nn_indices[j]].normal[2];
112 
113  if (fabs (acos (dot_p)) < eps_angle)
114  {
115  processed[nn_indices[j]] = true;
116  seed_queue.push_back (nn_indices[j]);
117  }
118  }
119 
120  sq_idx++;
121  }
122 
123  // If this queue is satisfactory, add to the clusters
124  if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
125  {
127  r.indices.resize (seed_queue.size ());
128  for (size_t j = 0; j < seed_queue.size (); ++j)
129  r.indices[j] = seed_queue[j];
130 
131  std::sort (r.indices.begin (), r.indices.end ());
132  r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());
133 
134  r.header = cloud.header;
135  clusters.push_back (r); // We could avoid a copy by working directly in the vector
136  }
137  }
138 }
139 
140 template<typename ModelT, typename SceneT>
141 mets::gol_type pcl::GlobalHypothesesVerification<ModelT, SceneT>::evaluateSolution(const std::vector<bool> & active, int changed)
142 {
143  float sign = 1.f;
144  //update explained_by_RM
145  if (active[changed])
146  {
147  //it has been activated
148  updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
149  explained_by_RM_distance_weighted, 1.f);
150  updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
151  unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, 1.f);
152  updateCMDuplicity(recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, 1.f);
153  } else
154  {
155  //it has been deactivated
156  updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
157  explained_by_RM_distance_weighted, -1.f);
158  updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
159  unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, -1.f);
160  updateCMDuplicity(recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, -1.f);
161  sign = -1.f;
162  }
163 
164  int duplicity = getDuplicity ();
165  float good_info = getExplainedValue ();
166 
167  float unexplained_info = getPreviousUnexplainedValue ();
168  float bad_info = static_cast<float> (getPreviousBadInfo ())
169  + (recognition_models_[changed]->outliers_weight_ * static_cast<float> (recognition_models_[changed]->bad_information_)) * sign;
170 
171  setPreviousBadInfo (bad_info);
172 
173  int n_active_hyp = 0;
174  for(const bool &i : active) {
175  if(i)
176  n_active_hyp++;
177  }
178 
179  float duplicity_cm = static_cast<float> (getDuplicityCM ()) * w_occupied_multiple_cm_;
180  return static_cast<mets::gol_type> ((good_info - bad_info - static_cast<float> (duplicity) - unexplained_info - duplicity_cm - static_cast<float> (n_active_hyp)) * -1.f); //return the dual to our max problem
181 }
182 
183 ///////////////////////////////////////////////////////////////////////////////////////////////////
184 template<typename ModelT, typename SceneT>
186 {
187  //clear stuff
188  recognition_models_.clear ();
189  unexplained_by_RM_neighboorhods.clear ();
190  explained_by_RM_distance_weighted.clear ();
191  explained_by_RM_.clear ();
192  mask_.clear ();
193  indices_.clear (),
194  complete_cloud_occupancy_by_RM_.clear ();
195 
196  // initialize mask to false
197  mask_.resize (complete_models_.size ());
198  for (size_t i = 0; i < complete_models_.size (); i++)
199  mask_[i] = false;
200 
201  indices_.resize (complete_models_.size ());
202 
203  NormalEstimator_ n3d;
204  scene_normals_.reset (new pcl::PointCloud<pcl::Normal> ());
205 
207  normals_tree->setInputCloud (scene_cloud_downsampled_);
208 
209  n3d.setRadiusSearch (radius_normals_);
210  n3d.setSearchMethod (normals_tree);
211  n3d.setInputCloud (scene_cloud_downsampled_);
212  n3d.compute (*scene_normals_);
213 
214  //check nans...
215  int j = 0;
216  for (size_t i = 0; i < scene_normals_->points.size (); ++i)
217  {
218  if (!std::isfinite (scene_normals_->points[i].normal_x) || !std::isfinite (scene_normals_->points[i].normal_y)
219  || !std::isfinite (scene_normals_->points[i].normal_z))
220  continue;
221 
222  scene_normals_->points[j] = scene_normals_->points[i];
223  scene_cloud_downsampled_->points[j] = scene_cloud_downsampled_->points[i];
224 
225  j++;
226  }
227 
228  scene_normals_->points.resize (j);
229  scene_normals_->width = j;
230  scene_normals_->height = 1;
231 
232  scene_cloud_downsampled_->points.resize (j);
233  scene_cloud_downsampled_->width = j;
234  scene_cloud_downsampled_->height = 1;
235 
236  explained_by_RM_.resize (scene_cloud_downsampled_->points.size (), 0);
237  explained_by_RM_distance_weighted.resize (scene_cloud_downsampled_->points.size (), 0.f);
238  unexplained_by_RM_neighboorhods.resize (scene_cloud_downsampled_->points.size (), 0.f);
239 
240  //compute segmentation of the scene if detect_clutter_
241  if (detect_clutter_)
242  {
243  //initialize kdtree for search
244  scene_downsampled_tree_.reset (new pcl::search::KdTree<SceneT>);
245  scene_downsampled_tree_->setInputCloud (scene_cloud_downsampled_);
246 
247  std::vector<pcl::PointIndices> clusters;
248  double eps_angle_threshold = 0.2;
249  int min_points = 20;
250  float curvature_threshold = 0.045f;
251 
252  extractEuclideanClustersSmooth<SceneT, pcl::Normal> (*scene_cloud_downsampled_, *scene_normals_, inliers_threshold_ * 2.f, scene_downsampled_tree_,
253  clusters, eps_angle_threshold, curvature_threshold, min_points);
254 
255  clusters_cloud_.reset (new pcl::PointCloud<pcl::PointXYZI>);
256  clusters_cloud_->points.resize (scene_cloud_downsampled_->points.size ());
257  clusters_cloud_->width = scene_cloud_downsampled_->width;
258  clusters_cloud_->height = 1;
259 
260  for (size_t i = 0; i < scene_cloud_downsampled_->points.size (); i++)
261  {
262  pcl::PointXYZI p;
263  p.getVector3fMap () = scene_cloud_downsampled_->points[i].getVector3fMap ();
264  p.intensity = 0.f;
265  clusters_cloud_->points[i] = p;
266  }
267 
268  float intens_incr = 100.f / static_cast<float> (clusters.size ());
269  float intens = intens_incr;
270  for (const auto &cluster : clusters)
271  {
272  for (const auto &vertex : cluster.indices)
273  {
274  clusters_cloud_->points[vertex].intensity = intens;
275  }
276 
277  intens += intens_incr;
278  }
279  }
280 
281  //compute cues
282  {
283  pcl::ScopeTime tcues ("Computing cues");
284  recognition_models_.resize (complete_models_.size ());
285  int valid = 0;
286  for (int i = 0; i < static_cast<int> (complete_models_.size ()); i++)
287  {
288  //create recognition model
289  recognition_models_[valid].reset (new RecognitionModel ());
290  if(addModel (visible_models_[i], complete_models_[i], recognition_models_[valid])) {
291  indices_[valid] = i;
292  valid++;
293  }
294  }
295 
296  recognition_models_.resize(valid);
297  indices_.resize(valid);
298  }
299 
300  //compute the bounding boxes for the models
301  ModelT min_pt_all, max_pt_all;
302  min_pt_all.x = min_pt_all.y = min_pt_all.z = std::numeric_limits<float>::max ();
303  max_pt_all.x = max_pt_all.y = max_pt_all.z = (std::numeric_limits<float>::max () - 0.001f) * -1;
304 
305  for (size_t i = 0; i < recognition_models_.size (); i++)
306  {
307  ModelT min_pt, max_pt;
308  pcl::getMinMax3D (*complete_models_[indices_[i]], min_pt, max_pt);
309  if (min_pt.x < min_pt_all.x)
310  min_pt_all.x = min_pt.x;
311 
312  if (min_pt.y < min_pt_all.y)
313  min_pt_all.y = min_pt.y;
314 
315  if (min_pt.z < min_pt_all.z)
316  min_pt_all.z = min_pt.z;
317 
318  if (max_pt.x > max_pt_all.x)
319  max_pt_all.x = max_pt.x;
320 
321  if (max_pt.y > max_pt_all.y)
322  max_pt_all.y = max_pt.y;
323 
324  if (max_pt.z > max_pt_all.z)
325  max_pt_all.z = max_pt.z;
326  }
327 
328  int size_x, size_y, size_z;
329  size_x = static_cast<int> (std::ceil (std::abs (max_pt_all.x - min_pt_all.x) / res_occupancy_grid_)) + 1;
330  size_y = static_cast<int> (std::ceil (std::abs (max_pt_all.y - min_pt_all.y) / res_occupancy_grid_)) + 1;
331  size_z = static_cast<int> (std::ceil (std::abs (max_pt_all.z - min_pt_all.z) / res_occupancy_grid_)) + 1;
332 
333  complete_cloud_occupancy_by_RM_.resize (size_x * size_y * size_z, 0);
334 
335  for (size_t i = 0; i < recognition_models_.size (); i++)
336  {
337 
338  std::map<int, bool> banned;
339  std::map<int, bool>::iterator banned_it;
340 
341  for (size_t j = 0; j < complete_models_[indices_[i]]->points.size (); j++)
342  {
343  int pos_x, pos_y, pos_z;
344  pos_x = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].x - min_pt_all.x) / res_occupancy_grid_));
345  pos_y = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].y - min_pt_all.y) / res_occupancy_grid_));
346  pos_z = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].z - min_pt_all.z) / res_occupancy_grid_));
347 
348  int idx = pos_z * size_x * size_y + pos_y * size_x + pos_x;
349  banned_it = banned.find (idx);
350  if (banned_it == banned.end ())
351  {
352  complete_cloud_occupancy_by_RM_[idx]++;
353  recognition_models_[i]->complete_cloud_occupancy_indices_.push_back (idx);
354  banned[idx] = true;
355  }
356  }
357  }
358 
359  {
360  pcl::ScopeTime tcues ("Computing clutter cues");
361 #pragma omp parallel for schedule(dynamic, 4) num_threads(omp_get_num_procs())
362  for (int j = 0; j < static_cast<int> (recognition_models_.size ()); j++)
363  computeClutterCue (recognition_models_[j]);
364  }
365 
366  cc_.clear ();
367  n_cc_ = 1;
368  cc_.resize (n_cc_);
369  for (size_t i = 0; i < recognition_models_.size (); i++)
370  cc_[0].push_back (static_cast<int> (i));
371 
372 }
373 
/** \brief Optimize the activation state of one connected component of
 * hypotheses with simulated annealing (mets library).
 *
 * \param[in] cc_indices indices into recognition_models_ belonging to this component
 * \param[in,out] initial_solution starting activation flags (one per hypothesis
 * in the component); overwritten with the best solution found by the search
 */
template<typename ModelT, typename SceneT>
void pcl::GlobalHypothesesVerification<ModelT, SceneT>::SAOptimize(std::vector<int> & cc_indices, std::vector<bool> & initial_solution)
{

  //temporal copy of recogniton_models_
  std::vector<RecognitionModelPtr> recognition_models_copy;
  recognition_models_copy = recognition_models_;

  recognition_models_.clear ();

  // Restrict recognition_models_ to this component only; the full set is
  // restored at the end of the function.
  for (const int &cc_index : cc_indices)
  {
    recognition_models_.push_back (recognition_models_copy[cc_index]);
  }

  // Accumulate explained/unexplained statistics as if every hypothesis in the
  // component were active (matches the all-true initial solution passed in).
  for (size_t j = 0; j < recognition_models_.size (); j++)
  {
    RecognitionModelPtr recog_model = recognition_models_[j];
    for (size_t i = 0; i < recog_model->explained_.size (); i++)
    {
      explained_by_RM_[recog_model->explained_[i]]++;
      explained_by_RM_distance_weighted[recog_model->explained_[i]] += recog_model->explained_distances_[i];
    }

    if (detect_clutter_)
    {
      for (size_t i = 0; i < recog_model->unexplained_in_neighborhood.size (); i++)
      {
        unexplained_by_RM_neighboorhods[recog_model->unexplained_in_neighborhood[i]] += recog_model->unexplained_in_neighborhood_weights[i];
      }
    }
  }

  // Count occupancy-grid voxels claimed by more than one model: the
  // multiple-occupancy penalty term of the objective.
  int occupied_multiple = 0;
  for(size_t i=0; i < complete_cloud_occupancy_by_RM_.size(); i++) {
    if(complete_cloud_occupancy_by_RM_[i] > 1) {
      occupied_multiple+=complete_cloud_occupancy_by_RM_[i];
    }
  }

  setPreviousDuplicityCM(occupied_multiple);
  //do optimization
  //Define model SAModel, initial solution is all models activated

  int duplicity;
  float good_information_ = getTotalExplainedInformation (explained_by_RM_, explained_by_RM_distance_weighted, &duplicity);
  float bad_information_ = 0;
  float unexplained_in_neighboorhod = getUnexplainedInformationInNeighborhood (unexplained_by_RM_neighboorhods, explained_by_RM_);

  for (size_t i = 0; i < initial_solution.size (); i++)
  {
    if (initial_solution[i])
      bad_information_ += recognition_models_[i]->outliers_weight_ * static_cast<float> (recognition_models_[i]->bad_information_);
  }

  // Seed the incremental bookkeeping used by evaluateSolution(); these must be
  // set before the first move is evaluated.
  setPreviousExplainedValue (good_information_);
  setPreviousDuplicity (duplicity);
  setPreviousBadInfo (bad_information_);
  setPreviousUnexplainedValue (unexplained_in_neighboorhod);

  // The annealer minimizes, so the cost is the negated objective.
  SAModel model;
  model.cost_ = static_cast<mets::gol_type> ((good_information_ - bad_information_
      - static_cast<float> (duplicity)
      - static_cast<float> (occupied_multiple) * w_occupied_multiple_cm_
      - static_cast<float> (recognition_models_.size ())
      - unexplained_in_neighboorhod) * -1.f);

  model.setSolution (initial_solution);
  model.setOptimizer (this);
  SAModel best (model);

  move_manager neigh (static_cast<int> (cc_indices.size ()));

  mets::best_ever_solution best_recorder (best);
  mets::noimprove_termination_criteria noimprove (max_iterations_);
  mets::linear_cooling linear_cooling;
  mets::simulated_annealing<move_manager> sa (model, best_recorder, neigh, noimprove, linear_cooling, initial_temp_, 1e-7, 2);
  sa.setApplyAndEvaluate(true);

  {
    pcl::ScopeTime t ("SA search...");
    sa.search ();
  }

  // Copy the best solution seen during the search back to the caller.
  best_seen_ = static_cast<const SAModel&> (best_recorder.best_seen ());
  for (size_t i = 0; i < best_seen_.solution_.size (); i++)
  {
    initial_solution[i] = best_seen_.solution_[i];
  }

  // Restore the full model set for subsequent components.
  recognition_models_ = recognition_models_copy;

}
467 
468 ///////////////////////////////////////////////////////////////////////////////////////////////////
469 template<typename ModelT, typename SceneT>
471 {
472  initialize ();
473 
474  //for each connected component, find the optimal solution
475  for (int c = 0; c < n_cc_; c++)
476  {
477  //TODO: Check for trivial case...
478  //TODO: Check also the number of hypotheses and use exhaustive enumeration if smaller than 10
479  std::vector<bool> subsolution (cc_[c].size (), true);
480  SAOptimize (cc_[c], subsolution);
481  for (size_t i = 0; i < subsolution.size (); i++)
482  {
483  mask_[indices_[cc_[c][i]]] = (subsolution[i]);
484  }
485  }
486 }
487 
488 template<typename ModelT, typename SceneT>
490  typename pcl::PointCloud<ModelT>::ConstPtr & complete_model, RecognitionModelPtr & recog_model)
491 {
492  //voxelize model cloud
493  recog_model->cloud_.reset (new pcl::PointCloud<ModelT> ());
494  recog_model->complete_cloud_.reset (new pcl::PointCloud<ModelT> ());
495 
496  float size_model = resolution_;
497  pcl::VoxelGrid<ModelT> voxel_grid;
498  voxel_grid.setInputCloud (model);
499  voxel_grid.setLeafSize (size_model, size_model, size_model);
500  voxel_grid.filter (*(recog_model->cloud_));
501 
502  pcl::VoxelGrid<ModelT> voxel_grid2;
503  voxel_grid2.setInputCloud (complete_model);
504  voxel_grid2.setLeafSize (size_model, size_model, size_model);
505  voxel_grid2.filter (*(recog_model->complete_cloud_));
506 
507  {
508  //check nans...
509  int j = 0;
510  for (size_t i = 0; i < recog_model->cloud_->points.size (); ++i)
511  {
512  if (!std::isfinite (recog_model->cloud_->points[i].x) || !std::isfinite (recog_model->cloud_->points[i].y)
513  || !std::isfinite (recog_model->cloud_->points[i].z))
514  continue;
515 
516  recog_model->cloud_->points[j] = recog_model->cloud_->points[i];
517  j++;
518  }
519 
520  recog_model->cloud_->points.resize (j);
521  recog_model->cloud_->width = j;
522  recog_model->cloud_->height = 1;
523  }
524 
525  if (recog_model->cloud_->points.empty ())
526  {
527  PCL_WARN("The model cloud has no points..\n");
528  return false;
529  }
530 
531  //compute normals unless given (now do it always...)
534  recog_model->normals_.reset (new pcl::PointCloud<pcl::Normal> ());
535  normals_tree->setInputCloud (recog_model->cloud_);
536  n3d.setRadiusSearch (radius_normals_);
537  n3d.setSearchMethod (normals_tree);
538  n3d.setInputCloud ((recog_model->cloud_));
539  n3d.compute (*(recog_model->normals_));
540 
541  //check nans...
542  int j = 0;
543  for (size_t i = 0; i < recog_model->normals_->points.size (); ++i)
544  {
545  if (!std::isfinite (recog_model->normals_->points[i].normal_x) || !std::isfinite (recog_model->normals_->points[i].normal_y)
546  || !std::isfinite (recog_model->normals_->points[i].normal_z))
547  continue;
548 
549  recog_model->normals_->points[j] = recog_model->normals_->points[i];
550  recog_model->cloud_->points[j] = recog_model->cloud_->points[i];
551  j++;
552  }
553 
554  recog_model->normals_->points.resize (j);
555  recog_model->normals_->width = j;
556  recog_model->normals_->height = 1;
557 
558  recog_model->cloud_->points.resize (j);
559  recog_model->cloud_->width = j;
560  recog_model->cloud_->height = 1;
561 
562  std::vector<int> explained_indices;
563  std::vector<float> outliers_weight;
564  std::vector<float> explained_indices_distances;
565  std::vector<float> unexplained_indices_weights;
566 
567  std::vector<int> nn_indices;
568  std::vector<float> nn_distances;
569 
570  std::map<int, boost::shared_ptr<std::vector<std::pair<int, float> > > > model_explains_scene_points; //which point i from the scene is explained by a points j_k with dist d_k from the model
571 
572  outliers_weight.resize (recog_model->cloud_->points.size ());
573  recog_model->outlier_indices_.resize (recog_model->cloud_->points.size ());
574 
575  size_t o = 0;
576  for (size_t i = 0; i < recog_model->cloud_->points.size (); i++)
577  {
578  if (!scene_downsampled_tree_->radiusSearch (recog_model->cloud_->points[i], inliers_threshold_, nn_indices, nn_distances, std::numeric_limits<int>::max ()))
579  {
580  //outlier
581  outliers_weight[o] = regularizer_;
582  recog_model->outlier_indices_[o] = static_cast<int> (i);
583  o++;
584  } else
585  {
586  for (size_t k = 0; k < nn_distances.size (); k++)
587  {
588  std::pair<int, float> pair = std::make_pair (i, nn_distances[k]); //i is a index to a model point and then distance
589  auto it = model_explains_scene_points.find (nn_indices[k]);
590  if (it == model_explains_scene_points.end ())
591  {
592  boost::shared_ptr < std::vector<std::pair<int, float> > > vec (new std::vector<std::pair<int, float> > ());
593  vec->push_back (pair);
594  model_explains_scene_points[nn_indices[k]] = vec;
595  } else
596  {
597  it->second->push_back (pair);
598  }
599  }
600  }
601  }
602 
603  outliers_weight.resize (o);
604  recog_model->outlier_indices_.resize (o);
605 
606  recog_model->outliers_weight_ = (std::accumulate (outliers_weight.begin (), outliers_weight.end (), 0.f) / static_cast<float> (outliers_weight.size ()));
607  if (outliers_weight.empty ())
608  recog_model->outliers_weight_ = 1.f;
609 
610  pcl::IndicesPtr indices_scene (new std::vector<int>);
611  //go through the map and keep the closest model point in case that several model points explain a scene point
612 
613  int p = 0;
614 
615  for (auto it = model_explains_scene_points.cbegin (); it != model_explains_scene_points.cend (); it++, p++)
616  {
617  size_t closest = 0;
618  float min_d = std::numeric_limits<float>::min ();
619  for (size_t i = 0; i < it->second->size (); i++)
620  {
621  if (it->second->at (i).second > min_d)
622  {
623  min_d = it->second->at (i).second;
624  closest = i;
625  }
626  }
627 
628  float d = it->second->at (closest).second;
629  float d_weight = -(d * d / (inliers_threshold_)) + 1;
630 
631  //it->first is index to scene point
632  //using normals to weight inliers
633  Eigen::Vector3f scene_p_normal = scene_normals_->points[it->first].getNormalVector3fMap ();
634  Eigen::Vector3f model_p_normal = recog_model->normals_->points[it->second->at (closest).first].getNormalVector3fMap ();
635  float dotp = scene_p_normal.dot (model_p_normal) * 1.f; //[-1,1] from antiparallel trough perpendicular to parallel
636 
637  if (dotp < 0.f)
638  dotp = 0.f;
639 
640  explained_indices.push_back (it->first);
641  explained_indices_distances.push_back (d_weight * dotp);
642 
643  }
644 
645  recog_model->bad_information_ = static_cast<int> (recog_model->outlier_indices_.size ());
646  recog_model->explained_ = explained_indices;
647  recog_model->explained_distances_ = explained_indices_distances;
648 
649  return true;
650 }
651 
652 template<typename ModelT, typename SceneT>
653 void pcl::GlobalHypothesesVerification<ModelT, SceneT>::computeClutterCue(RecognitionModelPtr & recog_model)
654 {
655  if (detect_clutter_)
656  {
657 
658  float rn_sqr = radius_neighborhood_GO_ * radius_neighborhood_GO_;
659  std::vector<int> nn_indices;
660  std::vector<float> nn_distances;
661 
662  std::vector < std::pair<int, int> > neighborhood_indices; //first is indices to scene point and second is indices to explained_ scene points
663  for (int i = 0; i < static_cast<int> (recog_model->explained_.size ()); i++)
664  {
665  if (scene_downsampled_tree_->radiusSearch (scene_cloud_downsampled_->points[recog_model->explained_[i]], radius_neighborhood_GO_, nn_indices,
666  nn_distances, std::numeric_limits<int>::max ()))
667  {
668  for (size_t k = 0; k < nn_distances.size (); k++)
669  {
670  if (nn_indices[k] != i)
671  neighborhood_indices.emplace_back (nn_indices[k], i);
672  }
673  }
674  }
675 
676  //sort neighborhood indices by id
677  std::sort (neighborhood_indices.begin (), neighborhood_indices.end (),
678  boost::bind (&std::pair<int, int>::first, _1) < boost::bind (&std::pair<int, int>::first, _2));
679 
680  //erase duplicated unexplained points
681  neighborhood_indices.erase (
682  std::unique (neighborhood_indices.begin (), neighborhood_indices.end (),
683  boost::bind (&std::pair<int, int>::first, _1) == boost::bind (&std::pair<int, int>::first, _2)), neighborhood_indices.end ());
684 
685  //sort explained points
686  std::vector<int> exp_idces (recog_model->explained_);
687  std::sort (exp_idces.begin (), exp_idces.end ());
688 
689  recog_model->unexplained_in_neighborhood.resize (neighborhood_indices.size ());
690  recog_model->unexplained_in_neighborhood_weights.resize (neighborhood_indices.size ());
691 
692  size_t p = 0;
693  size_t j = 0;
694  for (const auto &neighborhood_index : neighborhood_indices)
695  {
696  if ((j < exp_idces.size ()) && (neighborhood_index.first == exp_idces[j]))
697  {
698  //this index is explained by the hypothesis so ignore it, advance j
699  j++;
700  } else
701  {
702  //indices_in_nb[i] < exp_idces[j]
703  //recog_model->unexplained_in_neighborhood.push_back(neighborhood_indices[i]);
704  recog_model->unexplained_in_neighborhood[p] = neighborhood_index.first;
705 
706  if (clusters_cloud_->points[recog_model->explained_[neighborhood_index.second]].intensity != 0.f
707  && (clusters_cloud_->points[recog_model->explained_[neighborhood_index.second]].intensity
708  == clusters_cloud_->points[neighborhood_index.first].intensity))
709  {
710 
711  recog_model->unexplained_in_neighborhood_weights[p] = clutter_regularizer_;
712 
713  } else
714  {
715  //neighborhood_indices[i].first gives the index to the scene point and second to the explained scene point by the model causing this...
716  //calculate weight of this clutter point based on the distance of the scene point and the model point causing it
717  float d = static_cast<float> (pow (
718  (scene_cloud_downsampled_->points[recog_model->explained_[neighborhood_index.second]].getVector3fMap ()
719  - scene_cloud_downsampled_->points[neighborhood_index.first].getVector3fMap ()).norm (), 2));
720  float d_weight = -(d / rn_sqr) + 1; //points that are close have a strong weight*/
721 
722  //using normals to weight clutter points
723  Eigen::Vector3f scene_p_normal = scene_normals_->points[neighborhood_index.first].getNormalVector3fMap ();
724  Eigen::Vector3f model_p_normal = scene_normals_->points[recog_model->explained_[neighborhood_index.second]].getNormalVector3fMap ();
725  float dotp = scene_p_normal.dot (model_p_normal); //[-1,1] from antiparallel trough perpendicular to parallel
726 
727  if (dotp < 0)
728  dotp = 0.f;
729 
730  recog_model->unexplained_in_neighborhood_weights[p] = d_weight * dotp;
731  }
732  p++;
733  }
734  }
735 
736  recog_model->unexplained_in_neighborhood_weights.resize (p);
737  recog_model->unexplained_in_neighborhood.resize (p);
738  }
739 }
740 
741 #define PCL_INSTANTIATE_GoHV(T1,T2) template class PCL_EXPORTS pcl::GlobalHypothesesVerification<T1,T2>;
742 
743 #endif /* PCL_RECOGNITION_IMPL_HV_GO_HPP_ */
744 
std::vector< PointT, Eigen::aligned_allocator< PointT > > points
The point data.
Definition: point_cloud.h:409
boost::shared_ptr< std::vector< int > > IndicesPtr
Definition: pcl_base.h:60
virtual int radiusSearch(const PointT &point, double radius, std::vector< int > &k_indices, std::vector< float > &k_sqr_distances, unsigned int max_nn=0) const =0
Search for all the nearest neighbors of the query point in a given radius.
Class to measure the time spent in a scope.
Definition: time.h:110
std::vector< int > indices
Definition: PointIndices.h:19
void setRadiusSearch(double radius)
Set the sphere radius that is to be used for determining the nearest neighbors used for the feature e...
Definition: feature.h:199
void filter(PointCloud &output)
Calls the filtering method and returns the filtered dataset in output.
Definition: filter.h:130
VoxelGrid assembles a local 3D grid over a given PointCloud, and downsamples + filters the data...
Definition: voxel_grid.h:177
NormalEstimation estimates local surface properties (surface normals and curvatures) at each 3D point...
Definition: normal_3d.h:241
boost::shared_ptr< KdTree< PointT, Tree > > Ptr
Definition: kdtree.h:78
boost::shared_ptr< pcl::search::Search< PointT > > Ptr
Definition: search.h:80
void setInputCloud(const PointCloudConstPtr &cloud, const IndicesConstPtr &indices=IndicesConstPtr()) override
Provide a pointer to the input dataset.
Definition: kdtree.hpp:77
void getMinMax3D(const pcl::PointCloud< PointT > &cloud, PointT &min_pt, PointT &max_pt)
Get the minimum and maximum values on each of the 3 (x-y-z) dimensions in a given pointcloud...
Definition: common.hpp:242
::pcl::PCLHeader header
Definition: PointIndices.h:17
boost::shared_ptr< const PointCloud< PointT > > ConstPtr
Definition: point_cloud.h:428
virtual PointCloudConstPtr getInputCloud() const
Get a pointer to the input point cloud dataset.
Definition: search.h:124
PointCloud represents the base class in PCL for storing collections of 3D points. ...
pcl::PCLHeader header
The point cloud header.
Definition: point_cloud.h:406
void setSearchMethod(const KdTreePtr &tree)
Provide a pointer to the search object.
Definition: feature.h:165
virtual void setInputCloud(const PointCloudConstPtr &cloud)
Provide a pointer to the input dataset.
Definition: pcl_base.hpp:65
void compute(PointCloudOut &output)
Base method for feature estimation for all points given in <setInputCloud (), setIndices ()> using th...
Definition: feature.hpp:189
A hypothesis verification method proposed in "A Global Hypotheses Verification Method for 3D Object R...
Definition: hv_go.h:32
void setInputCloud(const PointCloudConstPtr &cloud) override
Provide a pointer to the input dataset.
Definition: normal_3d.h:330
void setLeafSize(const Eigen::Vector4f &leaf_size)
Set the voxel grid leaf size.
Definition: voxel_grid.h:222