Point Cloud Library (PCL)  1.9.1-dev
hv_go.hpp
1 /*
2  * Software License Agreement (BSD License)
3  *
4  * Point Cloud Library (PCL) - www.pointclouds.org
5  * Copyright (c) 2012 Aitor Aldoma, Federico Tombari
6  *
7  * All rights reserved.
8  *
9  * Redistribution and use in source and binary forms, with or without
10  * modification, are permitted provided that the following conditions
11  * are met:
12  *
13  * * Redistributions of source code must retain the above copyright
14  * notice, this list of conditions and the following disclaimer.
15  * * Redistributions in binary form must reproduce the above
16  * copyright notice, this list of conditions and the following
17  * disclaimer in the documentation and/or other materials provided
18  * with the distribution.
19  * * Neither the name of Willow Garage, Inc. nor the names of its
20  * contributors may be used to endorse or promote products derived
21  * from this software without specific prior written permission.
22  *
23  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
24  * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
25  * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
26  * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
27  * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
28  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
29  * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
30  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
31  * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
32  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
33  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
34  * POSSIBILITY OF SUCH DAMAGE.
35  */
36 
37 #ifndef PCL_RECOGNITION_IMPL_HV_GO_HPP_
38 #define PCL_RECOGNITION_IMPL_HV_GO_HPP_
39 
#include <pcl/recognition/hv/hv_go.h>
#include <pcl/common/time.h>
#include <pcl/point_types.h>

#include <algorithm>
#include <cmath>
#include <limits>
#include <map>
#include <memory>
#include <numeric>
#include <utility>
#include <vector>
46 
47 template<typename PointT, typename NormalT>
48 inline void extractEuclideanClustersSmooth(const typename pcl::PointCloud<PointT> &cloud, const typename pcl::PointCloud<NormalT> &normals, float tolerance,
49  const typename pcl::search::Search<PointT>::Ptr &tree, std::vector<pcl::PointIndices> &clusters, double eps_angle, float curvature_threshold,
50  unsigned int min_pts_per_cluster, unsigned int max_pts_per_cluster = (std::numeric_limits<int>::max) ())
51 {
52 
53  if (tree->getInputCloud ()->points.size () != cloud.points.size ())
54  {
55  PCL_ERROR("[pcl::extractEuclideanClusters] Tree built for a different point cloud dataset\n");
56  return;
57  }
58  if (cloud.points.size () != normals.points.size ())
59  {
60  PCL_ERROR("[pcl::extractEuclideanClusters] Number of points in the input point cloud different than normals!\n");
61  return;
62  }
63 
64  // Create a bool vector of processed point indices, and initialize it to false
65  std::vector<bool> processed (cloud.points.size (), false);
66 
67  std::vector<int> nn_indices;
68  std::vector<float> nn_distances;
69  // Process all points in the indices vector
70  int size = static_cast<int> (cloud.points.size ());
71  for (int i = 0; i < size; ++i)
72  {
73  if (processed[i])
74  continue;
75 
76  std::vector<unsigned int> seed_queue;
77  int sq_idx = 0;
78  seed_queue.push_back (i);
79 
80  processed[i] = true;
81 
82  while (sq_idx < static_cast<int> (seed_queue.size ()))
83  {
84 
85  if (normals.points[seed_queue[sq_idx]].curvature > curvature_threshold)
86  {
87  sq_idx++;
88  continue;
89  }
90 
91  // Search for sq_idx
92  if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
93  {
94  sq_idx++;
95  continue;
96  }
97 
98  for (size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] should be sq_idx
99  {
100  if (processed[nn_indices[j]]) // Has this point been processed before ?
101  continue;
102 
103  if (normals.points[nn_indices[j]].curvature > curvature_threshold)
104  {
105  continue;
106  }
107 
108  //processed[nn_indices[j]] = true;
109  // [-1;1]
110 
111  double dot_p = normals.points[seed_queue[sq_idx]].normal[0] * normals.points[nn_indices[j]].normal[0]
112  + normals.points[seed_queue[sq_idx]].normal[1] * normals.points[nn_indices[j]].normal[1]
113  + normals.points[seed_queue[sq_idx]].normal[2] * normals.points[nn_indices[j]].normal[2];
114 
115  if (std::abs (std::acos (dot_p)) < eps_angle)
116  {
117  processed[nn_indices[j]] = true;
118  seed_queue.push_back (nn_indices[j]);
119  }
120  }
121 
122  sq_idx++;
123  }
124 
125  // If this queue is satisfactory, add to the clusters
126  if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
127  {
129  r.indices.resize (seed_queue.size ());
130  for (size_t j = 0; j < seed_queue.size (); ++j)
131  r.indices[j] = seed_queue[j];
132 
133  std::sort (r.indices.begin (), r.indices.end ());
134  r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());
135 
136  r.header = cloud.header;
137  clusters.push_back (r); // We could avoid a copy by working directly in the vector
138  }
139  }
140 }
141 
142 template<typename ModelT, typename SceneT>
143 mets::gol_type pcl::GlobalHypothesesVerification<ModelT, SceneT>::evaluateSolution(const std::vector<bool> & active, int changed)
144 {
145  float sign = 1.f;
146  //update explained_by_RM
147  if (active[changed])
148  {
149  //it has been activated
150  updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
151  explained_by_RM_distance_weighted, 1.f);
152  updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
153  unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, 1.f);
154  updateCMDuplicity(recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, 1.f);
155  } else
156  {
157  //it has been deactivated
158  updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
159  explained_by_RM_distance_weighted, -1.f);
160  updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
161  unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, -1.f);
162  updateCMDuplicity(recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, -1.f);
163  sign = -1.f;
164  }
165 
166  int duplicity = getDuplicity ();
167  float good_info = getExplainedValue ();
168 
169  float unexplained_info = getPreviousUnexplainedValue ();
170  float bad_info = static_cast<float> (getPreviousBadInfo ())
171  + (recognition_models_[changed]->outliers_weight_ * static_cast<float> (recognition_models_[changed]->bad_information_)) * sign;
172 
173  setPreviousBadInfo (bad_info);
174 
175  int n_active_hyp = 0;
176  for(const bool &i : active) {
177  if(i)
178  n_active_hyp++;
179  }
180 
181  float duplicity_cm = static_cast<float> (getDuplicityCM ()) * w_occupied_multiple_cm_;
182  return static_cast<mets::gol_type> ((good_info - bad_info - static_cast<float> (duplicity) - unexplained_info - duplicity_cm - static_cast<float> (n_active_hyp)) * -1.f); //return the dual to our max problem
183 }
184 
185 ///////////////////////////////////////////////////////////////////////////////////////////////////
186 template<typename ModelT, typename SceneT>
188 {
189  //clear stuff
190  recognition_models_.clear ();
191  unexplained_by_RM_neighboorhods.clear ();
192  explained_by_RM_distance_weighted.clear ();
193  explained_by_RM_.clear ();
194  mask_.clear ();
195  indices_.clear (),
196  complete_cloud_occupancy_by_RM_.clear ();
197 
198  // initialize mask to false
199  mask_.resize (complete_models_.size ());
200  for (size_t i = 0; i < complete_models_.size (); i++)
201  mask_[i] = false;
202 
203  indices_.resize (complete_models_.size ());
204 
205  NormalEstimator_ n3d;
206  scene_normals_.reset (new pcl::PointCloud<pcl::Normal> ());
207 
209  normals_tree->setInputCloud (scene_cloud_downsampled_);
210 
211  n3d.setRadiusSearch (radius_normals_);
212  n3d.setSearchMethod (normals_tree);
213  n3d.setInputCloud (scene_cloud_downsampled_);
214  n3d.compute (*scene_normals_);
215 
216  //check nans...
217  int j = 0;
218  for (size_t i = 0; i < scene_normals_->points.size (); ++i)
219  {
220  if (!std::isfinite (scene_normals_->points[i].normal_x) || !std::isfinite (scene_normals_->points[i].normal_y)
221  || !std::isfinite (scene_normals_->points[i].normal_z))
222  continue;
223 
224  scene_normals_->points[j] = scene_normals_->points[i];
225  scene_cloud_downsampled_->points[j] = scene_cloud_downsampled_->points[i];
226 
227  j++;
228  }
229 
230  scene_normals_->points.resize (j);
231  scene_normals_->width = j;
232  scene_normals_->height = 1;
233 
234  scene_cloud_downsampled_->points.resize (j);
235  scene_cloud_downsampled_->width = j;
236  scene_cloud_downsampled_->height = 1;
237 
238  explained_by_RM_.resize (scene_cloud_downsampled_->points.size (), 0);
239  explained_by_RM_distance_weighted.resize (scene_cloud_downsampled_->points.size (), 0.f);
240  unexplained_by_RM_neighboorhods.resize (scene_cloud_downsampled_->points.size (), 0.f);
241 
242  //compute segmentation of the scene if detect_clutter_
243  if (detect_clutter_)
244  {
245  //initialize kdtree for search
246  scene_downsampled_tree_.reset (new pcl::search::KdTree<SceneT>);
247  scene_downsampled_tree_->setInputCloud (scene_cloud_downsampled_);
248 
249  std::vector<pcl::PointIndices> clusters;
250  double eps_angle_threshold = 0.2;
251  int min_points = 20;
252  float curvature_threshold = 0.045f;
253 
254  extractEuclideanClustersSmooth<SceneT, pcl::Normal> (*scene_cloud_downsampled_, *scene_normals_, inliers_threshold_ * 2.f, scene_downsampled_tree_,
255  clusters, eps_angle_threshold, curvature_threshold, min_points);
256 
257  clusters_cloud_.reset (new pcl::PointCloud<pcl::PointXYZI>);
258  clusters_cloud_->points.resize (scene_cloud_downsampled_->points.size ());
259  clusters_cloud_->width = scene_cloud_downsampled_->width;
260  clusters_cloud_->height = 1;
261 
262  for (size_t i = 0; i < scene_cloud_downsampled_->points.size (); i++)
263  {
264  pcl::PointXYZI p;
265  p.getVector3fMap () = scene_cloud_downsampled_->points[i].getVector3fMap ();
266  p.intensity = 0.f;
267  clusters_cloud_->points[i] = p;
268  }
269 
270  float intens_incr = 100.f / static_cast<float> (clusters.size ());
271  float intens = intens_incr;
272  for (const auto &cluster : clusters)
273  {
274  for (const auto &vertex : cluster.indices)
275  {
276  clusters_cloud_->points[vertex].intensity = intens;
277  }
278 
279  intens += intens_incr;
280  }
281  }
282 
283  //compute cues
284  {
285  pcl::ScopeTime tcues ("Computing cues");
286  recognition_models_.resize (complete_models_.size ());
287  int valid = 0;
288  for (int i = 0; i < static_cast<int> (complete_models_.size ()); i++)
289  {
290  //create recognition model
291  recognition_models_[valid].reset (new RecognitionModel ());
292  if(addModel (visible_models_[i], complete_models_[i], recognition_models_[valid])) {
293  indices_[valid] = i;
294  valid++;
295  }
296  }
297 
298  recognition_models_.resize(valid);
299  indices_.resize(valid);
300  }
301 
302  //compute the bounding boxes for the models
303  ModelT min_pt_all, max_pt_all;
304  min_pt_all.x = min_pt_all.y = min_pt_all.z = std::numeric_limits<float>::max ();
305  max_pt_all.x = max_pt_all.y = max_pt_all.z = (std::numeric_limits<float>::max () - 0.001f) * -1;
306 
307  for (size_t i = 0; i < recognition_models_.size (); i++)
308  {
309  ModelT min_pt, max_pt;
310  pcl::getMinMax3D (*complete_models_[indices_[i]], min_pt, max_pt);
311  if (min_pt.x < min_pt_all.x)
312  min_pt_all.x = min_pt.x;
313 
314  if (min_pt.y < min_pt_all.y)
315  min_pt_all.y = min_pt.y;
316 
317  if (min_pt.z < min_pt_all.z)
318  min_pt_all.z = min_pt.z;
319 
320  if (max_pt.x > max_pt_all.x)
321  max_pt_all.x = max_pt.x;
322 
323  if (max_pt.y > max_pt_all.y)
324  max_pt_all.y = max_pt.y;
325 
326  if (max_pt.z > max_pt_all.z)
327  max_pt_all.z = max_pt.z;
328  }
329 
330  int size_x, size_y, size_z;
331  size_x = static_cast<int> (std::ceil (std::abs (max_pt_all.x - min_pt_all.x) / res_occupancy_grid_)) + 1;
332  size_y = static_cast<int> (std::ceil (std::abs (max_pt_all.y - min_pt_all.y) / res_occupancy_grid_)) + 1;
333  size_z = static_cast<int> (std::ceil (std::abs (max_pt_all.z - min_pt_all.z) / res_occupancy_grid_)) + 1;
334 
335  complete_cloud_occupancy_by_RM_.resize (size_x * size_y * size_z, 0);
336 
337  for (size_t i = 0; i < recognition_models_.size (); i++)
338  {
339 
340  std::map<int, bool> banned;
341  std::map<int, bool>::iterator banned_it;
342 
343  for (size_t j = 0; j < complete_models_[indices_[i]]->points.size (); j++)
344  {
345  int pos_x, pos_y, pos_z;
346  pos_x = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].x - min_pt_all.x) / res_occupancy_grid_));
347  pos_y = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].y - min_pt_all.y) / res_occupancy_grid_));
348  pos_z = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].z - min_pt_all.z) / res_occupancy_grid_));
349 
350  int idx = pos_z * size_x * size_y + pos_y * size_x + pos_x;
351  banned_it = banned.find (idx);
352  if (banned_it == banned.end ())
353  {
354  complete_cloud_occupancy_by_RM_[idx]++;
355  recognition_models_[i]->complete_cloud_occupancy_indices_.push_back (idx);
356  banned[idx] = true;
357  }
358  }
359  }
360 
361  {
362  pcl::ScopeTime tcues ("Computing clutter cues");
363 #pragma omp parallel for schedule(dynamic, 4) num_threads(omp_get_num_procs())
364  for (int j = 0; j < static_cast<int> (recognition_models_.size ()); j++)
365  computeClutterCue (recognition_models_[j]);
366  }
367 
368  cc_.clear ();
369  n_cc_ = 1;
370  cc_.resize (n_cc_);
371  for (size_t i = 0; i < recognition_models_.size (); i++)
372  cc_[0].push_back (static_cast<int> (i));
373 
374 }
375 
/** \brief Optimize the activation mask of one connected component of hypotheses
  * with simulated annealing (mets library).
  * \param[in] cc_indices indices (into recognition_models_) of this component
  * \param[in,out] initial_solution starting activation mask; overwritten with
  *                the best solution found
  */
template<typename ModelT, typename SceneT>
void pcl::GlobalHypothesesVerification<ModelT, SceneT>::SAOptimize(std::vector<int> & cc_indices, std::vector<bool> & initial_solution)
{

  //temporal copy of recognition_models_; restricted to this component below and
  //restored at the end of this function
  std::vector<RecognitionModelPtr> recognition_models_copy;
  recognition_models_copy = recognition_models_;

  recognition_models_.clear ();

  for (const int &cc_index : cc_indices)
  {
    recognition_models_.push_back (recognition_models_copy[cc_index]);
  }

  // Accumulate, over the component's models, how often each scene point is
  // explained (and with which summed weight), plus the clutter weights.
  for (size_t j = 0; j < recognition_models_.size (); j++)
  {
    RecognitionModelPtr recog_model = recognition_models_[j];
    for (size_t i = 0; i < recog_model->explained_.size (); i++)
    {
      explained_by_RM_[recog_model->explained_[i]]++;
      explained_by_RM_distance_weighted[recog_model->explained_[i]] += recog_model->explained_distances_[i];
    }

    if (detect_clutter_)
    {
      for (size_t i = 0; i < recog_model->unexplained_in_neighborhood.size (); i++)
      {
        unexplained_by_RM_neighboorhods[recog_model->unexplained_in_neighborhood[i]] += recog_model->unexplained_in_neighborhood_weights[i];
      }
    }
  }

  // Count occupancy-grid voxels claimed by more than one hypothesis
  int occupied_multiple = 0;
  for(size_t i=0; i < complete_cloud_occupancy_by_RM_.size(); i++) {
    if(complete_cloud_occupancy_by_RM_[i] > 1) {
      occupied_multiple+=complete_cloud_occupancy_by_RM_[i];
    }
  }

  setPreviousDuplicityCM(occupied_multiple);
  //do optimization
  //Define model SAModel, initial solution is all models activated

  int duplicity;
  float good_information_ = getTotalExplainedInformation (explained_by_RM_, explained_by_RM_distance_weighted, &duplicity);
  float bad_information_ = 0;
  float unexplained_in_neighboorhod = getUnexplainedInformationInNeighborhood (unexplained_by_RM_neighboorhods, explained_by_RM_);

  for (size_t i = 0; i < initial_solution.size (); i++)
  {
    if (initial_solution[i])
      bad_information_ += recognition_models_[i]->outliers_weight_ * static_cast<float> (recognition_models_[i]->bad_information_);
  }

  // Seed the incremental bookkeeping used by evaluateSolution() — must happen
  // before the annealer starts flipping hypotheses.
  setPreviousExplainedValue (good_information_);
  setPreviousDuplicity (duplicity);
  setPreviousBadInfo (bad_information_);
  setPreviousUnexplainedValue (unexplained_in_neighboorhod);

  // Cost of the initial solution; negated because mets minimizes while our
  // objective is a maximization.
  SAModel model;
  model.cost_ = static_cast<mets::gol_type> ((good_information_ - bad_information_
      - static_cast<float> (duplicity)
      - static_cast<float> (occupied_multiple) * w_occupied_multiple_cm_
      - static_cast<float> (recognition_models_.size ())
      - unexplained_in_neighboorhod) * -1.f);

  model.setSolution (initial_solution);
  model.setOptimizer (this);
  SAModel best (model);

  move_manager neigh (static_cast<int> (cc_indices.size ()));

  mets::best_ever_solution best_recorder (best);
  mets::noimprove_termination_criteria noimprove (max_iterations_);
  mets::linear_cooling linear_cooling;
  mets::simulated_annealing<move_manager> sa (model, best_recorder, neigh, noimprove, linear_cooling, initial_temp_, 1e-7, 2);
  sa.setApplyAndEvaluate(true);

  {
    pcl::ScopeTime t ("SA search...");
    sa.search ();
  }

  // Copy the best solution seen during the search back to the caller
  best_seen_ = static_cast<const SAModel&> (best_recorder.best_seen ());
  for (size_t i = 0; i < best_seen_.solution_.size (); i++)
  {
    initial_solution[i] = best_seen_.solution_[i];
  }

  // Restore the full model set (only this component's subset was optimized)
  recognition_models_ = recognition_models_copy;

}
469 
470 ///////////////////////////////////////////////////////////////////////////////////////////////////
471 template<typename ModelT, typename SceneT>
473 {
474  initialize ();
475 
476  //for each connected component, find the optimal solution
477  for (int c = 0; c < n_cc_; c++)
478  {
479  //TODO: Check for trivial case...
480  //TODO: Check also the number of hypotheses and use exhaustive enumeration if smaller than 10
481  std::vector<bool> subsolution (cc_[c].size (), true);
482  SAOptimize (cc_[c], subsolution);
483  for (size_t i = 0; i < subsolution.size (); i++)
484  {
485  mask_[indices_[cc_[c][i]]] = (subsolution[i]);
486  }
487  }
488 }
489 
490 template<typename ModelT, typename SceneT>
492  typename pcl::PointCloud<ModelT>::ConstPtr & complete_model, RecognitionModelPtr & recog_model)
493 {
494  //voxelize model cloud
495  recog_model->cloud_.reset (new pcl::PointCloud<ModelT> ());
496  recog_model->complete_cloud_.reset (new pcl::PointCloud<ModelT> ());
497 
498  float size_model = resolution_;
499  pcl::VoxelGrid<ModelT> voxel_grid;
500  voxel_grid.setInputCloud (model);
501  voxel_grid.setLeafSize (size_model, size_model, size_model);
502  voxel_grid.filter (*(recog_model->cloud_));
503 
504  pcl::VoxelGrid<ModelT> voxel_grid2;
505  voxel_grid2.setInputCloud (complete_model);
506  voxel_grid2.setLeafSize (size_model, size_model, size_model);
507  voxel_grid2.filter (*(recog_model->complete_cloud_));
508 
509  {
510  //check nans...
511  int j = 0;
512  for (size_t i = 0; i < recog_model->cloud_->points.size (); ++i)
513  {
514  if (!std::isfinite (recog_model->cloud_->points[i].x) || !std::isfinite (recog_model->cloud_->points[i].y)
515  || !std::isfinite (recog_model->cloud_->points[i].z))
516  continue;
517 
518  recog_model->cloud_->points[j] = recog_model->cloud_->points[i];
519  j++;
520  }
521 
522  recog_model->cloud_->points.resize (j);
523  recog_model->cloud_->width = j;
524  recog_model->cloud_->height = 1;
525  }
526 
527  if (recog_model->cloud_->points.empty ())
528  {
529  PCL_WARN("The model cloud has no points..\n");
530  return false;
531  }
532 
533  //compute normals unless given (now do it always...)
536  recog_model->normals_.reset (new pcl::PointCloud<pcl::Normal> ());
537  normals_tree->setInputCloud (recog_model->cloud_);
538  n3d.setRadiusSearch (radius_normals_);
539  n3d.setSearchMethod (normals_tree);
540  n3d.setInputCloud ((recog_model->cloud_));
541  n3d.compute (*(recog_model->normals_));
542 
543  //check nans...
544  int j = 0;
545  for (size_t i = 0; i < recog_model->normals_->points.size (); ++i)
546  {
547  if (!std::isfinite (recog_model->normals_->points[i].normal_x) || !std::isfinite (recog_model->normals_->points[i].normal_y)
548  || !std::isfinite (recog_model->normals_->points[i].normal_z))
549  continue;
550 
551  recog_model->normals_->points[j] = recog_model->normals_->points[i];
552  recog_model->cloud_->points[j] = recog_model->cloud_->points[i];
553  j++;
554  }
555 
556  recog_model->normals_->points.resize (j);
557  recog_model->normals_->width = j;
558  recog_model->normals_->height = 1;
559 
560  recog_model->cloud_->points.resize (j);
561  recog_model->cloud_->width = j;
562  recog_model->cloud_->height = 1;
563 
564  std::vector<int> explained_indices;
565  std::vector<float> outliers_weight;
566  std::vector<float> explained_indices_distances;
567  std::vector<float> unexplained_indices_weights;
568 
569  std::vector<int> nn_indices;
570  std::vector<float> nn_distances;
571 
572  std::map<int, std::shared_ptr<std::vector<std::pair<int, float>>>> model_explains_scene_points; //which point i from the scene is explained by a points j_k with dist d_k from the model
573 
574  outliers_weight.resize (recog_model->cloud_->points.size ());
575  recog_model->outlier_indices_.resize (recog_model->cloud_->points.size ());
576 
577  size_t o = 0;
578  for (size_t i = 0; i < recog_model->cloud_->points.size (); i++)
579  {
580  if (!scene_downsampled_tree_->radiusSearch (recog_model->cloud_->points[i], inliers_threshold_, nn_indices, nn_distances, std::numeric_limits<int>::max ()))
581  {
582  //outlier
583  outliers_weight[o] = regularizer_;
584  recog_model->outlier_indices_[o] = static_cast<int> (i);
585  o++;
586  } else
587  {
588  for (size_t k = 0; k < nn_distances.size (); k++)
589  {
590  std::pair<int, float> pair = std::make_pair (i, nn_distances[k]); //i is a index to a model point and then distance
591  auto it = model_explains_scene_points.find (nn_indices[k]);
592  if (it == model_explains_scene_points.end ())
593  {
594  std::shared_ptr<std::vector<std::pair<int, float>>> vec (new std::vector<std::pair<int, float>> ());
595  vec->push_back (pair);
596  model_explains_scene_points[nn_indices[k]] = vec;
597  } else
598  {
599  it->second->push_back (pair);
600  }
601  }
602  }
603  }
604 
605  outliers_weight.resize (o);
606  recog_model->outlier_indices_.resize (o);
607 
608  recog_model->outliers_weight_ = (std::accumulate (outliers_weight.begin (), outliers_weight.end (), 0.f) / static_cast<float> (outliers_weight.size ()));
609  if (outliers_weight.empty ())
610  recog_model->outliers_weight_ = 1.f;
611 
612  pcl::IndicesPtr indices_scene (new std::vector<int>);
613  //go through the map and keep the closest model point in case that several model points explain a scene point
614 
615  int p = 0;
616 
617  for (auto it = model_explains_scene_points.cbegin (); it != model_explains_scene_points.cend (); it++, p++)
618  {
619  size_t closest = 0;
620  float min_d = std::numeric_limits<float>::min ();
621  for (size_t i = 0; i < it->second->size (); i++)
622  {
623  if (it->second->at (i).second > min_d)
624  {
625  min_d = it->second->at (i).second;
626  closest = i;
627  }
628  }
629 
630  float d = it->second->at (closest).second;
631  float d_weight = -(d * d / (inliers_threshold_)) + 1;
632 
633  //it->first is index to scene point
634  //using normals to weight inliers
635  Eigen::Vector3f scene_p_normal = scene_normals_->points[it->first].getNormalVector3fMap ();
636  Eigen::Vector3f model_p_normal = recog_model->normals_->points[it->second->at (closest).first].getNormalVector3fMap ();
637  float dotp = scene_p_normal.dot (model_p_normal) * 1.f; //[-1,1] from antiparallel trough perpendicular to parallel
638 
639  if (dotp < 0.f)
640  dotp = 0.f;
641 
642  explained_indices.push_back (it->first);
643  explained_indices_distances.push_back (d_weight * dotp);
644 
645  }
646 
647  recog_model->bad_information_ = static_cast<int> (recog_model->outlier_indices_.size ());
648  recog_model->explained_ = explained_indices;
649  recog_model->explained_distances_ = explained_indices_distances;
650 
651  return true;
652 }
653 
/** \brief Compute the clutter cue for one hypothesis: scene points in the
  * neighborhood of the model's explained points that the model does NOT
  * explain, each with a weight (higher = stronger clutter evidence).
  * No-op unless detect_clutter_ is set.
  */
template<typename ModelT, typename SceneT>
void pcl::GlobalHypothesesVerification<ModelT, SceneT>::computeClutterCue(RecognitionModelPtr & recog_model)
{
  if (detect_clutter_)
  {

    float rn_sqr = radius_neighborhood_GO_ * radius_neighborhood_GO_;
    std::vector<int> nn_indices;
    std::vector<float> nn_distances;

    std::vector < std::pair<int, int> > neighborhood_indices; //first is indices to scene point and second is indices to explained_ scene points
    for (int i = 0; i < static_cast<int> (recog_model->explained_.size ()); i++)
    {
      if (scene_downsampled_tree_->radiusSearch (scene_cloud_downsampled_->points[recog_model->explained_[i]], radius_neighborhood_GO_, nn_indices,
          nn_distances, std::numeric_limits<int>::max ()))
      {
        for (size_t k = 0; k < nn_distances.size (); k++)
        {
          if (nn_indices[k] != i)
            neighborhood_indices.emplace_back (nn_indices[k], i);
        }
      }
    }

    //sort neighborhood indices by id
    std::sort (neighborhood_indices.begin (), neighborhood_indices.end (),
        [] (const auto& p1, const auto& p2) { return p1.first < p2.first; });

    //erase duplicated unexplained points (keeps the first explained point
    //that produced each neighbor)
    neighborhood_indices.erase (
        std::unique (neighborhood_indices.begin (), neighborhood_indices.end (),
            [] (const auto& p1, const auto& p2) { return p1.first == p2.first; }), neighborhood_indices.end ());

    //sort explained points so both lists can be merged in a single pass below
    std::vector<int> exp_idces (recog_model->explained_);
    std::sort (exp_idces.begin (), exp_idces.end ());

    recog_model->unexplained_in_neighborhood.resize (neighborhood_indices.size ());
    recog_model->unexplained_in_neighborhood_weights.resize (neighborhood_indices.size ());

    // Merge pass over two sorted index lists: neighbors also present in
    // exp_idces are explained by the hypothesis and skipped; the rest become
    // clutter points with a weight.
    size_t p = 0;
    size_t j = 0;
    for (const auto &neighborhood_index : neighborhood_indices)
    {
      if ((j < exp_idces.size ()) && (neighborhood_index.first == exp_idces[j]))
      {
        //this index is explained by the hypothesis so ignore it, advance j
        j++;
      } else
      {
        //indices_in_nb[i] < exp_idces[j]
        //recog_model->unexplained_in_neighborhood.push_back(neighborhood_indices[i]);
        recog_model->unexplained_in_neighborhood[p] = neighborhood_index.first;

        // Clutter point in the same smooth segment (same non-zero intensity
        // label) as the explained point: penalize with the fixed regularizer
        if (clusters_cloud_->points[recog_model->explained_[neighborhood_index.second]].intensity != 0.f
            && (clusters_cloud_->points[recog_model->explained_[neighborhood_index.second]].intensity
                == clusters_cloud_->points[neighborhood_index.first].intensity))
        {

          recog_model->unexplained_in_neighborhood_weights[p] = clutter_regularizer_;

        } else
        {
          //neighborhood_indices[i].first gives the index to the scene point and second to the explained scene point by the model causing this...
          //calculate weight of this clutter point based on the distance of the scene point and the model point causing it
          float d = static_cast<float> (pow (
              (scene_cloud_downsampled_->points[recog_model->explained_[neighborhood_index.second]].getVector3fMap ()
                  - scene_cloud_downsampled_->points[neighborhood_index.first].getVector3fMap ()).norm (), 2));
          float d_weight = -(d / rn_sqr) + 1; //points that are close have a strong weight*/

          //using normals to weight clutter points
          Eigen::Vector3f scene_p_normal = scene_normals_->points[neighborhood_index.first].getNormalVector3fMap ();
          Eigen::Vector3f model_p_normal = scene_normals_->points[recog_model->explained_[neighborhood_index.second]].getNormalVector3fMap ();
          float dotp = scene_p_normal.dot (model_p_normal); //[-1,1] from antiparallel through perpendicular to parallel

          if (dotp < 0)
            dotp = 0.f;

          recog_model->unexplained_in_neighborhood_weights[p] = d_weight * dotp;
        }
        p++;
      }
    }

    // Shrink to the number of clutter points actually found
    recog_model->unexplained_in_neighborhood_weights.resize (p);
    recog_model->unexplained_in_neighborhood.resize (p);
  }
}
742 
743 #define PCL_INSTANTIATE_GoHV(T1,T2) template class PCL_EXPORTS pcl::GlobalHypothesesVerification<T1,T2>;
744 
745 #endif /* PCL_RECOGNITION_IMPL_HV_GO_HPP_ */
746 
std::vector< PointT, Eigen::aligned_allocator< PointT > > points
The point data.
Definition: point_cloud.h:411
virtual int radiusSearch(const PointT &point, double radius, std::vector< int > &k_indices, std::vector< float > &k_sqr_distances, unsigned int max_nn=0) const =0
Search for all the nearest neighbors of the query point in a given radius.
Class to measure the time spent in a scope.
Definition: time.h:110
std::vector< int > indices
Definition: PointIndices.h:19
void setRadiusSearch(double radius)
Set the sphere radius that is to be used for determining the nearest neighbors used for the feature e...
Definition: feature.h:200
void filter(PointCloud &output)
Calls the filtering method and returns the filtered dataset in output.
Definition: filter.h:130
VoxelGrid assembles a local 3D grid over a given PointCloud, and downsamples + filters the data...
Definition: voxel_grid.h:177
NormalEstimation estimates local surface properties (surface normals and curvatures) at each 3D point...
Definition: normal_3d.h:242
boost::shared_ptr< Indices > IndicesPtr
Definition: pcl_base.h:61
void setInputCloud(const PointCloudConstPtr &cloud, const IndicesConstPtr &indices=IndicesConstPtr()) override
Provide a pointer to the input dataset.
Definition: kdtree.hpp:77
boost::shared_ptr< KdTree< PointT, Tree > > Ptr
Definition: kdtree.h:78
void getMinMax3D(const pcl::PointCloud< PointT > &cloud, PointT &min_pt, PointT &max_pt)
Get the minimum and maximum values on each of the 3 (x-y-z) dimensions in a given pointcloud...
Definition: common.hpp:242
::pcl::PCLHeader header
Definition: PointIndices.h:17
virtual PointCloudConstPtr getInputCloud() const
Get a pointer to the input point cloud dataset.
Definition: search.h:124
PointCloud represents the base class in PCL for storing collections of 3D points. ...
pcl::PCLHeader header
The point cloud header.
Definition: point_cloud.h:408
void setSearchMethod(const KdTreePtr &tree)
Provide a pointer to the search object.
Definition: feature.h:166
boost::shared_ptr< const PointCloud< PointT > > ConstPtr
Definition: point_cloud.h:430
virtual void setInputCloud(const PointCloudConstPtr &cloud)
Provide a pointer to the input dataset.
Definition: pcl_base.hpp:65
void compute(PointCloudOut &output)
Base method for feature estimation for all points given in <setInputCloud (), setIndices ()> using th...
Definition: feature.hpp:192
A hypothesis verification method proposed in "A Global Hypotheses Verification Method for 3D Object R...
Definition: hv_go.h:34
boost::shared_ptr< pcl::search::Search< PointT > > Ptr
Definition: search.h:80
void setInputCloud(const PointCloudConstPtr &cloud) override
Provide a pointer to the input dataset.
Definition: normal_3d.h:331
void setLeafSize(const Eigen::Vector4f &leaf_size)
Set the voxel grid leaf size.
Definition: voxel_grid.h:222