Harris3D
pcl的原理Harris3d详见:链接
更加准确的Harris3d论文:《Harris 3D: a robust extension of the Harris operator for interest
point detection on 3D meshes》
- 核心代码
// Harris3D detector setup: template args are <input point type, output point type>.
pcl::HarrisKeypoint3D<PointType,OUTPointType> detector;//<input point type, output point type>
detector.setNonMaxSupression(true);//enable non-maximum suppression (note: PCL's API misspells "Suppression")
detector.setRadiusSearch(resolution);//neighborhood search radius used for the covariance / response
detector.setThreshold(1e-7); //minimum Harris response for a point to count as a keypoint
detector.setInputCloud(cloud);
detector.setSearchMethod(tree);
- 实例
#include <pcl/point_cloud.h>
#include <pcl/keypoints/harris_3d.h>
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/search/flann_search.h>
#include <pcl/octree/octree_search.h>
#include <pcl/octree/octree.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/io/ply_io.h>
#include <chrono>
#define PointType pcl::PointXYZ
#define OUTPointType pcl::PointXYZI
using namespace std;
using namespace pcl;
// Build a two-viewport visualizer: the raw input cloud on the left half,
// the detected keypoints (rendered green) on the right half.
boost::shared_ptr<pcl::visualization::PCLVisualizer> RGB_viwer(pcl::PointCloud<PointType>::Ptr cloud,
                                                               pcl::PointCloud<OUTPointType>::Ptr cloud1)
{
    boost::shared_ptr<pcl::visualization::PCLVisualizer> vis(
        new pcl::visualization::PCLVisualizer("3D Viewer"));

    // Left viewport: the plain input cloud, point size 3.
    int left = 0;
    vis->createViewPort(0.0, 0.0, 0.5, 1.0, left);
    vis->addPointCloud<PointType>(cloud, "simple cloud", left);
    vis->setPointCloudRenderingProperties(
        pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "simple cloud", left);

    // Right viewport: the keypoint cloud, colored green, point size 3.
    int right = 0;
    vis->createViewPort(0.5, 0.0, 1, 1.0, right);
    pcl::visualization::PointCloudColorHandlerCustom<OUTPointType> green(cloud1, 0, 255, 0);
    vis->addPointCloud<OUTPointType>(cloud1, green, "XYZ add color1", right);
    vis->setPointCloudRenderingProperties(
        pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "XYZ add color1", right);

    vis->addCoordinateSystem(1.0);
    return vis;
}
using std::chrono::system_clock;
int main(int argc, const char** argv) {
PointCloud<PointType>::Ptr cloud(new PointCloud<PointType>() );
PointCloud<OUTPointType>::Ptr cloudout(new PointCloud<OUTPointType>() );
search::KdTree<PointType>::Ptr tree(new search::KdTree<PointType>());
//io::loadPCDFile("/home/n1/notes/pcl/harris3d/test.pcd",*cloud);
io::loadPLYFile("/home/n1/notes/pcl/harris3d/init.ply",*cloud);
cout<<"点云大小:"<<cloud->size()<<endl;
//cout<<"点云大小:"<<cloud->size()<<endl;
double resolution=0.7;//分辨率
pcl::HarrisKeypoint3D<PointType,OUTPointType> detector;//<输入点云类型,输出类型>
detector.setNonMaxSupression(true);//开启极大值抑制
detector.setRadiusSearch(resolution);//搜索半径
detector.setThreshold(1e-7); //阈值
detector.setInputCloud(cloud);
detector.setSearchMethod(tree);
system_clock::time_point t1 = system_clock::now();
detector.compute(*cloudout);
system_clock::time_point t2 = system_clock::now();
std::chrono::duration<double, std::milli> time_diff = std::chrono::duration<double,std::milli>(t2-t1);
cout<<"点云大小:"<<cloudout->size()<<endl;
cout<<"运行时间:"<<time_diff.count()<<endl;
//可视化
boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer ("3D Viewer"));
while (!viewer->wasStopped()){
viewer=RGB_viwer(cloud,cloudout);
resolution+=0.01;
detector.setRadiusSearch(resolution);//搜索半径
detector.compute(*cloudout);
cout<<"点云大小:"<<cloudout->size()<<endl;
viewer->spinOnce (10000);
boost::this_thread::sleep (boost::posix_time::microseconds (300000));
}
return 0;
}
Narf3D
在边缘提取使用的就是narf关键点,详见边缘提取
- 核心代码:
// Build a spherical range image from the cloud and run the NARF detector.
Eigen::Affine3f sensor_pose= (Eigen::Affine3f)Eigen::Translation3f(0.0f, 0.0f, 0.0f); // sensor at the origin
float noise_level=0.0;
float min_range=0.0;
int border_size=1;
std::cout<<"1:"<<endl; // debug progress marker
pcl::RangeImage::CoordinateFrame coordinate_frame=pcl::RangeImage::CAMERA_FRAME;
boost::shared_ptr<pcl::RangeImage> range_image_ptr(new pcl::RangeImage());
float angulareslution=(float)(0.5f*(M_PI/180.0f)); // angular resolution: 0.5 degree (original comment said 1 degree)
range_image_ptr->createFromPointCloud(*cloud,angulareslution,pcl::deg2rad(360.0f),pcl::deg2rad(180.0f),
sensor_pose,coordinate_frame,noise_level,min_range,border_size);
range_image_ptr->integrateFarRanges(far_ranges);
int support_size=1;
pcl::RangeImageBorderExtractor narfextrator;// border extractor used internally by the NARF detector
pcl::NarfKeypoint narf_keyPoints_detector(&narfextrator);// keypoint detector (the original comment said "descriptor")
pcl::RangeImage& range_image = *range_image_ptr;
narf_keyPoints_detector.setRangeImage(&range_image); // input range image
narf_keyPoints_detector.getParameters().support_size=support_size;// size of the support region
narf_keyPoints_detector.getParameters().max_no_of_interest_points=-1;// max number of keypoints to return (-1 = unlimited)
narf_keyPoints_detector.getParameters().min_distance_between_interest_points=0.08;// min distance between keypoints, scaled by support_size
narf_keyPoints_detector.getParameters().optimal_distance_to_high_surface_change=0.10;// distance to surface change, scaled by support_size
narf_keyPoints_detector.getParameters().min_interest_value=0.30;// minimum interest value to accept a keypoint
narf_keyPoints_detector.getParameters().optimal_range_image_patch_size=10;// patch size (pixels) used for extraction
narf_keyPoints_detector.getParameters().distance_for_additional_points=10;// also pick points below min_interest_value within this range, scaled by support_size
narf_keyPoints_detector.getParameters().add_points_on_straight_edges=false;// when true, points on straight edges are also candidates
narf_keyPoints_detector.getParameters().do_non_maximum_suppression=true;// non-maximum suppression
narf_keyPoints_detector.getParameters().no_of_polynomial_approximations_per_point=true;// NOTE(review): this parameter is an int count, not a flag — `true` silently becomes 1; confirm intended value
narf_keyPoints_detector.getParameters().max_no_of_threads=1;// max threads used for computation
narf_keyPoints_detector.getParameters().use_recursive_scale_reduction=false;// downsample recursively to speed up extraction
narf_keyPoints_detector.getParameters().calculate_sparse_interest_image=true;// use the heuristic sparse interest image
pcl::PointCloud<int> keypoint_indices; // keypoint indices into the range image
narf_keyPoints_detector.compute(keypoint_indices);
- 实例
#include <pcl/keypoints/narf_keypoint.h>
#include <pcl/io/pcd_io.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/range_image/range_image.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/features/range_image_border_extractor.h>
#include <pcl/keypoints/narf_keypoint.h>
#include <pcl/filters/statistical_outlier_removal.h>
#include <Eigen/Eigen>
using namespace std;
using namespace pcl;
typedef pcl::PointXYZ PointType;
// Render the input cloud (point size 1) with the keypoint cloud drawn
// over it in green (point size 3), in a single shared viewport.
boost::shared_ptr<pcl::visualization::PCLVisualizer> RGB_viwer(pcl::PointCloud<PointType>::Ptr cloud,
                                                               pcl::PointCloud<PointType>::Ptr cloud1)
{
    boost::shared_ptr<pcl::visualization::PCLVisualizer> vis(
        new pcl::visualization::PCLVisualizer("3D Viewer"));

    // Base cloud, small points.
    vis->addPointCloud<PointType>(cloud, "simple cloud");
    vis->setPointCloudRenderingProperties(
        pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "simple cloud");

    // Keypoints, green and larger so they stand out.
    pcl::visualization::PointCloudColorHandlerCustom<PointType> green(cloud1, 0, 255, 0);
    vis->addPointCloud<PointType>(cloud1, green, "XYZ add color1");
    vis->setPointCloudRenderingProperties(
        pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "XYZ add color1");

    return vis;
}
int main(int argc, const char** argv) {
PointCloud<PointType>::Ptr cloud(new PointCloud<PointType>());
pcl::PointCloud<pcl::PointWithViewpoint> far_ranges;
io::loadPCDFile("/home/n1/notes/pcl/narf3d/test.pcd",*cloud);
io::loadPCDFile("/home/n1/notes/pcl/narf3d/test.pcd",far_ranges);
StatisticalOutlierRemoval<PointXYZ> SOR;// 初始化对像
SOR.setInputCloud(cloud);
SOR.setMeanK(50); ///最临近距离求取均值点的个数
SOR.setStddevMulThresh(1); //设置阈值 阈值=均值+设置系数*标准差 小于阈值为内点,大于阈值为离群点
SOR.filter(*cloud);
Eigen::Affine3f sensor_pose= (Eigen::Affine3f)Eigen::Translation3f(0.0f, 0.0f, 0.0f);
float noise_level=0.0;
float min_range=0.0;
int border_size=1;
std::cout<<"1:"<<endl;
pcl::RangeImage::CoordinateFrame coordinate_frame=pcl::RangeImage::CAMERA_FRAME;
boost::shared_ptr<pcl::RangeImage> range_image_ptr(new pcl::RangeImage());
float angulareslution=(float)(0.5f*(M_PI/180.0f)); //1度
range_image_ptr->createFromPointCloud(*cloud,angulareslution,pcl::deg2rad(360.0f),pcl::deg2rad(180.0f),
sensor_pose,coordinate_frame,noise_level,min_range,border_size);
range_image_ptr->integrateFarRanges(far_ranges);
int support_size=1;
pcl::RangeImageBorderExtractor narfextrator;//特征点
pcl::NarfKeypoint narf_keyPoints_detector(&narfextrator);//初始化描述子
pcl::RangeImage& range_image = *range_image_ptr;
narf_keyPoints_detector.setRangeImage(&range_image); //设置输入点云
narf_keyPoints_detector.getParameters().support_size=support_size;//设置支撑平面大小
narf_keyPoints_detector.getParameters().max_no_of_interest_points=-1;//返回最大关键点个数
narf_keyPoints_detector.getParameters().min_distance_between_interest_points=0.08;//两个关键点之间的最小距离 min_distance_between_interest_points*support_size
narf_keyPoints_detector.getParameters().optimal_distance_to_high_surface_change=0.10;//变化大小optimal_distance_to_high_surface_change*support_size
narf_keyPoints_detector.getParameters().min_interest_value=0.30;//考虑作为关键的最小值
narf_keyPoints_detector.getParameters().optimal_range_image_patch_size=10;//提取块大小
narf_keyPoints_detector.getParameters().distance_for_additional_points=10;//低于该范围叶会提取关键点包多小于min_interest_value distance_for_additional_points*support_size
narf_keyPoints_detector.getParameters().add_points_on_straight_edges=false;//作当为true 时位于straight edges的关键点也会备选取
narf_keyPoints_detector.getParameters().do_non_maximum_suppression=true;//极大值抑制
narf_keyPoints_detector.getParameters().no_of_polynomial_approximations_per_point=true;//使用双变量函数拟合求解关键点
narf_keyPoints_detector.getParameters().max_no_of_threads=1;//求解操作的最大线程数
narf_keyPoints_detector.getParameters().use_recursive_scale_reduction=false;//使用降采样提高提取速度
narf_keyPoints_detector.getParameters().calculate_sparse_interest_image=true;//是否开启启发式算法提取关键点
pcl::PointCloud<int> keypoint_indices; //索引值
narf_keyPoints_detector.compute(keypoint_indices);
std::cout<<"keypoint_indices.size():"<<keypoint_indices.size()<<endl;
pcl::PointCloud<PointType>::Ptr keypoints_ptr(new pcl::PointCloud<PointType>());//特征点
keypoints_ptr->points.resize(keypoint_indices.points.size());
for(size_t i=0;i<keypoint_indices.points.size();++i)
{
keypoints_ptr->points[i].x=range_image_ptr->points[keypoint_indices.points[i]].x;
keypoints_ptr->points[i].y=range_image_ptr->points[keypoint_indices.points[i]].y;
keypoints_ptr->points[i].z=range_image_ptr->points[keypoint_indices.points[i]].z;
}
boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer(new pcl::visualization::PCLVisualizer("3D Viewer"));
viewer=RGB_viwer(cloud,keypoints_ptr);
while (!viewer->wasStopped()){
viewer->spinOnce (100);
boost::this_thread::sleep (boost::posix_time::microseconds (100000));
}
return 0;
}
SIFT
从图像领域的《Distinctive image features from scale-invariant keypoints》拓展
翻译:详见链接1
简略链接2
- 简要
- Dog空间:类似于图像中的做法,对点云进行高斯模糊:对某一点取固定半径的邻域,在邻域内用高斯权重加权得到一个新的强度值,从而得到一个尺度空间;类似地得到多个尺度空间,相邻的尺度空间相减,就得到了4维的DOG空间
- 寻找局部最大值:类似的,在4维空间中,寻找每一个点的高斯差值在其k个邻域内,是否具有局部最大值/最小值,如果有则为关键点
- 这样我们将会得到一个关键点+尺度
- pcl中的sift只提取了关键点并没有计算描述符
- 核心代码
// 3D SIFT detector setup.
float min_scale = 0.01f;       // smallest scale of the scale space
int n_octaves = 6;             // number of octaves
int n_scales_per_octave = 4;   // scales per octave
// BUG FIX: the contrast threshold was declared `int min_constast = 0.01;`,
// which truncates 0.01 to 0 and disables the contrast filter entirely.
float min_contrast = 0.01f;
pcl::SIFTKeypoint<PointXYZ,PointWithScale> SIFT; // detector: <input type, output type>
PointCloud<PointWithScale>::Ptr result(new PointCloud<PointWithScale>()); // keypoints with scale
pcl::search::KdTree<PointXYZ>::Ptr tree(new pcl::search::KdTree<PointXYZ>());
SIFT.setInputCloud(cloud);     // input cloud
SIFT.setSearchMethod(tree);    // search structure
SIFT.setScales(min_scale, n_octaves, n_scales_per_octave); // min scale, octaves, scales per octave
SIFT.setMinimumContrast(min_contrast); // contrast threshold
SIFT.compute(*result);
- 计算narf描述子
// Compute NARF descriptors at the previously detected keypoints.
std::vector<int> keypoint_indices2;// copy the keypoint indices into a plain vector
keypoint_indices2.resize (keypoint_indices.points.size ());
for (unsigned int i=0; i<keypoint_indices.size (); ++i)
keypoint_indices2[i]=keypoint_indices.points[i];
pcl::NarfDescriptor narf_descriptor (&range_image, &keypoint_indices2);// descriptor estimator: range image + keypoint indices
narf_descriptor.getParameters ().support_size = support_size;// same support size as used for detection
bool rotation_invariant = true;
narf_descriptor.getParameters ().rotation_invariant = rotation_invariant;// use the rotation-invariant variant
pcl::PointCloud<pcl::Narf36> narf_descriptors;// output descriptors (36 floats each)
narf_descriptor.compute(narf_descriptors);
cout << "共提取了: "<<narf_descriptors.size ()<<" 待提取点大小 "
<<keypoint_indices.points.size ()<< " .\n";
- 实例
#include <iostream>
#include <pcl/io/pcd_io.h>
#include <pcl/keypoints/sift_keypoint.h>
#include <pcl/features/normal_3d.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/octree/octree.h>
#include <pcl/visualization/pcl_visualizer.h>
using namespace pcl;
using namespace std;
typedef pcl::PointXYZ PointType;
// Render the input cloud (point size 1) with the SIFT keypoints drawn on
// top in green (point size 3), in a single shared viewport.
boost::shared_ptr<pcl::visualization::PCLVisualizer> RGB_viwer(pcl::PointCloud<PointType>::Ptr cloud,
                                                               pcl::PointCloud<PointWithScale>::Ptr cloud1)
{
    boost::shared_ptr<pcl::visualization::PCLVisualizer> vis(
        new pcl::visualization::PCLVisualizer("3D Viewer"));

    // Base cloud, small points.
    vis->addPointCloud<PointType>(cloud, "simple cloud");
    vis->setPointCloudRenderingProperties(
        pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "simple cloud");

    // Keypoints, green and larger so they stand out.
    pcl::visualization::PointCloudColorHandlerCustom<PointWithScale> green(cloud1, 0, 255, 0);
    vis->addPointCloud<PointWithScale>(cloud1, green, "XYZ add color1");
    vis->setPointCloudRenderingProperties(
        pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "XYZ add color1");

    return vis;
}
//使用z方向的坐标值替换强度值用来计算尺度空间(即返回z值),还可以用法线、曲率代替
//对于PointXYZI,PointXYZRGB,PointXYZRGBA无需构造该结构
namespace pcl
{
template<>
struct SIFTKeypointFieldSelector<PointXYZ>
{
inline float
operator () (const PointXYZ &p) const
{
return p.z;
}
};
}
int main(int argc, const char** argv) {
PointCloud<PointXYZ>::Ptr cloud(new PointCloud<PointXYZ>());
io::loadPCDFile("/home/n1/notes/pcl/sift3d/pig.pcd",*cloud);
boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer(new pcl::visualization::PCLVisualizer("3D Viewer"));
float min_scale=0.01;
int n_octaves=6;
int n_scales_pre_octave=4;
int min_constast=0.01;
pcl::SIFTKeypoint<PointXYZ,PointWithScale> SIFT; //创建对象
PointCloud<PointWithScale>::Ptr result(new PointCloud<PointWithScale>()); //结果
pcl::search::KdTree<PointXYZ>::Ptr tree(new pcl::search::KdTree<PointXYZ>());
SIFT.setInputCloud(cloud); //设置输入点云
SIFT.setSearchMethod(tree); //设置搜索方法
SIFT.setScales(min_scale,n_octaves,n_scales_pre_octave);//设置最小尺度,层数,每层尺度空间个数
SIFT.setMinimumContrast(min_constast); //阈值
SIFT.compute(*result);
viewer=RGB_viwer(cloud,result);
while (!viewer->wasStopped()){
viewer->spinOnce (100);
boost::this_thread::sleep (boost::posix_time::microseconds (100000));
}
return 0;
}
更多推荐
PCL 点云学习八(关键点)
发布评论