#include "Libapi3d.h"
|
||
#include "IniHelper.h"
|
||
#include <iostream>
|
||
#include <vector>
|
||
#include <cmath>
|
||
#include <chrono>
|
||
#include "Log.h"
|
||
#include "LidarHelper.h"
|
||
#include "Log.h"
|
||
#include "LidarHelper.h"
|
||
|
||
|
||
// Busy-wait for the given number of seconds; used to pace the point-cloud handshake below.
static void delay_second(double second)
{
    clock_t start_time;
    start_time = clock();
    for (; (clock() - start_time) < second * CLOCKS_PER_SEC;);
}
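
// A sleep-based alternative would avoid spinning a CPU core; a minimal sketch, assuming
// <thread> is available in this build (kept commented out so the original timing behaviour stays):
//
//   #include <thread>
//   static void delay_second(double second)
//   {
//       std::this_thread::sleep_for(std::chrono::duration<double>(second));
//   }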

#ifndef _WIN32
// Shared between the Livox SDK callback thread and the capture thread.
static std::atomic<bool> bCouldIsRecv(false);
static std::atomic<bool> bCouldIsGet(false);
static uint32_t need_points_size = 30 * 10000;
// Capacity: 40 * 10000 points, 4 floats per point (x, y, z, packed reflectivity/tag).
static float points_data[40 * 10000 * 4];
//static uint32_t cur_point_num = 0;
static std::atomic<uint32_t> cur_point_num(0);

/** Livox SDK point-cloud callback: buffers kExtendCartesian points until a full frame is collected. */
static void GetLidarData(uint8_t handle, LivoxEthPacket* data, uint32_t data_num, void* client_data)
{
    if (data) {
        /** Parsing the timestamp and the point cloud data. */
        uint64_t cur_timestamp = *((uint64_t*)(data->timestamp));
        if (data->data_type == kCartesian) {
            LivoxRawPoint* p_point_data = (LivoxRawPoint*)data->data;
        }
        else if (data->data_type == kSpherical) {
            LivoxSpherPoint* p_point_data = (LivoxSpherPoint*)data->data;
        }
        else if (data->data_type == kExtendCartesian) {
            LivoxExtendRawPoint* p_point_data = (LivoxExtendRawPoint*)data->data;
#ifdef _WIN32
            LivoxExtendRawPoint point_array[10 * 10000];
#else
            LivoxExtendRawPoint point_array[data_num];
#endif
            //uint32_t* converted_data = (uint32_t*)malloc(data_num * 4 * sizeof(uint32_t));
            if (!bCouldIsRecv)
            {
                uint32_t _cur_point_num = cur_point_num++;   // packet counter
                //cur_point_num += data_num;
                _cur_point_num = _cur_point_num * data_num;  // index of the first point of this packet
                need_points_size = G(cloud_need_points_size);

                for (uint32_t i = 0; i < data_num; ++i)
                {
                    uint32_t _offset = (_cur_point_num + i) * 4;  // 4 floats per point
                    points_data[_offset] = p_point_data[i].x / 1000.0;      // mm -> m
                    points_data[_offset + 1] = p_point_data[i].y / 1000.0;
                    points_data[_offset + 2] = p_point_data[i].z / 1000.0;
                    // Reflectivity and tag packed into the 4th slot (stored as a float).
                    points_data[_offset + 3] = (uint32_t)(p_point_data[i].reflectivity << 8 | p_point_data[i].tag);
                }

                uint32_t _cur_count = _cur_point_num + data_num;
                if ((_cur_count % 100000) <= 100)
                    std::cout << _cur_count << " need data size : " << need_points_size << std::endl;
                if (_cur_count < need_points_size)
                    return;
                else
                    bCouldIsRecv = true;   // a full frame has been buffered
            }
            else
                return;

            // Wait until the consumer has finished copying the point cloud.
            //__p_point_data = p_point_data;
            //__data_num = data_num;
            while (true)
            {
                if (!bCouldIsGet)
                {
                    delay_second(0.01);
                    continue;
                }
                cur_point_num = 0;
                memset(points_data, 0, need_points_size * 4 * sizeof(float));
                bCouldIsGet = false;
                break;
            }
        }
        else if (data->data_type == kExtendSpherical) {
            LivoxExtendSpherPoint* p_point_data = (LivoxExtendSpherPoint*)data->data;
        }
        else if (data->data_type == kDualExtendCartesian) {
            LivoxDualExtendRawPoint* p_point_data = (LivoxDualExtendRawPoint*)data->data;
        }
        else if (data->data_type == kDualExtendSpherical) {
            LivoxDualExtendSpherPoint* p_point_data = (LivoxDualExtendSpherPoint*)data->data;
        }
        else if (data->data_type == kImu) {
            LivoxImuPoint* p_point_data = (LivoxImuPoint*)data->data;
        }
        else if (data->data_type == kTripleExtendCartesian) {
            LivoxTripleExtendRawPoint* p_point_data = (LivoxTripleExtendRawPoint*)data->data;
        }
        else if (data->data_type == kTripleExtendSpherical) {
            LivoxTripleExtendSpherPoint* p_point_data = (LivoxTripleExtendSpherPoint*)data->data;
        }
        // printf("data_type %d\n", data->data_type);
    }
}
#else
/** Extend cartesian coordinate format. */
typedef struct {
    int32_t x;            /**< X axis, Unit:mm */
    int32_t y;            /**< Y axis, Unit:mm */
    int32_t z;            /**< Z axis, Unit:mm */
    uint8_t reflectivity; /**< Reflectivity */
    uint8_t tag;          /**< Tag */
} LivoxExtendRawPoint;

typedef struct {
    uint8_t version;        /**< Packet protocol version. */
    uint8_t slot;           /**< Slot number used for connecting LiDAR. */
    uint8_t id;             /**< LiDAR id. */
    uint8_t rsvd;           /**< Reserved. */
    uint32_t err_code;      /**< Device error status indicator information. */
    uint8_t timestamp_type; /**< Timestamp type. */
    /** Point cloud coordinate format, refer to \ref PointDataType . */
    uint8_t data_type;
    uint8_t timestamp[8];   /**< Nanosecond or UTC format timestamp. */
    uint8_t data[1];        /**< Point cloud data. */
} LivoxEthPacket;

// Shared between the LiDAR callback thread and the capture thread.
static std::atomic<bool> bCouldIsRecv(false);
static std::atomic<bool> bCouldIsGet(false);
static uint32_t need_points_size = 30 * 10000;
// 4 floats per point (x, y, z, packed reflectivity/tag), same layout as the non-Windows buffer.
static float points_data[40 * 10000 * 4];
//static uint32_t cur_data_num = 0;
static std::atomic<uint32_t> cur_point_num(0);
typedef void (*FuncGetLidarData)(uint8_t handle, LivoxEthPacket* data, uint32_t data_num, void* client_data);
FuncGetLidarData GetLidarData = nullptr;
#endif // !_WIN32


// Capture point cloud data from the LiDAR.
static int lidar_indxx = 0;
int Cal3D::LibapiCapCloudPoints(std::shared_ptr<geometry::PointCloud>& cloud_ptr)
{
    int ret = CAL_OK;

    if (G(lidar_cap_fake) == "true")
    {
        if (G(fake_lidar_fpath) == "")
            assert(0);
        else
        {
            // Read the fake (pre-recorded) file to obtain the point cloud data.
            ret = LibapiReadCloudPointsPly(G(fake_lidar_fpath), cloud_ptr);
            if (ret != CAL_OK)
            {
                return ret;
            }
            if (cloud_ptr == nullptr)
            {
                return CAL_NONE_PTR;
            }
            if (cloud_ptr->points_.size() == 0)
            {
                return CAL_3D_CLOUD_POINTS_IS_0;
            }
        }
    }
    else
    {
        // Live capture: cloud_ptr is expected to be allocated by the caller
        // (see the commented-out test main() at the end of this file).
        if (lidar_indxx == 0)
        {
            Loger(LEVEL_INFOR, "CLidarHelper::Get()->Init start");
            ret = CLidarHelper::Get()->Init((void*)GetLidarData);
            Loger(LEVEL_INFOR, "CLidarHelper::Get()->Init end");
            if (ret != CAL_OK)
            {
                Loger(LEVEL_INFOR, "CLidarHelper::Get()->Init() %d", ret);
                return ret;
            }
            lidar_indxx = 1;
        }
        else
        {
            Loger(LEVEL_INFOR, "CLidarHelper::Get()->Start start");
            ret = CLidarHelper::Get()->Start();
            Loger(LEVEL_INFOR, "CLidarHelper::Get()->Start end");
            if (ret != CAL_OK)
            {
                Loger(LEVEL_INFOR, "CLidarHelper::Get()->Start() %d", ret);
                return ret;
            }
        }

        // Copy the captured points into the point cloud object.
        Loger(LEVEL_INFOR, "cloud_ptr.get()->points_. get data start");
        geometry::PointCloud* pCloud = cloud_ptr.get();
        // int size = cloud_ptr.get()->points_.size() + __data_num ;
        // cloud_ptr.get()->points_.resize(size * 3 * sizeof(float));

        // Wait until the callback has buffered a full frame (bCouldIsRecv is set by GetLidarData).
        while (true)
        {
            if (!bCouldIsRecv)
            {
                delay_second(0.05);
                continue;
            }
            delay_second(0.01);
            break;
        }
        pCloud->points_.resize(need_points_size);
        for (uint32_t i = 0; i < need_points_size; ++i) {
            pCloud->points_[i][0] = points_data[i * 4];
            pCloud->points_[i][1] = points_data[i * 4 + 1];
            pCloud->points_[i][2] = points_data[i * 4 + 2];
            //pCloud->points_.push_back({ (float)point[0], (float)point[1], (float)point[2] });
        }
        if (G(save_cload) == "true")
        {
            io::WritePointCloud("output.ply", *pCloud);
        }

        bCouldIsGet = true;     // tell the callback that the copy has finished
        delay_second(0.01);
        bCouldIsRecv = false;   // let the callback start receiving the next frame

        Loger(LEVEL_INFOR, "cloud_ptr.get()->points_. get data end, data size is %d", (int)pCloud->points_.size());

        Loger(LEVEL_INFOR, "CLidarHelper::Get()->Stop start");
        ret = CLidarHelper::Get()->Stop();
        if (ret != CAL_OK)
        {
            Loger(LEVEL_INFOR, "CLidarHelper::Get()->Stop() %d", ret);
            return ret;
        }
        Loger(LEVEL_INFOR, "CLidarHelper::Get()->Stop end");
    }

    return CAL_OK;
}

// Read point cloud data from a PLY file.
int Cal3D::LibapiReadCloudPointsPly(const std::string plyPath, std::shared_ptr<geometry::PointCloud>& cloud_ptr)
{
    int ret = CAL_OK;

    cloud_ptr = io::CreatePointCloudFromFile(plyPath);

    /*if (!io::ReadPointCloud(plyPath, *cloud_ptr))*/
    if (cloud_ptr == nullptr)
    {
        Loger(LEVEL_INFOR, "Cal3D::LibapiReadCloudPointsPly Failed to read %s", plyPath.c_str());
        return CAL_READFILE_FAILED;
    }

    if (cloud_ptr->IsEmpty())
    {
        Loger(LEVEL_INFOR, "cloud_ptr->IsEmpty() cloud_ptr : %s", plyPath.c_str());
        return CAL_PRAMA_EMPUTY;
    }

    Loger(LEVEL_INFOR, "cloud_ptr read success: %s", plyPath.c_str());
    return ret;
}
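
// A minimal usage sketch (hypothetical caller; "scan.ply" is only an example path):
//
//   std::shared_ptr<geometry::PointCloud> cloud;
//   if (Cal3D::LibapiReadCloudPointsPly("scan.ply", cloud) == CAL_OK)
//       Cal3D::LibapiSaveCloudPointsPly("scan_copy.ply", cloud);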

// Save point cloud data to a PLY file.
int Cal3D::LibapiSaveCloudPointsPly(std::string savaPath, std::shared_ptr<geometry::PointCloud>& cloud_ptr)
{
    if (cloud_ptr->IsEmpty())
        assert(0);

    int ret = CAL_OK;

    if (!io::WritePointCloud(savaPath, *cloud_ptr))
    {
        return CAL_WRITEFILE_FAILED;
    }

    return ret;
}

// Filter point cloud data by distance.
// Per point: point[0], point[1], point[2] = x, y, z.
int Cal3D::LibapiFilterCloudPointsByDist(std::shared_ptr<geometry::PointCloud>& src_cloud_ptr,
    std::shared_ptr<geometry::PointCloud>& dst_cloud_ptr, float minusx, float x, float minusy, float y, float minusz, float z)
{
    if (src_cloud_ptr->IsEmpty())
        assert(0);

    int ret = CAL_OK;

    std::chrono::high_resolution_clock::time_point m_start = std::chrono::high_resolution_clock::now();

    // Visit every point in the cloud and keep the ones inside the bounds.
    for (size_t i = 0; i < src_cloud_ptr->points_.size(); ++i) {
        Eigen::Vector3d point_3d = src_cloud_ptr->points_[i];
        // NOTE: only the upper bound is applied on z; minusz is currently unused.
        if (minusx < point_3d.x() && point_3d.x() < x
            && minusy < point_3d.y() && point_3d.y() < y
            && point_3d.z() < z)
        {
            dst_cloud_ptr->points_.push_back(point_3d);
        }
    }

    if (dst_cloud_ptr->IsEmpty())
        assert(0);

    std::chrono::high_resolution_clock::time_point m_end = std::chrono::high_resolution_clock::now();
    std::chrono::duration<double> elapsex = m_end - m_start;
    std::cout << "Cal3D::LibapiFilterCloudPointsByDist time is : " << elapsex.count() << std::endl;
    return ret;
}
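
// Usage sketch with config-driven bounds, mirroring the commented-out test main() at the end of this file:
//
//   auto filtered = std::make_shared<geometry::PointCloud>();
//   Cal3D::LibapiFilterCloudPointsByDist(cloud_ptr, filtered,
//       G(dminx), G(dmaxx), G(dminy), G(dmaxy), G(dminz), G(dmaxz));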

// Point cloud correction.
int Cal3D::LibapiCorrectCloudPoint(std::shared_ptr<geometry::PointCloud>& src_cloud_ptr,
    std::shared_ptr<geometry::PointCloud>& dst_cloud_ptr, int node)
{
    if (src_cloud_ptr->IsEmpty())
        assert(0);

    std::chrono::high_resolution_clock::time_point m_start = std::chrono::high_resolution_clock::now();

    int ret = CAL_OK;

    float kk = G(kk);
    for (size_t i = 0; i < src_cloud_ptr->points_.size(); ++i)
    {
        Eigen::Vector3d point_3d = src_cloud_ptr->points_[i];
        // Radial correction (disabled):
        //float r = sqrt(pow(point_3d[0], 2) + pow(point_3d[1], 2) + pow(point_3d[2], 2));
        //point_3d[0] = point_3d[0] + point_3d[0] / r * kk;
        //point_3d[1] = point_3d[1] + point_3d[1] / r * kk;
        //point_3d[2] = point_3d[2] + point_3d[2] / r * kk;
        point_3d[2] -= kk;   // currently only a constant z offset is applied
        dst_cloud_ptr->points_.push_back(point_3d);
    }

    if (dst_cloud_ptr->IsEmpty())
        assert(0);

    std::chrono::high_resolution_clock::time_point m_end = std::chrono::high_resolution_clock::now();
    std::chrono::duration<double> elapsex = m_end - m_start;
    std::cout << "Cal3D::LibapiCorrectCloudPoint time is : " << elapsex.count() << std::endl;
    return ret;
}

// Convert a point cloud to a vector of cv::Point3d.
int Cal3D::LibapiCloudPointToVec(std::shared_ptr<geometry::PointCloud>& src_cloud_ptr,
    std::vector<cv::Point3d>& dst_3d)
{
    if (src_cloud_ptr->IsEmpty())
    {
        Loger(LEVEL_INFOR, "src_cloud_ptr->IsEmpty()");
        return CAL_PRAMA_EMPUTY;
        //assert(0);
    }

    std::chrono::high_resolution_clock::time_point m_start = std::chrono::high_resolution_clock::now();

    int ret = CAL_OK;

    for (size_t i = 0; i < src_cloud_ptr->points_.size(); ++i)
    {
        Eigen::Vector3d point_3d = src_cloud_ptr->points_[i];
        dst_3d.push_back(cv::Point3d(point_3d[0], point_3d[1], point_3d[2]));
    }

    std::chrono::high_resolution_clock::time_point m_end = std::chrono::high_resolution_clock::now();
    std::chrono::duration<double> elapsex = m_end - m_start;
    std::cout << "Cal3D::LibapiCloudPointToVec time is : " << elapsex.count() << std::endl;

    return ret;
}

// Rotate and translate the point cloud (extrinsic correction).
int Cal3D::LibapiRotAndTrans(std::shared_ptr<geometry::PointCloud>& cloud_ptr)
{
    std::chrono::high_resolution_clock::time_point m_start = std::chrono::high_resolution_clock::now();

    int ret = CAL_OK;

    // Example calibration values:
    //float y = 1.91032;
    //float p = -1.5938;
    //float r = -0.321605;

    Eigen::Matrix3d Rz = cloud_ptr->GetRotationMatrixFromXYZ({ 0, 0, G(y) });
    Eigen::Matrix3d Ry = cloud_ptr->GetRotationMatrixFromXYZ({ 0, G(p), 0 });
    Eigen::Matrix3d Rx = cloud_ptr->GetRotationMatrixFromXYZ({ G(r), 0, 0 });

    // Rotate(R, center): the second argument is the rotation centre.
    cloud_ptr->Rotate(Rx, { 1, 0, 0 });
    cloud_ptr->Rotate(Ry, { 0, 1, 0 });
    cloud_ptr->Rotate(Rz, { 0, 0, 1 });

    Eigen::Vector3d T_vector = Eigen::Vector3d({ G(tx), G(ty), G(tz) });

    cloud_ptr->Translate(T_vector);

    std::chrono::high_resolution_clock::time_point m_end = std::chrono::high_resolution_clock::now();
    std::chrono::duration<double> elapsex = m_end - m_start;
    std::cout << "Cal3D::LibapiRotAndTrans time is : " << elapsex.count() << std::endl;

    return ret;
}
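
// Typical pipeline order, as exercised by the commented-out test main() at the end of this file:
// LibapiCapCloudPoints -> LibapiRotAndTrans -> LibapiFilterCloudPointsByDist
// -> LibapiCorrectCloudPoint -> LibapiCloudPointToVec -> LibapiCloudPointsPorject.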

// Project the point cloud onto the image plane.
// im must be an undistorted image.
int Cal3D::LibapiCloudPointsPorject(std::vector<cv::Point3d>& src_points3D,
    cv::Mat& cameraMatrix, std::vector<cv::Point2d>& dst_points2D, cv::Mat* im_2D3D, bool doTest, cv::Mat& test_src_im)
{
    if (src_points3D.size() == 0 || cameraMatrix.empty())
        assert(0);

    if (doTest && test_src_im.empty()) assert(0);

    std::chrono::high_resolution_clock::time_point m_start = std::chrono::high_resolution_clock::now();

    int ret = CAL_OK;

    // Identity extrinsics: the points are already expressed in the camera frame.
    cv::Mat rvec, tvec;
    rvec.create(1, 3, cv::DataType<float>::type);
    rvec.at<float>(0) = 0.0;
    rvec.at<float>(1) = 0.0;
    rvec.at<float>(2) = 0.0;
    tvec.create(3, 1, cv::DataType<float>::type);
    tvec.at<float>(0) = 0.0;
    tvec.at<float>(1) = 0.0;
    tvec.at<float>(2) = 0.0;
    cv::Mat distCoeffs = (cv::Mat_<double>(1, 5) << 0.0, 0.0, 0.0, 0.0, 0.0);

    // Project the 3D points to 2D pixel coordinates.
    cv::projectPoints(src_points3D, rvec, tvec, cameraMatrix, distCoeffs, dst_points2D);

    if (dst_points2D.size() == 0)
        assert(0);

    cv::Mat test_im = cv::Mat::zeros(im_2D3D->size(), im_2D3D->type());
    int ch = im_2D3D->channels();
    float* im_2D3D_row_ptr;
    uchar* test_im_row_ptr;
    for (size_t i = 0; i < dst_points2D.size(); ++i)
    {
        if (0 <= dst_points2D[i].x && dst_points2D[i].x < im_2D3D->cols && 0 <= dst_points2D[i].y && dst_points2D[i].y < im_2D3D->rows)
        {
            // Store the 3D coordinates of the point in the pixel it projects to.
            im_2D3D_row_ptr = (float*)im_2D3D->ptr<float>(int(dst_points2D[i].y));
            im_2D3D_row_ptr[(int)dst_points2D[i].x * ch] = src_points3D[i].x;
            im_2D3D_row_ptr[(int)dst_points2D[i].x * ch + 1] = src_points3D[i].y;
            im_2D3D_row_ptr[(int)dst_points2D[i].x * ch + 2] = src_points3D[i].z;

            //std::cout << im_2D3D_row_ptr[(int)dst_points2D[i].x * ch] << std::endl;
            //std::cout << im_2D3D_row_ptr[(int)dst_points2D[i].x * ch+1] << std::endl;
            //std::cout << im_2D3D_row_ptr[(int)dst_points2D[i].x * ch+2] << std::endl;

            if (doTest)
            {
                test_im_row_ptr = (uchar*)test_im.ptr<uchar>(int(dst_points2D[i].y));
                test_im_row_ptr[(int)dst_points2D[i].x * ch] = 255;
            }
        }
    }

    // For test: overlay the projected points in red on the source image.
    if (doTest)
    {
        cv::Mat image_test_red = cv::Mat::zeros(test_src_im.size(), test_src_im.type());
        cv::Mat image_test_mask;
        image_test_red.setTo(cv::Scalar(0, 0, 255));
        cv::cvtColor(test_im, test_im, cv::COLOR_RGB2GRAY);
        cv::threshold(test_im, image_test_mask, 1, 255, cv::THRESH_BINARY);
        cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(9, 9), cv::Point(-1, -1));
        cv::dilate(image_test_mask, image_test_mask, kernel);
        cv::bitwise_and(image_test_red, test_src_im, test_src_im, image_test_mask);
#ifdef WIN32
        cv::resize(test_src_im, test_src_im, cv::Size(int(934), int(700)));
        cv::imshow("test_src_im", test_src_im);
        cv::waitKey(0);
#endif
    }

    std::chrono::high_resolution_clock::time_point m_end = std::chrono::high_resolution_clock::now();
    std::chrono::duration<double> elapsex = m_end - m_start;
    std::cout << "Cal3D::LibapiCloudPointsPorject time is : " << elapsex.count() << std::endl;

    return ret;
}

int Cal3D::LibapiCloudPointsPorject(
    std::vector<cv::Point3d>& src_points3D,
    cv::Mat& cameraMatrix,
    std::vector<std::vector<cv::Point2f>>& vecConners,
    cv::Mat* im2D3D,
    std::vector<std::vector<cv::Point2f>>& vecInConners2D,
    std::vector<std::vector<cv::Point3f>>& vecInConners3D)
{
    if (src_points3D.size() == 0 || cameraMatrix.empty())
    {
        Loger(LEVEL_INFOR, "src_points3D.size() == 0 || cameraMatrix.empty()");
        return CAL_PRAMA_EMPUTY;
        //assert(0);
    }

    std::chrono::high_resolution_clock::time_point m_start = std::chrono::high_resolution_clock::now();

    int ret = CAL_OK;

    // Identity extrinsics: the points are already expressed in the camera frame.
    cv::Mat rvec, tvec;
    rvec.create(1, 3, cv::DataType<float>::type);
    rvec.at<float>(0) = 0.0;
    rvec.at<float>(1) = 0.0;
    rvec.at<float>(2) = 0.0;
    tvec.create(3, 1, cv::DataType<float>::type);
    tvec.at<float>(0) = 0.0;
    tvec.at<float>(1) = 0.0;
    tvec.at<float>(2) = 0.0;
    cv::Mat distCoeffs = (cv::Mat_<double>(1, 5) << 0.0, 0.0, 0.0, 0.0, 0.0);

    // Project the 3D points to 2D pixel coordinates.
    std::vector<cv::Point2d> dstPoints2D;
    cv::projectPoints(src_points3D, rvec, tvec, cameraMatrix, distCoeffs, dstPoints2D);

    if (dstPoints2D.empty())
    {
        Loger(LEVEL_INFOR, "dstPoints2D.empty()");
        return CAL_3D_PROJECT_POINTS_IS_0;
        //assert(0);
    }

    int ch = 3;
    float* im2D3DRowPtr;
    cv::Point2f p2d;
    std::vector<int> counts(vecConners.size(), 0);
    for (size_t i = 0; i < dstPoints2D.size(); ++i)
    {
        if (0 <= dstPoints2D[i].x && dstPoints2D[i].x < im2D3D->cols && 0 <= dstPoints2D[i].y && dstPoints2D[i].y < im2D3D->rows)
        {
            // Store the 3D coordinates of the point in the pixel it projects to.
            im2D3DRowPtr = (float*)im2D3D->ptr<float>(int(dstPoints2D[i].y));
            im2D3DRowPtr[(int)dstPoints2D[i].x * ch] = src_points3D[i].x;
            im2D3DRowPtr[(int)dstPoints2D[i].x * ch + 1] = src_points3D[i].y;
            im2D3DRowPtr[(int)dstPoints2D[i].x * ch + 2] = src_points3D[i].z;
            p2d.x = dstPoints2D[i].x;
            p2d.y = dstPoints2D[i].y;

            for (size_t r = 0; r < vecConners.size(); ++r)
            {
                // Skip ROIs where the 4 corner points were not detected correctly.
                if (vecConners[r].size() != 4)
                    continue;

                // TODO: to be optimized.
                // Shrink the 4 corners inwards by 100 pixels.
                std::vector<cv::Point2f> vecConnersTemp;
                vecConnersTemp.push_back({ vecConners[r][0].x + 100.0f, vecConners[r][0].y + 100.0f });
                vecConnersTemp.push_back({ vecConners[r][1].x - 100.0f, vecConners[r][1].y + 100.0f });
                vecConnersTemp.push_back({ vecConners[r][2].x - 100.0f, vecConners[r][2].y - 100.0f });
                vecConnersTemp.push_back({ vecConners[r][3].x + 100.0f, vecConners[r][3].y - 100.0f });

                // pointPolygonTest returns a positive value if the point is inside the polygon,
                // zero if it lies on the polygon, and a negative value if it is outside.
                if (cv::pointPolygonTest(vecConnersTemp, p2d, false) >= 0)
                {
                    counts[r]++;
                    // Keep every 5th point inside the ROI to thin out the correspondences.
                    if ((counts[r] % 5) == 0)
                    {
                        cv::Point3f p3d = { (float)src_points3D[i].x, (float)src_points3D[i].y, (float)src_points3D[i].z };
                        vecInConners3D[r].push_back(p3d);
                        vecInConners2D[r].push_back(p2d);
                    }
                }
            }
        }
    }
    return ret;
}

//int main() {
//    // Assume img is a CV_32FC3 image
//    cv::Mat img = cv::Mat::ones(480, 640, CV_32FC3) * 0.0001; // example data with very small values
//
//    // Get the minimum and maximum values
//    double minVal, maxVal;
//    cv::minMaxLoc(img.reshape(1), &minVal, &maxVal); // flatten to a single channel to get the overall min/max
//
//    // If minVal and maxVal are close, fall back to a fixed range (e.g. 0 to 1)
//    if (maxVal - minVal < 1e-6) {
//        maxVal = minVal + 1.0;
//    }
//
//    // Normalize manually, stretching the values to the 0..255 range
//    cv::Mat img_normalized;
//    img.convertTo(img_normalized, CV_32FC3, 1.0 / (maxVal - minVal), -minVal / (maxVal - minVal));
//    img_normalized.convertTo(img_normalized, CV_8UC3, 255.0); // convert to an 8-bit image
//
//    // Display the image
//    cv::imshow("Image", img_normalized);
//    cv::waitKey(0);
//
//    return 0;
//}

// Used to visually inspect the 2D3D image produced by the point cloud projection.
int Cal3D::LibapiCloudPointProjectTest(cv::Mat& im_2D3D)
{
    int ret = CAL_OK;

#ifdef WIN32
    cv::Mat tmp;
    im_2D3D.convertTo(tmp, CV_8UC3, 255.0);
    cv::resize(tmp, tmp, cv::Size(int(934), int(700)));
    tmp = tmp * 1000;
    cv::imshow("im_2D3D", tmp);
    cv::waitKey(0);
#endif

    return ret;
}

// Convert the 2D3D image into a CMeasureInfo 2D/3D point-pair structure.
int Cal3D::LibapiCloudPointsToCMeasureInfo(cv::Mat& im_2D3D, CMeasureInfo& CMeasureInfo)
{
    if (im_2D3D.empty())
        assert(0);

    int ret = CAL_OK;

    //CMeasureInfo.im = &im_2D3D;

    return ret;
}

// Copy a 3x3 cv::Mat (CV_64F elements, e.g. a homography) into an Eigen::Matrix3f.
static Eigen::Matrix3f cvMatToEigenMap(const cv::Mat& mat)
{
    //if (mat.type() != CV_32FC1) {
    //    throw std::invalid_argument("the input matrix must be a 3x3 floating-point matrix");
    //}
    Eigen::Matrix3f eigenMat;

    // Copy the data element by element into the Eigen matrix.
    for (int i = 0; i < mat.rows; ++i) {
        for (int j = 0; j < mat.cols; ++j) {
            eigenMat(i, j) = mat.at<double>(i, j);
        }
    }

    return eigenMat;
}

// Compute the 3D coordinates of the 2D corner points through the homography H.
// The neighbouring 3D points (vec3d_n) are used to fit a plane with RANSAC;
// each 2D corner is back-projected through H^-1 and intersected with that plane.
int Cal3D::LibapiCalConner3D(const std::vector<cv::Point2f>& vec2D_2, cv::Mat& H,
    std::vector<cv::Point3f> vec3d_n, std::vector<cv::Point3f>& vec3D_4)
{
    int ret = CAL_OK;

    open3d::geometry::PointCloud _3pts_cloud = open3d::geometry::PointCloud();
    for (const auto& point : vec3d_n) {
        _3pts_cloud.points_.push_back(Eigen::Vector3d(point.x, point.y, point.z));
    }
    // distance_threshold = 0.01, ransac_n = 3, num_iterations = 500
    float A, B, C, D;
    std::vector<size_t> inliers;
    std::tuple<Eigen::Vector4d, std::vector<size_t>> result = _3pts_cloud.SegmentPlane(0.01, 3, 500);
    Eigen::Vector4d abcd = std::get<0>(result);
    A = abcd[0], B = abcd[1], C = abcd[2], D = abcd[3];
    inliers = std::get<1>(result);

    float x, y, t;
    Eigen::Matrix3f H_inv = cvMatToEigenMap(H).inverse();
    Eigen::Vector3f xy1;
    Eigen::Vector3f pts3d;
    cv::Point3f p3d;
    cv::Point3f p3d_next;
    for (int i = 0; i < 4; ++i)
    {
        cv::Point2f p2d = vec2D_2[i];
        x = p2d.x, y = p2d.y;
        xy1(0) = x, xy1(1) = y, xy1(2) = 1;
        // Back-project the pixel through the inverse homography and normalize the homogeneous result.
        pts3d = H_inv * xy1;
        pts3d(0) = pts3d(0) / pts3d(2);
        pts3d(1) = pts3d(1) / pts3d(2);
        pts3d(2) = pts3d(2) / pts3d(2);
        // Intersect the ray through pts3d with the plane A*x + B*y + C*z + D = 0.
        t = -D / (A * pts3d(0) + B * pts3d(1) + C * pts3d(2));
        pts3d(0) = t * pts3d(0);
        pts3d(1) = t * pts3d(1);
        pts3d(2) = t * pts3d(2);
        p3d.x = pts3d(0);
        p3d.y = pts3d(1);
        p3d.z = pts3d(2);
        vec3D_4.push_back(p3d);
    }

    // Print the edge lengths between consecutive corners as a sanity check.
    for (int i = 0; i < 4; ++i)
    {
        p3d = vec3D_4[i];
        p3d_next = vec3D_4[(i + 1) % 4];
        float edges = sqrt(pow((p3d.x - p3d_next.x), 2)
            + pow((p3d.y - p3d_next.y), 2)
            + pow((p3d.z - p3d_next.z), 2));
        std::cout << edges << std::endl;
    }

    return ret;
}
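
// Worked example of the plane-intersection step above (illustrative numbers only):
// for the plane z = 2 (A = 0, B = 0, C = 1, D = -2) and a normalized back-projected point (0.1, -0.2, 1),
// t = -D / (A*x + B*y + C*z) = 2 / 1 = 2, giving the corner's 3D point (0.2, -0.4, 2).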

//
//int main()
//{
//    int ret = CAL_OK;
//
//    utility::SetVerbosityLevel(utility::VerbosityLevel::Debug);
//
//    auto cloud_ptr = std::make_shared<geometry::PointCloud>();
//    auto cloud_ptr_by_dist = std::make_shared<geometry::PointCloud>();
//    auto cloud_ptr_by_correct = std::make_shared<geometry::PointCloud>();
//
//    ret = Cal3D::LibapiCapCloudPoints(cloud_ptr);
//    if (ret != CAL_OK) assert(0);
//
//    ret = Cal3D::LibapiRotAndTrans(cloud_ptr);
//
//    ret = Cal3D::LibapiFilterCloudPointsByDist(cloud_ptr, cloud_ptr_by_dist,
//        G(dminx), G(dmaxx), G(dminy), G(dmaxy), G(dminz), G(dmaxz));
//    if (ret != CAL_OK) assert(0);
//    if (cloud_ptr_by_dist->IsEmpty()) assert(0);
//
//    ret = Cal3D::LibapiCorrectCloudPoint(cloud_ptr_by_dist, cloud_ptr_by_correct, 0);
//
//    std::vector<cv::Point3d> vec_point_3d;
//    ret = Cal3D::LibapiCloudPointToVec(cloud_ptr_by_correct, vec_point_3d);
//
//    cv::Mat cameraMatrix = (cv::Mat_<double>(3, 3) << G(fx), 0, G(cx), 0, G(fy), G(cy), 0, 0, 1);
//    std::vector<cv::Point2d> vec_point_2d;
//    //cv::Mat test_src_im = cv::imread("output.png");
//    cv::Mat test_src_im;
//
//    cv::Mat* im_2D3D = new cv::Mat(cv::Size(G(w), G(h)), CV_32FC3);
//    im_2D3D->setTo(cv::Scalar(0, 0, 0)); // all-black background
//    ret = Cal3D::LibapiCloudPointsPorject(vec_point_3d, cameraMatrix, vec_point_2d, im_2D3D, false, test_src_im);
//
//    // for test
//    //ret = Cal3D::LibapiCloudPointProjectTest(*im_2D3D);
//    //cv::FileStorage file("d:\\im_2D3D.yml", cv::FileStorage::WRITE);
//    //std::string tag = "im_2D3D";
//    //file << tag << *im_2D3D;
//    //file.release();
//
//    // Create the CMeasureInfo data structure
//    long id;
//    ret = CalUtils::LibapiGenRandomId(id);
//    std::string info = "abc";
//    CMeasureInfo* CMeasureInfo_ptr = new CMeasureInfo(id, info, im_2D3D);
//
//    visualization::DrawGeometries({ cloud_ptr_by_correct }, "TestFileFormat", 1920, 1080);
//
//    return 0;
//}