|
|
|
#include <Slicer360ToPerspective.hpp>
|
|
|
|
#include <frame_conversions.hpp>
|
|
|
|
|
|
|
|
#ifndef DO_NOT_USE_OPENMP
|
|
|
|
|
#include <omp.h>
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
/// @brief Construct a slicer around a 360-degree equirectangular panorama.
/// @param panorama_image_ Source panorama sampled by the projection methods.
/// Fix: output_image_height was missing from the member-init list, so reading
/// it before SetOutputImageSize() (e.g. in ProjectToCamera) was undefined
/// behavior. Both dimensions now default to a 2x2 placeholder until
/// SetOutputImageSize() is called.
Slicer360ToPerspective::Slicer360ToPerspective(Image const& panorama_image_)
    : panorama_image(panorama_image_),
      output_image_width(2),
      output_image_height(2),
      interpolation_method(Image::InterpMethod::BILINEAR),
      output_folder(".")
{}
|
|
|
|
|
|
|
|
|
|
/// @brief Set the dimensions of the generated perspective images.
/// Non-positive values fall back to defaults: width -> 1024,
/// height -> the (possibly defaulted) width, i.e. a square image.
/// @return *this, to allow chained configuration calls.
Slicer360ToPerspective & Slicer360ToPerspective::SetOutputImageSize(int width, int height) {
    output_image_width  = width  > 0 ? width  : 1024;
    output_image_height = height > 0 ? height : output_image_width;
    return *this;
}
|
|
|
|
|
|
|
|
|
/// @brief Choose the interpolation scheme used when sampling the panorama.
/// @param interpolation_method_ Method consumed later by GetPixelInterp.
/// @return *this, to allow chained configuration calls.
Slicer360ToPerspective & Slicer360ToPerspective::SetInterpolationMethod(Image::InterpMethod const& interpolation_method_) {
    this->interpolation_method = interpolation_method_;
    return *this;
}
|
|
|
|
|
|
|
|
|
|
/// @brief Set the directory where generated images will be written.
/// @param folder Destination path; stored as-is (no validation here).
/// @return *this, to allow chained configuration calls.
Slicer360ToPerspective & Slicer360ToPerspective::SetOutputFolder(std::string const& folder) {
    this->output_folder = folder;
    return *this;
}
|
|
|
|
|
|
|
|
|
|
/// @brief Build a map of how many registered cameras see each direction of
/// the sphere, in equirectangular layout.
/// @param width Width of the coverage map; its height is width / 2.
/// @param raw If true, return the per-pixel camera counts unmodified;
///            otherwise return a normalized, PARULA-colorized visualization.
/// @return Single-channel count image (raw) or colorized image.
Image Slicer360ToPerspective::ComputeCoverage(int width, bool raw) const {
    int const height = width / 2;
    Image coverage(width, height, 1);

    // For every equirectangular pixel, count the cameras whose view frustum
    // contains the corresponding direction on the unit sphere.
    for (int row = 0; row < height; ++row) {
        for (int col = 0; col < width; ++col) {
            // Pixel -> spherical -> Cartesian view direction (inertial frame).
            Eigen::Vector3d const dir_sph  = equirectangular2sph(row, col, width, height);
            Eigen::Vector3d const dir_cart = sph2cart(dir_sph);

            int visible_count = 0;
            for (Camera const& camera : cameras) {
                if (camera.IsPointVisibleInertialFrame(dir_cart)) {
                    ++visible_count;
                }
            }
            coverage.SetPixelValue(row, col, 0, visible_count);
        }
    }

    if (raw) {
        return coverage;
    }
    return coverage.Normalized().Colorized(Image::Colormap::PARULA);
}
|
|
|
|
|
|
|
|
|
// Sample the panorama color for pixel (i, j) of the camera's output image:
// pixel -> normalized sensor coordinates -> inertial ray -> spherical ->
// equirectangular lookup into panorama_image.
// Returns the interpolated pixel value as an integer RGB triple.
Eigen::Vector3i Slicer360ToPerspective::ProjectToCameraPixel(Camera const& camera, int i, int j) const {
    // Get the ray direction in inertial frame by projecting the pixel to the sphere.
    // Signature reference:
    // Eigen::Vector2d Camera::PixelToNormalizedCoordinates(unsigned int i, unsigned int j, unsigned int width, unsigned int height) {
    Eigen::Vector2d p_sensor = Camera::PixelToNormalizedCoordinates(i, j, output_image_width, output_image_height);
    // Horizontal flip — presumably compensates the sensor-frame handedness so
    // the rendered view is not mirrored. TODO confirm against Camera's frame
    // convention.
    p_sensor[0] *= -1;
    p_sensor[1] *= ((double)output_image_height)/((double)output_image_width);// Take aspect ratio into account.

    // Compute ray direction from sensor coordinates
    Eigen::Vector3d ray_dir = camera.ComputeRayDirInInertialFrame(p_sensor);
    Eigen::Vector3d ray_dir_sph = cart2sph(ray_dir);
    // NOTE(review): sph2equirectangular_d is given the OUTPUT image size, yet
    // p_equi is then used to sample panorama_image. Unless the panorama is
    // guaranteed to be exactly output_image_width x output_image_height, the
    // panorama's own dimensions should probably be passed here — verify.
    Eigen::Vector2d p_equi = sph2equirectangular_d(ray_dir_sph, output_image_width, output_image_height);
    return panorama_image.GetPixelInterp(p_equi, interpolation_method);
}
|
|
|
|
|
|
|
|
|
|
/// @brief Render the panorama as seen by a single perspective camera.
/// @param camera Camera defining the viewpoint.
/// @param parallel If true (and OpenMP is compiled in), rows are rendered in
///        parallel; pixel results are independent, so no synchronization is
///        needed.
/// @return A 3-channel image of size output_image_width x output_image_height.
Image Slicer360ToPerspective::ProjectToCamera(Camera const& camera, bool parallel) const {
    Image rendered(output_image_width, output_image_height, 3);

    if (parallel) {
#ifndef DO_NOT_USE_OPENMP
        #pragma omp parallel for
#endif
        for (int row = 0; row < output_image_height; ++row) {
            for (int col = 0; col < output_image_width; ++col) {
                rendered.SetPixel(row, col, ProjectToCameraPixel(camera, row, col));
            }
        }
    } else {
        for (int row = 0; row < output_image_height; ++row) {
            for (int col = 0; col < output_image_width; ++col) {
                rendered.SetPixel(row, col, ProjectToCameraPixel(camera, row, col));
            }
        }
    }
    return rendered;
}
|
|
|
|
|
|
|
|
|
|
std::vector<Image> Slicer360ToPerspective::ProjectToCameras(std::vector<Camera> const& cameras_, bool parallel) const {
|
|
|
|
|
std::vector<Image> images(cameras_.size());
|
|
|
|
|
if(parallel) {
|
|
|
|
|
#ifndef DO_NOT_USE_OPENMP
|
|
|
|
|
#pragma omp parallel for
|
|
|
|
|
#endif
|
|
|
|
|
for(unsigned int i = 0 ; i < cameras_.size(); i++)
|
|
|
|
|
images[i] = ProjectToCamera(cameras_[i], false);// Parallelization is already done at the camera level.
|
|
|
|
|
} else {
|
|
|
|
|
for(unsigned int i = 0 ; i < cameras_.size(); i++)
|
|
|
|
|
images[i] = ProjectToCamera(cameras_[i], false);// Parallelization is already done at the camera level.
|
|
|
|
|
}
|
|
|
|
|
return images;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
std::vector<Image> Slicer360ToPerspective::ProjectToAllCameras(bool parallel) const {
|
|
|
|
|
return ProjectToCameras(cameras, parallel);
|
|
|
|
|
}
|