From 8f4a0b007e85474236e5d1942006316f006fcece Mon Sep 17 00:00:00 2001 From: Unknown Date: Thu, 25 Jul 2019 11:51:32 +0200 Subject: [PATCH 001/174] =?UTF-8?q?[software]=20New=20software=20for=20sti?= =?UTF-8?q?thing=20fisheye=20images=20in=20360=C2=B0=20panorama?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/software/utils/CMakeLists.txt | 12 + src/software/utils/main_fisheyeProjection.cpp | 400 ++++++++++++++++++ 2 files changed, 412 insertions(+) create mode 100644 src/software/utils/main_fisheyeProjection.cpp diff --git a/src/software/utils/CMakeLists.txt b/src/software/utils/CMakeLists.txt index 4c5d080c5f..d409635d51 100644 --- a/src/software/utils/CMakeLists.txt +++ b/src/software/utils/CMakeLists.txt @@ -173,4 +173,16 @@ alicevision_add_software(aliceVision_utils_split360Images ${Boost_LIBRARIES} ) + +# Project 180° fisheye images into equirectangular +alicevision_add_software(aliceVision_utils_fisheyeProjection + SOURCE main_fisheyeProjection.cpp + FOLDER ${FOLDER_SOFTWARE_UTILS} + LINKS aliceVision_system + aliceVision_numeric + aliceVision_image + ${OPENIMAGEIO_LIBRARIES} + ${Boost_LIBRARIES} +) + endif() # ALICEVISION_BUILD_SFM diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp new file mode 100644 index 0000000000..8aa821d649 --- /dev/null +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -0,0 +1,400 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2017 AliceVision contributors. +// Copyright (c) 2016 openMVG contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +// These constants define the current software version. +// They must be updated when the command line is changed. +#define ALICEVISION_SOFTWARE_VERSION_MAJOR 1 +#define ALICEVISION_SOFTWARE_VERSION_MINOR 0 + +using namespace aliceVision; + +namespace fs = boost::filesystem; +namespace po = boost::program_options; +namespace oiio = OIIO; + + +/** + * @brief Function to map 3D coordinates onto a 2D image according a spherical projection + */ +class SphericalMapping +{ +public: + + static Vec3 get3DPoint(const Vec2& pos2d, int width, int height) + { + const double x = pos2d(0) - width; + const double y = height/2.0 - pos2d(1); + + const double longitude = M_PI * 2.0 * x / width; // between -PI and PI + const double latitude = M_PI * y / height; // between -PI/2 and PI/2 + + const double Px = cos(latitude) * cos(longitude); + const double Py = cos(latitude) * sin(longitude); + const double Pz = sin(latitude); + + return Vec3(Px, Py, Pz); + } + + static Vec2 get2DCoordinates(const Vec3& ray, const int inSize) + { + const double Px = ray(0); + const double Py = ray(1); + const double Pz = ray(2); + +// float aperture = 2.0 * std::atan(36.0 / (8.0 * 2.0)); +// float aperture = 2.0 * asin(1.0 / (2.0 * fNumber)); + + float r = 2.0 * atan2(sqrt(Square(Px) + Square(Pz)), Py) / M_PI; + float theta = atan2(Pz, Px); + Vec2 v(r*cos(theta), r*sin(theta)); + return v * inSize/2; + } +}; + +float sigmoid(float x, float sigwidth, float sigMid) +{ + return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); +} + +void setFisheyeImage(image::Image& imageIn, const oiio::ParamValueList& metadata, oiio::ImageBuf& buffer, image::Image& imageAlpha) +{ + image::getBufferFromImage(imageIn, buffer); + buffer.specmod().extra_attribs = metadata; + bool correctOrientation = 
oiio::ImageBufAlgo::reorient(buffer, buffer); + + if(!correctOrientation) + { + ALICEVISION_LOG_ERROR("Can't set correct orientation of image"); + } + + std::size_t width = buffer.spec().width; + std::size_t height = buffer.spec().height; + + imageIn.resize(width, height, false); + buffer.get_pixels(buffer.roi(), buffer.spec().format, imageIn.data()); + + imageAlpha.resize(width, height, false); + + const float maxRadius = std::min(width, height) * 0.5 * 0.95; + const float blurWidth = maxRadius * 0.2; + const float blurMid = maxRadius * 0.95; + const Vec2i center(width/2, height/2); + + for(std::size_t x = 0; x < width; ++x) + { + for(std::size_t y = 0; y < height; ++y) + { + const image::RGBfColor& inPixel = imageIn(y, x); + image::RGBAfColor& alphaPixel = imageAlpha(y, x); + float dist = sqrt((x-center(0)) * (x-center(0)) + (y-center(1)) * (y-center(1))); + if(dist > maxRadius) + { + alphaPixel.a() = 0.f; + } + else + { + alphaPixel.r() = inPixel.r(); + alphaPixel.g() = inPixel.g(); + alphaPixel.b() = inPixel.b(); + alphaPixel.a() = sigmoid(dist, blurWidth, blurMid); + } + } + } +} + + +void fisheyeToEquirectangular(image::Image& imageIn, const int nbImages, int iter, const std::array, 3> rotations, image::Image& imageOut) +{ + std::size_t inWidth = imageIn.Width(); + std::size_t inHeight = imageIn.Height(); + std::size_t inSize = std::min(inWidth, inHeight); + + double xRotation = -5.0 + rotations[0].at(iter); + double zRotation = double(iter * -360.0 / nbImages) + rotations[2].at(iter); + double yRotation = double(-(180.0 + zRotation)/60.0) + rotations[1].at(iter); + + const image::Sampler2d sampler; + for(int j = 0; j < inSize; ++j) + { + for(int i = 0; i < 2 * inSize; ++i) + { + image::RGBAfColor pixel(0.f, 0.f, 0.f, 0.f); + + Vec3 ray = SphericalMapping::get3DPoint(Vec2(i,j), 2*inSize, inSize); + + ray = rotationXYZ(degreeToRadian(xRotation), degreeToRadian(yRotation), degreeToRadian(zRotation)) * ray; + const Vec2 x = 
SphericalMapping::get2DCoordinates(ray, inSize); + const Vec2 xFish(inWidth/2 - x(0), inHeight/2 - x(1)); + + pixel = sampler(imageIn, xFish(1), xFish(0)); + float alpha = pixel.a(); + + imageOut(j, i).r() = pixel.r()*alpha + imageOut(j, i).r()*(1.f-alpha); + imageOut(j, i).g() = pixel.g()*alpha + imageOut(j, i).g()*(1.f-alpha); + imageOut(j, i).b() = pixel.b()*alpha + imageOut(j, i).b()*(1.f-alpha); + imageOut(j, i).a() = pixel.a()*alpha + imageOut(j, i).a()*(1.f-alpha); + } + } +} + + +void stitchPanorama(const std::vector& imagePaths, const std::vector& metadatas, const std::array, 3> rotations, const std::string& outputFolder) +{ + int nbImages = imagePaths.size(); + image::Image imageOut; + oiio::ImageBuf bufferOut; + std::size_t inSize; + + for(int i=0; i imageIn; + image::Image imageAlpha; + + ALICEVISION_LOG_INFO("Converting " << imagePaths[i]); + + try + { + image::readImage(imagePaths[i], imageIn, image::EImageColorSpace::LINEAR); + } + catch(fs::filesystem_error& e) + { + ALICEVISION_LOG_ERROR("Can't open image file : " << imagePaths[i]); + } + + setFisheyeImage(imageIn, metadatas[i], buffer, imageAlpha); + + if(i == 0) + { + bufferOut = buffer; + inSize = std::min(bufferOut.spec().width, bufferOut.spec().height); + imageOut.resize(2*inSize, inSize, true, image::RGBAfColor(0.f, 0.f, 0.f, 0.f)); + } + + fisheyeToEquirectangular(imageAlpha, nbImages, i, rotations, imageOut); + } + + // save equirectangular image with fisheye's metadata + std::string outputPath; + if(outputFolder.back() == '/') + outputPath = std::string(outputFolder + "panorama.exr"); + else + outputPath = std::string(outputFolder + "/panorama.exr"); + image::writeImage(outputPath, imageOut, image::EImageColorSpace::AUTO, bufferOut.specmod().extra_attribs); + + ALICEVISION_LOG_INFO("Panorama successfully written as " << outputPath); +} + + +int main(int argc, char** argv) +{ + // command-line parameters + std::string verboseLevel = 
system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); + std::vector inputPath; // media file path list + std::string outputFolder; // output folder for panorama + std::vector xRotation; + std::vector yRotation; + std::vector zRotation; + + po::options_description allParams("This program is used to stitch multiple fisheye images into an equirectangular 360° panorama\n" + "AliceVision fisheyeProjection"); + + po::options_description requiredParams("Required parameters"); + requiredParams.add_options() + ("input,i", po::value>(&inputPath)->required()->multitoken(), + "List of fisheye images or a folder containing them.") + ("output,o", po::value(&outputFolder)->required(), + "Output folder to write equirectangular images"); + + po::options_description optionalParams("Optional parameters"); + optionalParams.add_options() + ("xRotation,x", po::value>(&xRotation)->multitoken(), + "Angles to rotate each image on axis x : horizontal axis on the panorama.") + ("yRotation,y", po::value>(&yRotation)->multitoken(), + "Angles to rotate each image on axis y : vertical axis on the panorama.") + ("zRotation,z", po::value>(&zRotation)->multitoken(), + "Angles to rotate each image on axis z : depth axis on the panorama."); + + po::options_description logParams("Log parameters"); + logParams.add_options() + ("verboseLevel,v", po::value(&verboseLevel)->default_value(verboseLevel), + "verbosity level (fatal, error, warning, info, debug, trace)."); + + allParams.add(requiredParams).add(optionalParams).add(logParams); + + po::variables_map vm; + try + { + po::store(po::parse_command_line(argc, argv, allParams), vm); + + if(vm.count("help") || (argc == 1)) + { + ALICEVISION_COUT(allParams); + return EXIT_SUCCESS; + } + po::notify(vm); + } + catch(boost::program_options::required_option& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + catch(boost::program_options::error& e) + { + 
ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + + ALICEVISION_COUT("Program called with the following parameters:"); + ALICEVISION_COUT(vm); + + // set verbose level + system::Logger::get()->setLogLevel(verboseLevel); + + // check output folder and update to its absolute path + { + const fs::path outDir = fs::absolute(outputFolder); + outputFolder = outDir.string(); + if(!fs::is_directory(outDir)) + { + ALICEVISION_LOG_ERROR("Can't find folder " << outputFolder); + return EXIT_FAILURE; + } + } + + std::vector imagePaths; + std::vector times; + std::vector metadatas; + + const boost::regex dateTimeExp( + // Syntax of Exif:DateTimeOriginal tag + "(?[\\d\\:]*) " // YYYY:MM:DD date separated with time with a space + "(?\\d*):" // HH: hour + "(?\\d*):" // MM: minutes + "(?\\d*)" // SS: seconds + ); + + for(const std::string& entry: inputPath) + { + const fs::path path = fs::absolute(entry); + if(fs::exists(path) && fs::is_directory(path)) + { + for(fs::directory_entry& file : boost::make_iterator_range(fs::directory_iterator(path), {})) + { + if(fs::is_regular_file(file.status())) + { + imagePaths.push_back(file.path().string()); + + oiio::ParamValueList metadata = image::readImageMetadata(file.path().string()); + metadatas.push_back(metadata); + std::string dateTime; + dateTime = metadata.get_string("Exif:DateTimeOriginal"); + + if(dateTime.empty()) + { + ALICEVISION_LOG_ERROR("Can't sort images : no time metadata in " << file.path().string()); + return EXIT_FAILURE; + } + + // convert time from string to float to sort images by time for future stitching + boost::smatch what; + if(boost::regex_search(dateTime, what, dateTimeExp)) + { + times.push_back(24.0 * std::stof(what["h"]) + 60.0 * std::stof(what["m"]) + std::stof(what["s"])); + } + } + } + } + + else if(fs::exists(path) && fs::is_regular_file(fs::path(entry))) + { + imagePaths.push_back(path.string()); + + oiio::ParamValueList metadata = 
image::readImageMetadata(entry); + metadatas.push_back(metadata); + std::string dateTime; + dateTime = metadata.get_string("Exif:DateTimeOriginal"); + if(dateTime.empty()) + { + ALICEVISION_LOG_ERROR("Can't sort images : no time metadata in " << entry); + return EXIT_FAILURE; + } + + // convert time from string to float to sort images by time for future stitching + boost::smatch what; + if(boost::regex_search(dateTime, what, dateTimeExp)) + { + times.push_back(24.0 * std::stof(what["h"]) + 60.0 * std::stof(what["m"]) + std::stof(what["s"])); + } + } + + else + { + ALICEVISION_LOG_ERROR("Can't find file or folder " << inputPath); + return EXIT_FAILURE; + } + } + + int nbImages = imagePaths.size(); + std::vector times_sorted = times; + std::vector imagePaths_sorted; + std::vector metadatas_sorted; + xRotation.resize(nbImages, 0.0); + yRotation.resize(nbImages, 0.0); + zRotation.resize(nbImages, 0.0); + const std::array, 3> rotations = {xRotation, yRotation, zRotation}; + + // sort images according to their metadata "DateTime" + std::sort(times_sorted.begin(), times_sorted.end()); + for(int i=0; i::iterator it = std::find(times.begin(), times.end(), times_sorted[i]); + if(it != times.end()) + { + std::size_t index = std::distance(times.begin(), it); + imagePaths_sorted.push_back(imagePaths.at(index)); + metadatas_sorted.push_back(metadatas.at(index)); + } + else + { + ALICEVISION_LOG_ERROR("sorting failed"); + return EXIT_FAILURE; + } + } + + ALICEVISION_LOG_INFO(nbImages << " file paths found."); + + stitchPanorama(imagePaths_sorted, metadatas_sorted, rotations, outputFolder); + + return EXIT_SUCCESS; +} From 737ed205edcf25567a78c7f95fab14273708ab9a Mon Sep 17 00:00:00 2001 From: Unknown Date: Thu, 25 Jul 2019 11:54:12 +0200 Subject: [PATCH 002/174] [cmake] add ILMBASE_INCLUDE_PATH to FindOpenImageIO --- src/cmake/FindOpenImageIO.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cmake/FindOpenImageIO.cmake b/src/cmake/FindOpenImageIO.cmake 
index 0dd3ca775f..d8b2a8ce60 100644 --- a/src/cmake/FindOpenImageIO.cmake +++ b/src/cmake/FindOpenImageIO.cmake @@ -134,7 +134,7 @@ endif (OPENIMAGEIO_LIBRARY AND # Set standard CMake FindPackage variables if found. if (OPENIMAGEIO_FOUND) - set(OPENIMAGEIO_INCLUDE_DIRS ${OPENIMAGEIO_INCLUDE_DIR} ${OPENEXR_INCLUDE_DIR}) + set(OPENIMAGEIO_INCLUDE_DIRS ${OPENIMAGEIO_INCLUDE_DIR} ${OPENEXR_INCLUDE_DIR} ${ILMBASE_INCLUDE_PATH}) set(OPENIMAGEIO_LIBRARIES ${OPENIMAGEIO_LIBRARY} ${ILMBASE_LIBRARIES}) endif (OPENIMAGEIO_FOUND) From d21c101c431ea937736b6ec99a5b1c4a56b312b0 Mon Sep 17 00:00:00 2001 From: Unknown Date: Thu, 25 Jul 2019 11:57:58 +0200 Subject: [PATCH 003/174] [image] add instantiation for float RGBA pixel type --- src/aliceVision/image/io.cpp | 15 +++++++++++++++ src/aliceVision/image/io.hpp | 3 +++ src/aliceVision/image/pixelTypes.hpp | 4 ++++ 3 files changed, 22 insertions(+) diff --git a/src/aliceVision/image/io.cpp b/src/aliceVision/image/io.cpp index 6688261f9f..b2efafc39b 100644 --- a/src/aliceVision/image/io.cpp +++ b/src/aliceVision/image/io.cpp @@ -135,6 +135,11 @@ void getBufferFromImage(Image& image, oiio::ImageBuf& buffer) getBufferFromImage(image, oiio::TypeDesc::UINT8, 1, buffer); } +void getBufferFromImage(Image& image, oiio::ImageBuf& buffer) +{ + getBufferFromImage(image, oiio::TypeDesc::FLOAT, 4, buffer); +} + void getBufferFromImage(Image& image, oiio::ImageBuf& buffer) { getBufferFromImage(image, oiio::TypeDesc::UINT8, 4, buffer); @@ -338,6 +343,11 @@ void readImage(const std::string& path, Image& image, EImageColor readImage(path, oiio::TypeDesc::UINT8, 1, image, imageColorSpace); } +void readImage(const std::string& path, Image& image, EImageColorSpace imageColorSpace) +{ + readImage(path, oiio::TypeDesc::FLOAT, 4, image, imageColorSpace); +} + void readImage(const std::string& path, Image& image, EImageColorSpace imageColorSpace) { readImage(path, oiio::TypeDesc::UINT8, 4, image, imageColorSpace); @@ -358,6 +368,11 @@ void 
writeImage(const std::string& path, const Image& image, EIma writeImage(path, oiio::TypeDesc::UINT8, 1, image, imageColorSpace, metadata); } +void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata) +{ + writeImage(path, oiio::TypeDesc::FLOAT, 4, image, imageColorSpace, metadata); +} + void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata) { writeImage(path, oiio::TypeDesc::UINT8, 4, image, imageColorSpace, metadata); diff --git a/src/aliceVision/image/io.hpp b/src/aliceVision/image/io.hpp index e394ce0a54..bdfaf9f8da 100644 --- a/src/aliceVision/image/io.hpp +++ b/src/aliceVision/image/io.hpp @@ -108,6 +108,7 @@ void readImageMetadata(const std::string& path, int& width, int& height, std::ma */ void getBufferFromImage(Image& image, oiio::ImageBuf& buffer); void getBufferFromImage(Image& image, oiio::ImageBuf& buffer); +void getBufferFromImage(Image& image, oiio::ImageBuf& buffer); void getBufferFromImage(Image& image, oiio::ImageBuf& buffer); void getBufferFromImage(Image& image, oiio::ImageBuf& buffer); void getBufferFromImage(Image& image, oiio::ImageBuf& buffer); @@ -120,6 +121,7 @@ void getBufferFromImage(Image& image, oiio::ImageBuf& buffer); */ void readImage(const std::string& path, Image& image, EImageColorSpace imageColorSpace); void readImage(const std::string& path, Image& image, EImageColorSpace imageColorSpace); +void readImage(const std::string& path, Image& image, EImageColorSpace imageColorSpace); void readImage(const std::string& path, Image& image, EImageColorSpace imageColorSpace); void readImage(const std::string& path, Image& image, EImageColorSpace imageColorSpace); void readImage(const std::string& path, Image& image, EImageColorSpace imageColorSpace); @@ -130,6 +132,7 @@ void readImage(const std::string& path, Image& image, EImageColorSpace * @param[in] image The output image buffer */ 
void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata = oiio::ParamValueList()); +void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata = oiio::ParamValueList()); void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata = oiio::ParamValueList()); void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata = oiio::ParamValueList()); void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata = oiio::ParamValueList()); diff --git a/src/aliceVision/image/pixelTypes.hpp b/src/aliceVision/image/pixelTypes.hpp index 4b2850d2dc..2f89783f8b 100644 --- a/src/aliceVision/image/pixelTypes.hpp +++ b/src/aliceVision/image/pixelTypes.hpp @@ -362,7 +362,11 @@ namespace aliceVision T( ( Z )( *this )( 3 ) * val ) ); } }; + + /// Instantiation for unsigned char color component typedef Rgba RGBAColor; + /// Instantiation for float color component + typedef Rgba RGBAfColor; const RGBColor WHITE( 255, 255, 255 ); const RGBColor BLACK( 0, 0, 0 ); From 5077007b7e810f968421e79e4760b1459f888e63 Mon Sep 17 00:00:00 2001 From: Unknown Date: Thu, 25 Jul 2019 12:02:07 +0200 Subject: [PATCH 004/174] [image] add RGBA default constructor with all channels set to zero --- src/aliceVision/image/pixelTypes.hpp | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/src/aliceVision/image/pixelTypes.hpp b/src/aliceVision/image/pixelTypes.hpp index 2f89783f8b..06f6a46b78 100644 --- a/src/aliceVision/image/pixelTypes.hpp +++ b/src/aliceVision/image/pixelTypes.hpp @@ -199,10 +199,9 @@ namespace aliceVision * @param blue component value * @param alpha component value */ - inline Rgba( const T red, 
const T green, const T blue, const T alpha = static_cast( 1 ) ) + inline Rgba( const T red, const T green, const T blue, const T alpha = T(1) ) : Base( red, green, blue, alpha ) { - } /** @@ -212,29 +211,38 @@ namespace aliceVision explicit inline Rgba( const Base& val ) : Base( val ) { - } /** - * @brief RGB constructor with default alpha value + * @brief RGBA constructor with default alpha value to 1 * @param val Value to set in each RGB component * @note This is equivalent to RGBA( val , val , val , 1 ) */ - explicit inline Rgba( const T val = 0 ) - : Base( val, val, val, static_cast( 1 ) ) + explicit inline Rgba( const T val ) + : Base( val, val, val, T(1) ) { + } + /** + * @brief Default RGBA constructor set all channels to zero (including the alpha channel) + * @warning The alpha channel is initialized to 0. + * It is used in generic/templated code like "sampler" + * which creates an empty color and accumulate color contributions. + */ + explicit inline Rgba() + : Base( T(0), T(0), T(0), T(0) ) + { } /** * @brief Copy constructor * @param val Source RGBA value */ - inline Rgba( const RGBColor & val ) - : Base( val.r(), val.g(), val.b(), static_cast( 1 ) ) + inline Rgba( const RGBColor & val, const T alpha = 1 ) + : Base( val.r(), val.g(), val.b(), alpha ) { - } + //-- construction method //------------------------------ From bdd2b910b8c1de3a80dc971f9c84757d917449b8 Mon Sep 17 00:00:00 2001 From: Unknown Date: Thu, 25 Jul 2019 12:05:51 +0200 Subject: [PATCH 005/174] [image][Sampler] bug fix : call to default constructor All channels need to be set to zero including alpha for RGBA colors --- src/aliceVision/image/Sampler.hpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/aliceVision/image/Sampler.hpp b/src/aliceVision/image/Sampler.hpp index ef0be3204a..4bef6feae8 100644 --- a/src/aliceVision/image/Sampler.hpp +++ b/src/aliceVision/image/Sampler.hpp @@ -430,7 +430,8 @@ struct Sampler2d _sampler( dx , coefs_x ) ; _sampler( dy , coefs_y ) 
; - typename RealPixel::real_type res(0) ; + // Default color constructor init all channels to zero + typename RealPixel::real_type res; // integer position of sample (x,y) const int grid_x = static_cast( floor( x ) ); From c7e842c354d579ed07b6a94a46d53a300c55af0c Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Fri, 26 Jul 2019 16:13:02 +0200 Subject: [PATCH 006/174] [software] add doxygen doc --- src/software/utils/main_fisheyeProjection.cpp | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index 8aa821d649..d8e15f5255 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -79,11 +79,20 @@ class SphericalMapping } }; + float sigmoid(float x, float sigwidth, float sigMid) { return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); } + +/** + * @brief Function to set fisheye images with correct orientation and add an alpha channel + * @param[in] imageIn - input RGBf fisheye image + * @param[in] metadata - contains orientation information + * @param[out] buffer - to store input metadata in output image + * @param[out] imageAlpha - output RGBAf fisheye image correctly oriented + */ void setFisheyeImage(image::Image& imageIn, const oiio::ParamValueList& metadata, oiio::ImageBuf& buffer, image::Image& imageAlpha) { image::getBufferFromImage(imageIn, buffer); @@ -131,6 +140,14 @@ void setFisheyeImage(image::Image& imageIn, const oiio::ParamV } +/** + * @brief Project fisheye image into an equirectangular map and merge it to output panorama + * @param[in] imageIn - input RGBAf fisheye image + * @param[in] nbImages + * @param[in] iter - index of input image for merging into panorama + * @param[in] rotations - contains adjustment rotations on each image set by user + * @param[out] imageOut - output panorama which is incremented at each call + */ void fisheyeToEquirectangular(image::Image& 
imageIn, const int nbImages, int iter, const std::array, 3> rotations, image::Image& imageOut) { std::size_t inWidth = imageIn.Width(); @@ -167,6 +184,13 @@ void fisheyeToEquirectangular(image::Image& imageIn, const in void stitchPanorama(const std::vector& imagePaths, const std::vector& metadatas, const std::array, 3> rotations, const std::string& outputFolder) +/** + * @brief Load input images and call functions to stitch 360° panorama + * @param[in] imagePaths - input images paths + * @param[in] metadatas - input metadata for each image + * @param[in] rotations - contains adjustment rotations on each image set by user + * @param[out] outputFolder - output folder path to write panorama + */ { int nbImages = imagePaths.size(); image::Image imageOut; From 4f733fa410f19eda4ab993359eb262e15041f948 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Fri, 26 Jul 2019 16:13:54 +0200 Subject: [PATCH 007/174] [software] modify default rotation angles --- src/software/utils/main_fisheyeProjection.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index d8e15f5255..2960f83f3a 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -154,9 +154,9 @@ void fisheyeToEquirectangular(image::Image& imageIn, const in std::size_t inHeight = imageIn.Height(); std::size_t inSize = std::min(inWidth, inHeight); - double xRotation = -5.0 + rotations[0].at(iter); - double zRotation = double(iter * -360.0 / nbImages) + rotations[2].at(iter); - double yRotation = double(-(180.0 + zRotation)/60.0) + rotations[1].at(iter); + double zRotation = double(30.0 - iter * 360.0 / nbImages) + rotations[2].at(iter); + double xRotation = double(-abs(90.0 - abs(zRotation))/30.0) + rotations[0].at(iter); + double yRotation = rotations[1].at(iter); const image::Sampler2d sampler; for(int j = 0; j < inSize; ++j) From 
b4c5454b722eb1923c3da6c70e7401ccb66b01c2 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Fri, 26 Jul 2019 16:14:55 +0200 Subject: [PATCH 008/174] [software] add option to give complete output path insted of folder --- src/software/utils/main_fisheyeProjection.cpp | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index 2960f83f3a..43ec2e7d43 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -183,7 +183,6 @@ void fisheyeToEquirectangular(image::Image& imageIn, const in } -void stitchPanorama(const std::vector& imagePaths, const std::vector& metadatas, const std::array, 3> rotations, const std::string& outputFolder) /** * @brief Load input images and call functions to stitch 360° panorama * @param[in] imagePaths - input images paths @@ -191,6 +190,7 @@ void stitchPanorama(const std::vector& imagePaths, const std::vecto * @param[in] rotations - contains adjustment rotations on each image set by user * @param[out] outputFolder - output folder path to write panorama */ +void stitchPanorama(const std::vector& imagePaths, const std::vector& metadatas, const std::array, 3> rotations, std::string& outputPath) { int nbImages = imagePaths.size(); image::Image imageOut; @@ -227,11 +227,13 @@ void stitchPanorama(const std::vector& imagePaths, const std::vecto } // save equirectangular image with fisheye's metadata - std::string outputPath; - if(outputFolder.back() == '/') - outputPath = std::string(outputFolder + "panorama.exr"); - else - outputPath = std::string(outputFolder + "/panorama.exr"); + if(fs::is_directory(fs::path(outputPath))) + { + if(outputPath.back() == '/') + outputPath = std::string(outputPath + "panorama.exr"); + else + outputPath = std::string(outputPath + "/panorama.exr"); + } image::writeImage(outputPath, imageOut, image::EImageColorSpace::AUTO, 
bufferOut.specmod().extra_attribs); ALICEVISION_LOG_INFO("Panorama successfully written as " << outputPath); @@ -256,7 +258,7 @@ int main(int argc, char** argv) ("input,i", po::value>(&inputPath)->required()->multitoken(), "List of fisheye images or a folder containing them.") ("output,o", po::value(&outputFolder)->required(), - "Output folder to write equirectangular images"); + "Output panorama folder or complete path"); po::options_description optionalParams("Optional parameters"); optionalParams.add_options() @@ -309,11 +311,6 @@ int main(int argc, char** argv) { const fs::path outDir = fs::absolute(outputFolder); outputFolder = outDir.string(); - if(!fs::is_directory(outDir)) - { - ALICEVISION_LOG_ERROR("Can't find folder " << outputFolder); - return EXIT_FAILURE; - } } std::vector imagePaths; From 6a02f93e3a39d635783389e064a016c2283202dd Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Wed, 7 Aug 2019 18:40:16 +0200 Subject: [PATCH 009/174] [software] stitching: move buffer initialization --- src/software/utils/main_fisheyeProjection.cpp | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index 43ec2e7d43..4298df45a4 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -93,10 +93,8 @@ float sigmoid(float x, float sigwidth, float sigMid) * @param[out] buffer - to store input metadata in output image * @param[out] imageAlpha - output RGBAf fisheye image correctly oriented */ -void setFisheyeImage(image::Image& imageIn, const oiio::ParamValueList& metadata, oiio::ImageBuf& buffer, image::Image& imageAlpha) +void setFisheyeImage(image::Image& imageIn, oiio::ImageBuf& buffer, image::Image& imageAlpha) { - image::getBufferFromImage(imageIn, buffer); - buffer.specmod().extra_attribs = metadata; bool correctOrientation = oiio::ImageBufAlgo::reorient(buffer, buffer); if(!correctOrientation) @@ 
-214,7 +212,7 @@ void stitchPanorama(const std::vector& imagePaths, const std::vecto ALICEVISION_LOG_ERROR("Can't open image file : " << imagePaths[i]); } - setFisheyeImage(imageIn, metadatas[i], buffer, imageAlpha); + setFisheyeImage(imageIn, buffer, imageAlpha); if(i == 0) { From e5776706f3f61744d142bdaca25a3d45441f84cf Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Wed, 7 Aug 2019 18:42:16 +0200 Subject: [PATCH 010/174] [software] stitching: bug fix on buffer copy --- src/software/utils/main_fisheyeProjection.cpp | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index 4298df45a4..8264313b23 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -192,12 +192,13 @@ void stitchPanorama(const std::vector& imagePaths, const std::vecto { int nbImages = imagePaths.size(); image::Image imageOut; - oiio::ImageBuf bufferOut; + std::vector buffers(nbImages); + oiio::ImageBuf& bufferOut = buffers[0]; std::size_t inSize; for(int i=0; i imageIn; image::Image imageAlpha; @@ -212,11 +213,13 @@ void stitchPanorama(const std::vector& imagePaths, const std::vecto ALICEVISION_LOG_ERROR("Can't open image file : " << imagePaths[i]); } + image::getBufferFromImage(imageIn, buffer); + buffer.specmod().extra_attribs = metadatas[i]; + setFisheyeImage(imageIn, buffer, imageAlpha); if(i == 0) { - bufferOut = buffer; inSize = std::min(bufferOut.spec().width, bufferOut.spec().height); imageOut.resize(2*inSize, inSize, true, image::RGBAfColor(0.f, 0.f, 0.f, 0.f)); } From 0d76793decf7873bd897a3850448a1c6f2b518d3 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Wed, 7 Aug 2019 18:44:14 +0200 Subject: [PATCH 011/174] [software] stitching: change log info description --- src/software/utils/main_fisheyeProjection.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index 8264313b23..f7c365db14 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -202,7 +202,7 @@ void stitchPanorama(const std::vector& imagePaths, const std::vecto image::Image imageIn; image::Image imageAlpha; - ALICEVISION_LOG_INFO("Converting " << imagePaths[i]); + ALICEVISION_LOG_INFO("Projecting " << imagePaths[i] << " into equirectangular space"); try { From 4e49836d64a0cb54be2a6400ea9801d540bb9196 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Thu, 8 Aug 2019 17:27:10 +0200 Subject: [PATCH 012/174] [imageIO] add isSuported function to check input image extension --- src/aliceVision/image/io.cpp | 21 +++++++++++++++++++++ src/aliceVision/image/io.hpp | 8 ++++++++ 2 files changed, 29 insertions(+) diff --git a/src/aliceVision/image/io.cpp b/src/aliceVision/image/io.cpp index 45bd1140b2..caf2f8067f 100644 --- a/src/aliceVision/image/io.cpp +++ b/src/aliceVision/image/io.cpp @@ -74,6 +74,27 @@ std::istream& operator>>(std::istream& in, EImageFileType& imageFileType) return in; } +bool isSupported(const std::string &ext) +{ + static const std::string extensionList = oiio::get_string_attribute("extension_list"); + std::vector supportedExtensions; + + std::vector supportedFormat; + boost::split(supportedFormat, extensionList, boost::is_any_of(";"), boost::token_compress_on); + for(const std::string& format: supportedFormat) + { + std::vector extensions; + std::string str = format.substr(format.find(":")+1); + boost::split(extensions, str, boost::is_any_of(","), boost::token_compress_on); + for(std::string& extension: extensions) + supportedExtensions.push_back(extension.insert(0, ".")); + } + + const auto start = supportedExtensions.begin(); + const auto end = supportedExtensions.end(); + return (std::find(start, end, boost::to_lower_copy(ext)) != end); +} + // Warning: type conversion problems from 
string to param value, we may lose some metadata with string maps oiio::ParamValueList getMetadataFromMap(const std::map& metadataMap) { diff --git a/src/aliceVision/image/io.hpp b/src/aliceVision/image/io.hpp index aea55d0514..2ff4b370d8 100644 --- a/src/aliceVision/image/io.hpp +++ b/src/aliceVision/image/io.hpp @@ -13,6 +13,7 @@ #include #include +#include #include namespace oiio = OIIO; @@ -78,6 +79,13 @@ std::ostream& operator<<(std::ostream& os, EImageFileType imageFileType); */ std::istream& operator>>(std::istream& in, EImageFileType& imageFileType); +/** + * @brief Check if input image extension is supported by openImageIO i.e. exists in extension_list from imageio.h + * @param[in] ext - image extension + * @return true if valid extension + */ +bool isSupported(const std::string &ext); + /** * @brief convert a metadata string map into an oiio::ParamValueList * @param[in] metadataMap string map From 0c6925511a47e8f9613718f04b8f9b3eaf6b2772 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Thu, 8 Aug 2019 17:36:41 +0200 Subject: [PATCH 013/174] [software] stitching: add log error message --- src/software/utils/main_fisheyeProjection.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index f7c365db14..ba07902ed8 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -387,6 +387,12 @@ int main(int argc, char** argv) } } + if(imagePaths.empty()) + { + ALICEVISION_LOG_ERROR("No valid image file found in input folder or paths"); + return EXIT_FAILURE; + } + int nbImages = imagePaths.size(); std::vector times_sorted = times; std::vector imagePaths_sorted; From 363a66480f1df8c0f6a9ccc28e5fd330cfcf9750 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Thu, 8 Aug 2019 18:06:47 +0200 Subject: [PATCH 014/174] [software] stitching: get only valid image files from input folder ---
src/software/utils/main_fisheyeProjection.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index ba07902ed8..7172328cbb 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -333,7 +333,7 @@ int main(int argc, char** argv) { for(fs::directory_entry& file : boost::make_iterator_range(fs::directory_iterator(path), {})) { - if(fs::is_regular_file(file.status())) + if(image::isSupported(file.path().extension().string())) { imagePaths.push_back(file.path().string()); @@ -358,7 +358,7 @@ int main(int argc, char** argv) } } - else if(fs::exists(path) && fs::is_regular_file(fs::path(entry))) + else if(fs::exists(path) && image::isSupported(path.extension().string())) { imagePaths.push_back(path.string()); From 670b54dcafc9bed0d6e0fa12e8394d90135ac599 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Thu, 8 Aug 2019 18:08:22 +0200 Subject: [PATCH 015/174] [software] stitching: add parameter to adjust blur width in alpha mask --- src/software/utils/main_fisheyeProjection.cpp | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index 7172328cbb..28fe93c88b 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -93,7 +93,7 @@ float sigmoid(float x, float sigwidth, float sigMid) * @param[out] buffer - to store input metadata in output image * @param[out] imageAlpha - output RGBAf fisheye image correctly oriented */ -void setFisheyeImage(image::Image& imageIn, oiio::ImageBuf& buffer, image::Image& imageAlpha) +void setFisheyeImage(image::Image& imageIn, const float blurWidth_param, oiio::ImageBuf& buffer, image::Image& imageAlpha) { bool correctOrientation = oiio::ImageBufAlgo::reorient(buffer, buffer); @@ -110,9 +110,9 
@@ void setFisheyeImage(image::Image& imageIn, oiio::ImageBuf& bu imageAlpha.resize(width, height, false); - const float maxRadius = std::min(width, height) * 0.5 * 0.95; - const float blurWidth = maxRadius * 0.2; - const float blurMid = maxRadius * 0.95; + const float maxRadius = std::min(width, height) * 0.5; + const float blurWidth = maxRadius * blurWidth_param; + const float blurMid = maxRadius - blurWidth/2.f; const Vec2i center(width/2, height/2); for(std::size_t x = 0; x < width; ++x) @@ -188,7 +188,7 @@ void fisheyeToEquirectangular(image::Image& imageIn, const in * @param[in] rotations - contains adjustment rotations on each image set by user * @param[out] outputFolder - output folder path to write panorama */ -void stitchPanorama(const std::vector& imagePaths, const std::vector& metadatas, const std::array, 3> rotations, std::string& outputPath) +void stitchPanorama(const std::vector& imagePaths, const std::vector& metadatas, const float blurWidth, const std::array, 3> rotations, std::string& outputPath) { int nbImages = imagePaths.size(); image::Image imageOut; @@ -216,7 +216,7 @@ void stitchPanorama(const std::vector& imagePaths, const std::vecto image::getBufferFromImage(imageIn, buffer); buffer.specmod().extra_attribs = metadatas[i]; - setFisheyeImage(imageIn, buffer, imageAlpha); + setFisheyeImage(imageIn, blurWidth, buffer, imageAlpha); if(i == 0) { @@ -247,6 +247,7 @@ int main(int argc, char** argv) std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); std::vector inputPath; // media file path list std::string outputFolder; // output folder for panorama + float blurWidth = 0.2f; std::vector xRotation; std::vector yRotation; std::vector zRotation; @@ -263,6 +264,8 @@ int main(int argc, char** argv) po::options_description optionalParams("Optional parameters"); optionalParams.add_options() + ("blurWidth,b", po::value(&blurWidth)->default_value(blurWidth), + "Blur width of alpha channel for all 
fisheye (between 0 and 1), determine the transitions sharpness.") ("xRotation,x", po::value>(&xRotation)->multitoken(), "Angles to rotate each image on axis x : horizontal axis on the panorama.") ("yRotation,y", po::value>(&yRotation)->multitoken(), @@ -422,7 +425,7 @@ int main(int argc, char** argv) ALICEVISION_LOG_INFO(nbImages << " file paths found."); - stitchPanorama(imagePaths_sorted, metadatas_sorted, rotations, outputFolder); + stitchPanorama(imagePaths_sorted, metadatas_sorted, blurWidth, rotations, outputFolder); return EXIT_SUCCESS; } From 9cf0d1d29c1a6d0de169b7f4db3ec2e9375cd90a Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Fri, 9 Aug 2019 18:15:17 +0200 Subject: [PATCH 016/174] [image] pixelTypes: replace all typedef by using --- src/aliceVision/image/pixelTypes.hpp | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/aliceVision/image/pixelTypes.hpp b/src/aliceVision/image/pixelTypes.hpp index 06f6a46b78..12f9767328 100644 --- a/src/aliceVision/image/pixelTypes.hpp +++ b/src/aliceVision/image/pixelTypes.hpp @@ -21,8 +21,8 @@ namespace aliceVision template class Rgb : public Eigen::Matrix { - typedef Eigen::Matrix Base; - typedef T TBase; + using Base = Eigen::Matrix; + using TBase = T; public: //------------------------------ @@ -177,9 +177,9 @@ namespace aliceVision }; /// Instantiation for unsigned char color component - typedef Rgb RGBColor; + using RGBColor = Rgb; /// Instantiation for float color component - typedef Rgb RGBfColor; + using RGBfColor = Rgb; /** * @brief RGBA templated pixel type @@ -187,7 +187,7 @@ namespace aliceVision template class Rgba : public Eigen::Matrix { - typedef Eigen::Matrix Base; + using Base = Eigen::Matrix; public: //------------------------------ @@ -372,9 +372,9 @@ namespace aliceVision }; /// Instantiation for unsigned char color component - typedef Rgba RGBAColor; + using RGBAColor = Rgba; /// Instantiation for float color component - typedef Rgba RGBAfColor; + using 
RGBAfColor = Rgba; const RGBColor WHITE( 255, 255, 255 ); const RGBColor BLACK( 0, 0, 0 ); From 25cffd450b103bf880b1a9c4bf6607deea142197 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Fri, 9 Aug 2019 18:17:05 +0200 Subject: [PATCH 017/174] [imageIO] add const --- src/aliceVision/image/io.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aliceVision/image/io.cpp b/src/aliceVision/image/io.cpp index caf2f8067f..00a1d8b700 100644 --- a/src/aliceVision/image/io.cpp +++ b/src/aliceVision/image/io.cpp @@ -84,7 +84,7 @@ bool isSupported(const std::string &ext) for(const std::string& format: supportedFormat) { std::vector extensions; - std::string str = format.substr(format.find(":")+1); + const std::string str = format.substr(format.find(":")+1); boost::split(extensions, str, boost::is_any_of(","), boost::token_compress_on); for(std::string& extension: extensions) supportedExtensions.push_back(extension.insert(0, ".")); From b2aec2240b467874bd88f1eb86b6535229eea1e0 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Fri, 9 Aug 2019 18:20:53 +0200 Subject: [PATCH 018/174] [software] Stitching: add or remove const and clean code --- src/software/utils/main_fisheyeProjection.cpp | 53 +++++++------------ 1 file changed, 20 insertions(+), 33 deletions(-) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index 28fe93c88b..97f4d1887e 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -28,6 +28,8 @@ #include #include #include +#include + // These constants define the current software version. // They must be updated when the command line is changed. 
@@ -39,6 +41,7 @@ using namespace aliceVision; namespace fs = boost::filesystem; namespace po = boost::program_options; namespace oiio = OIIO; +namespace bmf = boost::math::float_constants; /** @@ -93,7 +96,7 @@ float sigmoid(float x, float sigwidth, float sigMid) * @param[out] buffer - to store input metadata in output image * @param[out] imageAlpha - output RGBAf fisheye image correctly oriented */ -void setFisheyeImage(image::Image& imageIn, const float blurWidth_param, oiio::ImageBuf& buffer, image::Image& imageAlpha) +void setFisheyeImage(image::Image& imageIn, float blurWidth_param, oiio::ImageBuf& buffer, image::Image& imageAlpha) { bool correctOrientation = oiio::ImageBufAlgo::reorient(buffer, buffer); @@ -110,10 +113,10 @@ void setFisheyeImage(image::Image& imageIn, const float blurWi imageAlpha.resize(width, height, false); - const float maxRadius = std::min(width, height) * 0.5; + const float maxRadius = std::min(width, height) * 0.5f; const float blurWidth = maxRadius * blurWidth_param; const float blurMid = maxRadius - blurWidth/2.f; - const Vec2i center(width/2, height/2); + const Vec2i center(width/2.f, height/2.f); for(std::size_t x = 0; x < width; ++x) { @@ -121,7 +124,7 @@ void setFisheyeImage(image::Image& imageIn, const float blurWi { const image::RGBfColor& inPixel = imageIn(y, x); image::RGBAfColor& alphaPixel = imageAlpha(y, x); - float dist = sqrt((x-center(0)) * (x-center(0)) + (y-center(1)) * (y-center(1))); + const float dist = std::sqrt((x-center(0)) * (x-center(0)) + (y-center(1)) * (y-center(1))); if(dist > maxRadius) { alphaPixel.a() = 0.f; @@ -146,30 +149,28 @@ void setFisheyeImage(image::Image& imageIn, const float blurWi * @param[in] rotations - contains adjustment rotations on each image set by user * @param[out] imageOut - output panorama which is incremented at each call */ -void fisheyeToEquirectangular(image::Image& imageIn, const int nbImages, int iter, const std::array, 3> rotations, image::Image& imageOut) +void 
fisheyeToEquirectangular(image::Image& imageIn, int nbImages, int iter, const std::array, 3>& rotations, image::Image& imageOut) { std::size_t inWidth = imageIn.Width(); std::size_t inHeight = imageIn.Height(); std::size_t inSize = std::min(inWidth, inHeight); - double zRotation = double(30.0 - iter * 360.0 / nbImages) + rotations[2].at(iter); - double xRotation = double(-abs(90.0 - abs(zRotation))/30.0) + rotations[0].at(iter); - double yRotation = rotations[1].at(iter); + const double zRotation = double(30.0 - iter * 360.0 / nbImages) + rotations[2].at(iter); + const double xRotation = double(-abs(90.0 - abs(zRotation))/30.0) + rotations[0].at(iter); + const double yRotation = rotations[1].at(iter); const image::Sampler2d sampler; for(int j = 0; j < inSize; ++j) { for(int i = 0; i < 2 * inSize; ++i) { - image::RGBAfColor pixel(0.f, 0.f, 0.f, 0.f); - Vec3 ray = SphericalMapping::get3DPoint(Vec2(i,j), 2*inSize, inSize); ray = rotationXYZ(degreeToRadian(xRotation), degreeToRadian(yRotation), degreeToRadian(zRotation)) * ray; const Vec2 x = SphericalMapping::get2DCoordinates(ray, inSize); const Vec2 xFish(inWidth/2 - x(0), inHeight/2 - x(1)); - pixel = sampler(imageIn, xFish(1), xFish(0)); + const image::RGBAfColor& pixel = sampler(imageIn, xFish(1), xFish(0)); float alpha = pixel.a(); imageOut(j, i).r() = pixel.r()*alpha + imageOut(j, i).r()*(1.f-alpha); @@ -188,7 +189,7 @@ void fisheyeToEquirectangular(image::Image& imageIn, const in * @param[in] rotations - contains adjustment rotations on each image set by user * @param[out] outputFolder - output folder path to write panorama */ -void stitchPanorama(const std::vector& imagePaths, const std::vector& metadatas, const float blurWidth, const std::array, 3> rotations, std::string& outputPath) +void stitchPanorama(const std::vector& imagePaths, const std::vector& metadatas, float blurWidth, const std::array, 3>& rotations, std::string& outputPath) { int nbImages = imagePaths.size(); image::Image imageOut; @@ -204,15 
+205,7 @@ void stitchPanorama(const std::vector& imagePaths, const std::vecto ALICEVISION_LOG_INFO("Projecting " << imagePaths[i] << " into equirectangular space"); - try - { - image::readImage(imagePaths[i], imageIn, image::EImageColorSpace::LINEAR); - } - catch(fs::filesystem_error& e) - { - ALICEVISION_LOG_ERROR("Can't open image file : " << imagePaths[i]); - } - + image::readImage(imagePaths[i], imageIn, image::EImageColorSpace::LINEAR); image::getBufferFromImage(imageIn, buffer); buffer.specmod().extra_attribs = metadatas[i]; @@ -230,10 +223,7 @@ void stitchPanorama(const std::vector& imagePaths, const std::vecto // save equirectangular image with fisheye's metadata if(fs::is_directory(fs::path(outputPath))) { - if(outputPath.back() == '/') - outputPath = std::string(outputPath + "panorama.exr"); - else - outputPath = std::string(outputPath + "/panorama.exr"); + outputPath = (fs::path(outputPath) / ("panorama.exr")).string(); } image::writeImage(outputPath, imageOut, image::EImageColorSpace::AUTO, bufferOut.specmod().extra_attribs); @@ -312,10 +302,7 @@ int main(int argc, char** argv) system::Logger::get()->setLogLevel(verboseLevel); // check output folder and update to its absolute path - { - const fs::path outDir = fs::absolute(outputFolder); - outputFolder = outDir.string(); - } + outputFolder = fs::absolute(outputFolder).string(); std::vector imagePaths; std::vector times; @@ -340,7 +327,7 @@ int main(int argc, char** argv) { imagePaths.push_back(file.path().string()); - oiio::ParamValueList metadata = image::readImageMetadata(file.path().string()); + const oiio::ParamValueList metadata = image::readImageMetadata(file.path().string()); metadatas.push_back(metadata); std::string dateTime; dateTime = metadata.get_string("Exif:DateTimeOriginal"); @@ -396,7 +383,7 @@ int main(int argc, char** argv) return EXIT_FAILURE; } - int nbImages = imagePaths.size(); + const auto nbImages = imagePaths.size(); std::vector times_sorted = times; std::vector 
imagePaths_sorted; std::vector metadatas_sorted; @@ -407,12 +394,12 @@ int main(int argc, char** argv) // sort images according to their metadata "DateTime" std::sort(times_sorted.begin(), times_sorted.end()); - for(int i=0; i::iterator it = std::find(times.begin(), times.end(), times_sorted[i]); if(it != times.end()) { - std::size_t index = std::distance(times.begin(), it); + const std::size_t index = std::distance(times.begin(), it); imagePaths_sorted.push_back(imagePaths.at(index)); metadatas_sorted.push_back(metadatas.at(index)); } From 78b80414e7bcd9a5b59aeb59e053dd2d34c551b2 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Fri, 9 Aug 2019 18:21:40 +0200 Subject: [PATCH 019/174] [software] Stitching: change class SphericalMapping to namespace --- src/software/utils/main_fisheyeProjection.cpp | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index 97f4d1887e..2b8b635bac 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -47,11 +47,9 @@ namespace bmf = boost::math::float_constants; /** * @brief Function to map 3D coordinates onto a 2D image according a spherical projection */ -class SphericalMapping +namespace SphericalMapping { -public: - - static Vec3 get3DPoint(const Vec2& pos2d, int width, int height) + Vec3 get3DPoint(const Vec2& pos2d, int width, int height) { const double x = pos2d(0) - width; const double y = height/2.0 - pos2d(1); @@ -66,7 +64,7 @@ class SphericalMapping return Vec3(Px, Py, Pz); } - static Vec2 get2DCoordinates(const Vec3& ray, const int inSize) + Vec2 get2DCoordinates(const Vec3& ray, int inSize) { const double Px = ray(0); const double Py = ray(1); @@ -75,12 +73,12 @@ class SphericalMapping // float aperture = 2.0 * std::atan(36.0 / (8.0 * 2.0)); // float aperture = 2.0 * asin(1.0 / (2.0 * fNumber)); - float r = 2.0 * atan2(sqrt(Square(Px) +
Square(Pz)), Py) / M_PI; - float theta = atan2(Pz, Px); - Vec2 v(r*cos(theta), r*sin(theta)); + const float r = 2.f * std::atan2(std::sqrt(Square(Px) + Square(Pz)), Py) / bmf::pi; + const float theta = atan2(Pz, Px); + const Vec2 v(r*cos(theta), r*sin(theta)); return v * inSize/2; } -}; +} float sigmoid(float x, float sigwidth, float sigMid) From 0e03fc12b30cfcf65c359af127c7502d56ac9793 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Fri, 9 Aug 2019 18:22:25 +0200 Subject: [PATCH 020/174] [software] Stitching: add function to sort images by time --- src/software/utils/main_fisheyeProjection.cpp | 47 ++++++++++--------- 1 file changed, 25 insertions(+), 22 deletions(-) diff --git a/src/software/utils/main_fisheyeProjection.cpp b/src/software/utils/main_fisheyeProjection.cpp index 2b8b635bac..a3c749d048 100644 --- a/src/software/utils/main_fisheyeProjection.cpp +++ b/src/software/utils/main_fisheyeProjection.cpp @@ -86,6 +86,29 @@ float sigmoid(float x, float sigwidth, float sigMid) return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); } +/** + * @brief convert metadata "Exif:DateTimeOriginal" into float and fill a vector with returned value + * @param[in] dateTime - input string containing date and time of photo shooting + * @param[out] times - vector containing all times of photo shooting in float + */ +void convertTime(const std::string& dateTime, std::vector& times) +{ + const boost::regex dateTimeExp( + // Syntax of Exif:DateTimeOriginal tag + "(?[\\d\\:]*) " // YYYY:MM:DD date separated with time with a space + "(?\\d*):" // HH: hour + "(?\\d*):" // MM: minutes + "(?\\d*)" // SS: seconds + ); + + // convert time from string to float to sort images by time for future stitching + boost::smatch what; + if(boost::regex_search(dateTime, what, dateTimeExp)) + { + times.push_back(24.0 * std::stof(what["h"]) + 60.0 * std::stof(what["m"]) + std::stof(what["s"])); + } +} + /** + * @brief Function to set fisheye images with correct orientation and add an alpha
channel @@ -306,14 +329,6 @@ int main(int argc, char** argv) std::vector times; std::vector metadatas; - const boost::regex dateTimeExp( - // Syntax of Exif:DateTimeOriginal tag - "(?[\\d\\:]*) " // YYYY:MM:DD date separated with time with a space - "(?\\d*):" // HH: hour - "(?\\d*):" // MM: minutes - "(?\\d*)" // SS: seconds - ); - for(const std::string& entry: inputPath) { const fs::path path = fs::absolute(entry); @@ -335,13 +350,7 @@ int main(int argc, char** argv) ALICEVISION_LOG_ERROR("Can't sort images : no time metadata in " << file.path().string()); return EXIT_FAILURE; } - - // convert time from string to float to sort images by time for future stitching - boost::smatch what; - if(boost::regex_search(dateTime, what, dateTimeExp)) - { - times.push_back(24.0 * std::stof(what["h"]) + 60.0 * std::stof(what["m"]) + std::stof(what["s"])); - } + convertTime(dateTime, times); } } } @@ -359,13 +368,7 @@ int main(int argc, char** argv) ALICEVISION_LOG_ERROR("Can't sort images : no time metadata in " << entry); return EXIT_FAILURE; } - - // convert time from string to float to sort images by time for future stitching - boost::smatch what; - if(boost::regex_search(dateTime, what, dateTimeExp)) - { - times.push_back(24.0 * std::stof(what["h"]) + 60.0 * std::stof(what["m"]) + std::stof(what["s"])); - } + convertTime(dateTime, times); } else From 8edd995872499db91dffaaf72297d7445274739f Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Mon, 12 Aug 2019 18:40:17 +0200 Subject: [PATCH 021/174] [globalSfM] Add enum methods for rotations and translations --- .../GlobalSfMRotationAveragingSolver.hpp | 37 +++++++++++++++++++ .../GlobalSfMTranslationAveragingSolver.hpp | 36 ++++++++++++++++++ src/software/pipeline/main_globalSfM.cpp | 11 +++--- 3 files changed, 79 insertions(+), 5 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.hpp b/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.hpp index 
16e7e2d8d4..65e3d946ed 100644 --- a/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.hpp +++ b/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.hpp @@ -7,6 +7,10 @@ #pragma once +#include +#include +#include + namespace aliceVision { namespace sfm { @@ -22,6 +26,39 @@ enum ERelativeRotationInferenceMethod TRIPLET_ROTATION_INFERENCE_COMPOSITION_ERROR = 1 }; +inline std::string ERotationAveragingMethod_enumToString(ERotationAveragingMethod eRotationAveragingMethod) +{ + switch(eRotationAveragingMethod) + { + case ERotationAveragingMethod::ROTATION_AVERAGING_L1: + return "L1_minimization"; + case ERotationAveragingMethod::ROTATION_AVERAGING_L2: + return "L2_minimization"; + } + throw std::out_of_range("Invalid rotation averaging method type"); +} + +inline ERotationAveragingMethod ERotationAveragingMethod_stringToEnum(const std::string& RotationAveragingMethodName) +{ + if(RotationAveragingMethodName == "L1_minimization") return ERotationAveragingMethod::ROTATION_AVERAGING_L1; + if(RotationAveragingMethodName == "L2_minimization") return ERotationAveragingMethod::ROTATION_AVERAGING_L2; + + throw std::out_of_range("Invalid rotation averaging method name : '" + RotationAveragingMethodName + "'"); +} + +inline std::ostream& operator<<(std::ostream& os, ERotationAveragingMethod e) +{ + return os << ERotationAveragingMethod_enumToString(e); +} + +inline std::istream& operator>>(std::istream& in, ERotationAveragingMethod& rotationType) +{ + std::string token; + in >> token; + rotationType = ERotationAveragingMethod_stringToEnum(token); + return in; +} + } // namespace sfm } // namespace aliceVision diff --git a/src/aliceVision/sfm/pipeline/global/GlobalSfMTranslationAveragingSolver.hpp b/src/aliceVision/sfm/pipeline/global/GlobalSfMTranslationAveragingSolver.hpp index 77026e4153..411bc498e3 100644 --- a/src/aliceVision/sfm/pipeline/global/GlobalSfMTranslationAveragingSolver.hpp +++ 
b/src/aliceVision/sfm/pipeline/global/GlobalSfMTranslationAveragingSolver.hpp @@ -24,6 +24,42 @@ enum ETranslationAveragingMethod TRANSLATION_AVERAGING_SOFTL1 = 3 }; +inline std::string ETranslationAveragingMethod_enumToString(ETranslationAveragingMethod eTranslationAveragingMethod) +{ + switch(eTranslationAveragingMethod) + { + case ETranslationAveragingMethod::TRANSLATION_AVERAGING_L1: + return "L1_minimization"; + case ETranslationAveragingMethod::TRANSLATION_AVERAGING_L2_DISTANCE_CHORDAL: + return "L2_minimization"; + case ETranslationAveragingMethod::TRANSLATION_AVERAGING_SOFTL1: + return "L1_soft_minimization"; + } + throw std::out_of_range("Invalid translation averaging method type"); +} + +inline ETranslationAveragingMethod ETranslationAveragingMethod_stringToEnum(const std::string& TranslationAveragingMethodName) +{ + if(TranslationAveragingMethodName == "L1_minimization") return ETranslationAveragingMethod::TRANSLATION_AVERAGING_L1; + if(TranslationAveragingMethodName == "L2_minimization") return ETranslationAveragingMethod::TRANSLATION_AVERAGING_L2_DISTANCE_CHORDAL; + if(TranslationAveragingMethodName == "L1_soft_minimization") return ETranslationAveragingMethod::TRANSLATION_AVERAGING_SOFTL1; + + throw std::out_of_range("Invalid translation averaging method name : '" + TranslationAveragingMethodName + "'"); +} + +inline std::ostream& operator<<(std::ostream& os, ETranslationAveragingMethod e) +{ + return os << ETranslationAveragingMethod_enumToString(e); +} + +inline std::istream& operator>>(std::istream& in, ETranslationAveragingMethod& translationType) +{ + std::string token; + in >> token; + translationType = ETranslationAveragingMethod_stringToEnum(token); + return in; +} + class GlobalSfMTranslationAveragingSolver { translationAveraging::RelativeInfoVec m_vec_initialRijTijEstimates; diff --git a/src/software/pipeline/main_globalSfM.cpp b/src/software/pipeline/main_globalSfM.cpp index 3fed48e3a2..524b27555e 100644 --- 
a/src/software/pipeline/main_globalSfM.cpp +++ b/src/software/pipeline/main_globalSfM.cpp @@ -43,8 +43,8 @@ int main(int argc, char **argv) std::string outSfMDataFilename = "SfmData.json"; std::string describerTypesName = feature::EImageDescriberType_enumToString(feature::EImageDescriberType::SIFT); - int rotationAveragingMethod = static_cast(sfm::ROTATION_AVERAGING_L2); - int translationAveragingMethod = static_cast(sfm::TRANSLATION_AVERAGING_SOFTL1); + sfm::ERotationAveragingMethod rotationAveragingMethod = sfm::ROTATION_AVERAGING_L2; + sfm::ETranslationAveragingMethod translationAveragingMethod = sfm::TRANSLATION_AVERAGING_SOFTL1; bool lockAllIntrinsics = false; po::options_description allParams("Implementation of the paper\n" @@ -70,12 +70,13 @@ int main(int argc, char **argv) "Filename of the output SfMData file.") ("describerTypes,d", po::value(&describerTypesName)->default_value(describerTypesName), feature::EImageDescriberType_informations().c_str()) - ("rotationAveraging", po::value(&rotationAveragingMethod)->default_value(rotationAveragingMethod), + ("rotationAveraging", po::value(&rotationAveragingMethod)->default_value(rotationAveragingMethod), "* 1: L1 minimization\n" "* 2: L2 minimization") - ("translationAveraging", po::value(&translationAveragingMethod)->default_value(translationAveragingMethod), + ("translationAveraging", po::value(&translationAveragingMethod)->default_value(translationAveragingMethod), "* 1: L1 minimization\n" - "* 2: L2 minimization of sum of squared Chordal distances") + "* 2: L2 minimization of sum of squared Chordal distances\n" + "* 3: L1 soft minimization") ("lockAllIntrinsics", po::value(&lockAllIntrinsics)->default_value(lockAllIntrinsics), "Force lock of all camera intrinsic parameters, so they will not be refined during Bundle Adjustment."); From 6de58b6f55aa527278406835bec36275a0fb0fcb Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Mon, 19 Aug 2019 17:58:56 +0200 Subject: [PATCH 022/174] [sfm] Add panorama 
reconstruction engine based on global sfm --- src/aliceVision/sfm/CMakeLists.txt | 2 + src/aliceVision/sfm/pipeline/CMakeLists.txt | 1 + .../sfm/pipeline/panorama/CMakeLists.txt | 6 + .../ReconstructionEngine_panorama.cpp | 903 ++++++++++++++++++ .../ReconstructionEngine_panorama.hpp | 114 +++ 5 files changed, 1026 insertions(+) create mode 100644 src/aliceVision/sfm/pipeline/panorama/CMakeLists.txt create mode 100644 src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp create mode 100644 src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp diff --git a/src/aliceVision/sfm/CMakeLists.txt b/src/aliceVision/sfm/CMakeLists.txt index 5edb444e49..c334811c40 100644 --- a/src/aliceVision/sfm/CMakeLists.txt +++ b/src/aliceVision/sfm/CMakeLists.txt @@ -14,6 +14,7 @@ set(sfm_files_headers pipeline/pairwiseMatchesIO.hpp pipeline/RelativePoseInfo.hpp pipeline/structureFromKnownPoses/StructureEstimationFromKnownPoses.hpp + pipeline/panorama/ReconstructionEngine_panorama.hpp pipeline/regionsIO.hpp utils/alignment.hpp utils/statistics.hpp @@ -41,6 +42,7 @@ set(sfm_files_sources pipeline/RigSequence.cpp pipeline/RelativePoseInfo.cpp pipeline/structureFromKnownPoses/StructureEstimationFromKnownPoses.cpp + pipeline/panorama/ReconstructionEngine_panorama.cpp pipeline/regionsIO.cpp utils/alignment.cpp utils/statistics.cpp diff --git a/src/aliceVision/sfm/pipeline/CMakeLists.txt b/src/aliceVision/sfm/pipeline/CMakeLists.txt index c16e3d9c97..32071ee9a5 100644 --- a/src/aliceVision/sfm/pipeline/CMakeLists.txt +++ b/src/aliceVision/sfm/pipeline/CMakeLists.txt @@ -1,3 +1,4 @@ add_subdirectory(sequential) add_subdirectory(global) +add_subdirectory(panorama) diff --git a/src/aliceVision/sfm/pipeline/panorama/CMakeLists.txt b/src/aliceVision/sfm/pipeline/panorama/CMakeLists.txt new file mode 100644 index 0000000000..1476c47cb8 --- /dev/null +++ b/src/aliceVision/sfm/pipeline/panorama/CMakeLists.txt @@ -0,0 +1,6 @@ +alicevision_add_test(panorama_test.cpp 
+ NAME "sfm_panorama" + LINKS aliceVision_sfm + aliceVision_feature + aliceVision_system +) diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp new file mode 100644 index 0000000000..451e6b5209 --- /dev/null +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -0,0 +1,903 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2016 AliceVision contributors. +// Copyright (c) 2012 openMVG contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. + +#include "ReconstructionEngine_panorama.hpp" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +#include +#include +#include +#include +#include +#include +#include + + +#include + +#include + +#ifdef _MSC_VER +#pragma warning( once : 4267 ) //warning C4267: 'argument' : conversion from 'size_t' to 'const int', possible loss of data +#endif + +namespace aliceVision { +namespace sfm { + +using namespace aliceVision::camera; +using namespace aliceVision::geometry; +using namespace aliceVision::feature; +using namespace aliceVision::sfmData; + + + +using namespace aliceVision::robustEstimation; + +bool robustRelativeRotation_fromE( + const Mat3 & K1, const Mat3 & K2, + const Mat & x1, const Mat & x2, + RelativePoseInfo & relativePose_info, + const std::pair & size_ima1, + const std::pair & size_ima2, + const size_t max_iteration_count) +{ + // Use the 5 point solver to estimate E + typedef aliceVision::essential::kernel::FivePointKernel SolverType; + // Define the AContrario adaptor + typedef ACKernelAdaptorEssential< + SolverType, + aliceVision::fundamental::kernel::EpipolarDistanceError, + UnnormalizerT, + Mat3> + KernelType; + + KernelType kernel(x1, 
size_ima1.first, size_ima1.second, + x2, size_ima2.first, size_ima2.second, K1, K2); + + // Robustly estimation of the Essential matrix and its precision + const std::pair acRansacOut = ACRANSAC(kernel, relativePose_info.vec_inliers, + max_iteration_count, &relativePose_info.essential_matrix, relativePose_info.initial_residual_tolerance); + relativePose_info.found_residual_precision = acRansacOut.first; + + if (relativePose_info.vec_inliers.size() < SolverType::MINIMUM_SAMPLES * ALICEVISION_MINIMUM_SAMPLES_COEF ) + { + ALICEVISION_LOG_INFO("robustRelativePose: no sufficient coverage (the model does not support enough samples): " << relativePose_info.vec_inliers.size()); + return false; // no sufficient coverage (the model does not support enough samples) + } + + // estimation of the relative poses + Mat3 R; + Vec3 t; + if (!estimate_Rt_fromE( + K1, K2, x1, x2, + relativePose_info.essential_matrix, relativePose_info.vec_inliers, &R, &t)) + { + ALICEVISION_LOG_INFO("robustRelativePose: cannot find a valid [R|t] couple that makes the inliers in front of the camera."); + return false; // cannot find a valid [R|t] couple that makes the inliers in front of the camera. + } + t = Vec3(0.0, 0.0, 0.0); + + // Store [R|C] for the second camera, since the first camera is [Id|0] + relativePose_info.relativePose = geometry::Pose3(R, -R.transpose() * t); + return true; +} + + +/** + * @brief Decompose a homography given known calibration matrices, assuming a pure rotation between the two views. + * It is supposed that \f$ x_2 \sim H x_1 \f$ with \f$ H = K_2 * R * K_1^{-1} \f$ + * @param[in] homography 3x3 homography matrix H. + * @param[in] K1 3x3 calibration matrix of the first view. + * @param[in] K2 3x3 calibration matrix of the second view. + * @return The 3x3 rotation matrix corresponding to the pure rotation between the views. 
+ */ +aliceVision::Mat3 decomposePureRotationHomography(const aliceVision::Mat3 &homography, const aliceVision::Mat3 &K1, + const aliceVision::Mat3 &K2) +{ + // G is the "calibrated" homography inv(K2) * H * K1 + const auto G = K2.inverse() * homography * K1; + // compute the scale factor lambda that makes det(lambda*G) = 1 + const auto lambda = std::pow(1 / G.determinant(), 1 / 3); + const auto rotation = lambda * G; + + //@fixme find possible bad cases? + + // compute and return the closest rotation matrix + Eigen::JacobiSVD usv(rotation, Eigen::ComputeFullU | Eigen::ComputeFullV); + const auto &u = usv.matrixU(); + const auto vt = usv.matrixV().transpose(); + return u * vt; +} + +/** + * @brief Estimate the homography between two views using corresponding points such that \f$ x_2 \sim H x_1 \f$ + * @param[in] x1 The points on the first image. + * @param[in] x2 The corresponding points on the second image. + * @param[in] imgSize1 The size of the first image. + * @param[in] imgSize2 The size of the second image. + * @param[out] H The estimated homography. + * @param[out] vec_inliers The inliers satisfying the homography as a list of indices. + * @return the status of the estimation. + */ +aliceVision::EstimationStatus robustHomographyEstimationAC(const aliceVision::Mat2X &x1, + const aliceVision::Mat2X &x2, + const std::pair &imgSize1, + const std::pair &imgSize2, + aliceVision::Mat3 &H, + std::vector &vec_inliers) +{ + using KernelType = aliceVision::robustEstimation::ACKernelAdaptor< + aliceVision::homography::kernel::FourPointSolver, + aliceVision::homography::kernel::AsymmetricError, + aliceVision::UnnormalizerI, + aliceVision::Mat3>; + + KernelType kernel(x1, imgSize1.first, imgSize1.second, + x2, imgSize2.first, imgSize2.second, + false); // configure as point to point error model. 
+ + + const std::pair ACRansacOut = aliceVision::robustEstimation::ACRANSAC(kernel, vec_inliers, + 1024, + &H, + std::numeric_limits::infinity()); + + const bool valid{!vec_inliers.empty()}; + //@fixme + const bool hasStrongSupport{vec_inliers.size() > KernelType::MINIMUM_SAMPLES * 2.5}; + + return {valid, hasStrongSupport}; +} + +/** + * @brief A struct containing the information of the relative rotation. + */ +struct RelativeRotationInfo +{ + /** + * @brief Default constructor. + */ + RelativeRotationInfo() = default; + + /// the homography. + aliceVision::Mat3 _homography{}; + /// the relative rotation. + aliceVision::Mat3 _relativeRotation{}; + /// the inliers. + std::vector _inliers{}; + /// initial threshold for the acransac process. + double _initialResidualTolerance{std::numeric_limits::infinity()}; + /// the estimated threshold found by acransac. + double _foundResidualPrecision{std::numeric_limits::infinity()}; + +}; + +/** + * @brief Estimate the relative rotation between two views related by a pure rotation. + * @param[in] K1 3x3 calibration matrix of the first view. + * @param[in] K2 3x3 calibration matrix of the second view. + * @param[in] x1 The points on the first image. + * @param[in] x2 The corresponding points on the second image. + * @param[in] imgSize1 The size of the first image. + * @param[in] imgSize2 The size of the second image. + * @param[out] relativeRotationInfo Contains the result of the estimation. + * @param[in] maxIterationCount Max number of iteration for the ransac process. + * @return true if a homography has been estimated. 
+ */ +bool robustRelativeRotation_fromH(const aliceVision::Mat3 &K1, + const aliceVision::Mat3 &K2, + const aliceVision::Mat2X &x1, + const aliceVision::Mat2X &x2, + RelativeRotationInfo &relativeRotationInfo, + const std::pair &imgSize1, + const std::pair &imgSize2, + const size_t maxIterationCount) +{ + std::vector vec_inliers{}; + + // estimate the homography + const auto status = robustHomographyEstimationAC(x1, x2, imgSize1, imgSize2, relativeRotationInfo._homography, + relativeRotationInfo._inliers); + + if (!status.isValid && !status.hasStrongSupport) + { + return false; + } + + relativeRotationInfo._relativeRotation = decomposePureRotationHomography(relativeRotationInfo._homography, K1, K2); + ALICEVISION_LOG_INFO("Found homography H:\n" << relativeRotationInfo._homography); + ALICEVISION_LOG_INFO("Homography H decomposes to rotation R:\n" << relativeRotationInfo._relativeRotation); + + return true; +} + + + + +ReconstructionEngine_panorama::ReconstructionEngine_panorama(const SfMData& sfmData, + const std::string& outDirectory, + const std::string& loggingFile) + : ReconstructionEngine(sfmData, outDirectory) + , _loggingFile(loggingFile) + , _normalizedFeaturesPerView(nullptr) +{ + if(!_loggingFile.empty()) + { + // setup HTML logger + _htmlDocStream = std::make_shared("GlobalReconstructionEngine SFM report."); + _htmlDocStream->pushInfo(htmlDocument::htmlMarkup("h1", std::string("ReconstructionEngine_globalSfM"))); + _htmlDocStream->pushInfo("
"); + _htmlDocStream->pushInfo( "Dataset info:"); + _htmlDocStream->pushInfo( "Views count: " + htmlDocument::toString( sfmData.getViews().size()) + "
"); + } + + // Set default motion Averaging methods + _eRotationAveragingMethod = ROTATION_AVERAGING_L2; + + // Set default relative rotation methods + _eRelativeRotationMethod = RELATIVE_ROTATION_FROM_E; +} + +ReconstructionEngine_panorama::~ReconstructionEngine_panorama() +{ + if(!_loggingFile.empty()) + { + // Save the reconstruction Log + std::ofstream htmlFileStream(_loggingFile.c_str()); + htmlFileStream << _htmlDocStream->getDoc(); + } +} + +void ReconstructionEngine_panorama::SetFeaturesProvider(feature::FeaturesPerView* featuresPerView) +{ + _featuresPerView = featuresPerView; + + // Copy features and save a normalized version + _normalizedFeaturesPerView = std::make_shared(*featuresPerView); + #pragma omp parallel + for(MapFeaturesPerView::iterator iter = _normalizedFeaturesPerView->getData().begin(); + iter != _normalizedFeaturesPerView->getData().end(); ++iter) + { + #pragma omp single nowait + { + // get the related view & camera intrinsic and compute the corresponding bearing vectors + const View * view = _sfmData.getViews().at(iter->first).get(); + if(_sfmData.getIntrinsics().count(view->getIntrinsicId())) + { + const std::shared_ptr cam = _sfmData.getIntrinsics().find(view->getIntrinsicId())->second; + for(auto& iterFeatPerDesc: iter->second) + { + for (PointFeatures::iterator iterPt = iterFeatPerDesc.second.begin(); + iterPt != iterFeatPerDesc.second.end(); ++iterPt) + { + const Vec3 bearingVector = (*cam)(cam->get_ud_pixel(iterPt->coords().cast())); + iterPt->coords() << (bearingVector.head(2) / bearingVector(2)).cast(); + } + } + } + } + } +} + +void ReconstructionEngine_panorama::SetMatchesProvider(matching::PairwiseMatches* provider) +{ + _pairwiseMatches = provider; +} + +void ReconstructionEngine_panorama::SetRotationAveragingMethod(ERotationAveragingMethod eRotationAveragingMethod) +{ + _eRotationAveragingMethod = eRotationAveragingMethod; +} + +void ReconstructionEngine_panorama::SetRelativeRotationMethod(ERelativeRotationMethod 
eRelativeRotationMethod) +{ + _eRelativeRotationMethod = eRelativeRotationMethod; +} + +bool ReconstructionEngine_panorama::process() +{ + // keep only the largest biedge connected subgraph + { + const PairSet pairs = matching::getImagePairs(*_pairwiseMatches); + const std::set set_remainingIds = graph::CleanGraph_KeepLargestBiEdge_Nodes(pairs, _outputFolder); + if(set_remainingIds.empty()) + { + ALICEVISION_LOG_DEBUG("Invalid input image graph for panorama"); + return false; + } + KeepOnlyReferencedElement(set_remainingIds, *_pairwiseMatches); + } + + aliceVision::rotationAveraging::RelativeRotations relatives_R; + Compute_Relative_Rotations(relatives_R); + + HashMap global_rotations; + if(!Compute_Global_Rotations(relatives_R, global_rotations)) + { + ALICEVISION_LOG_WARNING("Panorama:: Rotation Averaging failure!"); + return false; + } + + // we set translation vector to zero + for(const auto& gR: global_rotations) + { + const Vec3 t(0.0, 0.0, 0.0); + const IndexT poseId = gR.first; + const Mat3 & Ri = gR.second; + _sfmData.setAbsolutePose(poseId, CameraPose(Pose3(Ri, t))); + } + + //-- Export statistics about the SfM process + if (!_loggingFile.empty()) + { + using namespace htmlDocument; + std::ostringstream os; + os << "Structure from Motion statistics."; + _htmlDocStream->pushInfo("
"); + _htmlDocStream->pushInfo(htmlMarkup("h1",os.str())); + + os.str(""); + os << "-------------------------------" << "
" + << "-- View count: " << _sfmData.getViews().size() << "
" + << "-- Intrinsic count: " << _sfmData.getIntrinsics().size() << "
" + << "-- Pose count: " << _sfmData.getPoses().size() << "
" + << "-- Track count: " << _sfmData.getLandmarks().size() << "
" + << "-------------------------------" << "
"; + _htmlDocStream->pushInfo(os.str()); + } + + return true; +} + +/// Compute from relative rotations the global rotations of the camera poses +bool ReconstructionEngine_panorama::Compute_Global_Rotations(const rotationAveraging::RelativeRotations& relatives_R, + HashMap& global_rotations) +{ + if(relatives_R.empty()) + return false; + // Log statistics about the relative rotation graph + { + std::set set_pose_ids; + for (const auto & relative_R : relatives_R) + { + set_pose_ids.insert(relative_R.i); + set_pose_ids.insert(relative_R.j); + } + + ALICEVISION_LOG_DEBUG("Global rotations computation: " << "\n" + "\t- relative rotations: " << relatives_R.size() << "\n" + "\t- global rotations: " << set_pose_ids.size()); + } + + // Global Rotation solver: + const ERelativeRotationInferenceMethod eRelativeRotationInferenceMethod = TRIPLET_ROTATION_INFERENCE_NONE; // TRIPLET_ROTATION_INFERENCE_COMPOSITION_ERROR; + + GlobalSfMRotationAveragingSolver rotationAveraging_solver; + //-- Rejection triplet that are 'not' identity rotation (error to identity > 50°) + const bool b_rotationAveraging = rotationAveraging_solver.Run(_eRotationAveragingMethod, eRelativeRotationInferenceMethod, relatives_R, 100.0, global_rotations); + + ALICEVISION_LOG_DEBUG("Found #global_rotations: " << global_rotations.size()); + + if(b_rotationAveraging) + { + // Log input graph to the HTML report + if(!_loggingFile.empty() && !_outputFolder.empty()) + { + // Log a relative pose graph + { + std::set set_pose_ids; + PairSet relative_pose_pairs; + for(const auto & view : _sfmData.getViews()) + { + const IndexT pose_id = view.second->getPoseId(); + set_pose_ids.insert(pose_id); + } + const std::string sGraph_name = "global_relative_rotation_pose_graph_final"; + graph::indexedGraph putativeGraph(set_pose_ids, rotationAveraging_solver.GetUsedPairs()); + graph::exportToGraphvizData((fs::path(_outputFolder) / (sGraph_name + ".dot")).string(), putativeGraph.g); + + /* + using namespace htmlDocument; + 
std::ostringstream os; + os << "
" << sGraph_name << "
" + << "\n"; + _htmlDocStream->pushInfo(os.str()); + */ + } + } + } + + return b_rotationAveraging; +} + +/// Compute the initial structure of the scene +bool ReconstructionEngine_panorama::Compute_Initial_Structure(matching::PairwiseMatches& tripletWise_matches) +{ + // Build tracks from selected triplets (Union of all the validated triplet tracks (_tripletWise_matches)) + { + using namespace aliceVision::track; + TracksBuilder tracksBuilder; +#ifdef USE_ALL_VALID_MATCHES // not used by default + matching::PairwiseMatches pose_supported_matches; + for (const auto & pairwiseMatchesIt : *_pairwiseMatches) + { + const View * vI = _sfm_data.getViews().at(pairwiseMatchesIt.first.first).get(); + const View * vJ = _sfm_data.getViews().at(pairwiseMatchesIt.first.second).get(); + if (_sfm_data.isPoseAndIntrinsicDefined(vI) && _sfm_data.isPoseAndIntrinsicDefined(vJ)) + { + pose_supported_matches.insert(pairwiseMatchesIt); + } + } + tracksBuilder.Build(pose_supported_matches); +#else + // Use triplet validated matches + tracksBuilder.build(tripletWise_matches); +#endif + tracksBuilder.filter(3); + TracksMap map_selectedTracks; // reconstructed track (visibility per 3D point) + tracksBuilder.exportToSTL(map_selectedTracks); + + // Fill sfm_data with the computed tracks (no 3D yet) + Landmarks & structure = _sfmData.structure; + IndexT idx(0); + for (TracksMap::const_iterator itTracks = map_selectedTracks.begin(); + itTracks != map_selectedTracks.end(); + ++itTracks, ++idx) + { + const Track & track = itTracks->second; + Landmark& newLandmark = structure[idx]; + newLandmark.descType = track.descType; + Observations & obs = newLandmark.observations; + std::vector pos3d; + pos3d.reserve(track.featPerView.size()); + for (Track::FeatureIdPerView::const_iterator it = track.featPerView.begin(); it != track.featPerView.end(); ++it) + { + const size_t imaIndex = it->first; + const size_t featIndex = it->second; + const PointFeature & pt = _featuresPerView->getFeatures(imaIndex, 
track.descType)[featIndex]; + obs[imaIndex] = Observation(pt.coords().cast(), featIndex); + + /* + { + // back project a feature from the first observation + const sfmData::View * view = _sfmData.views.at(imaIndex).get(); + if (_sfmData.isPoseAndIntrinsicDefined(view)) + { + const IntrinsicBase * cam = _sfmData.getIntrinsics().at(view->getIntrinsicId()).get(); + const Pose3 pose = _sfmData.getPose(*view).getTransform(); + Vec3 v = cam.backproject(pose, pt.coords().cast(), 1.0, true); + pos3d.push_back(v); + } + }*/ + } + /* + newLandmark.X = Vec3::Zero(); + for(const Vec3& p: pos3d) + newLandmark.X += p; + newLandmark.X /= pos.size();*/ + } + + ALICEVISION_LOG_DEBUG("Track stats"); + { + std::ostringstream osTrack; + //-- Display stats: + // - number of images + // - number of tracks + std::set set_imagesId; + tracksUtilsMap::imageIdInTracks(map_selectedTracks, set_imagesId); + osTrack << "------------------" << "\n" + << "-- Tracks Stats --" << "\n" + << " Tracks number: " << tracksBuilder.nbTracks() << "\n" + << " Images Id: " << "\n"; + std::copy(set_imagesId.begin(), + set_imagesId.end(), + std::ostream_iterator(osTrack, ", ")); + osTrack << "\n------------------" << "\n"; + + std::map map_Occurence_TrackLength; + tracksUtilsMap::tracksLength(map_selectedTracks, map_Occurence_TrackLength); + osTrack << "TrackLength, Occurrence" << "\n"; + for (std::map::const_iterator iter = map_Occurence_TrackLength.begin(); + iter != map_Occurence_TrackLength.end(); ++iter) { + osTrack << "\t" << iter->first << "\t" << iter->second << "\n"; + } + osTrack << "\n"; + ALICEVISION_LOG_DEBUG(osTrack.str()); + } + } + + // Compute 3D position of the landmark of the structure by triangulation of the observations + { + aliceVision::system::Timer timer; +/* + const IndexT trackCountBefore = _sfmData.getLandmarks().size(); + StructureComputation_blind structure_estimator(true); + structure_estimator.triangulate(_sfmData); + + ALICEVISION_LOG_DEBUG("#removed tracks (invalid 
triangulation): " << + trackCountBefore - IndexT(_sfmData.getLandmarks().size())); + ALICEVISION_LOG_DEBUG(" Triangulation took (s): " << timer.elapsed()); +*/ + // Export initial structure + if (!_loggingFile.empty()) + { + sfmDataIO::Save(_sfmData, + (fs::path(_loggingFile).parent_path() / "initial_structure.ply").string(), + sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); + } + } + return !_sfmData.structure.empty(); +} + +// Adjust the scene (& remove outliers) +bool ReconstructionEngine_panorama::Adjust() +{ + // refine sfm scene (in a 3 iteration process (free the parameters regarding their incertainty order)): + BundleAdjustmentCeres::CeresOptions options; + options.useParametersOrdering = false; // disable parameters ordering + + BundleAdjustmentCeres BA(options); + // - refine only Structure and translations + bool success = BA.adjust(_sfmData, BundleAdjustment::REFINE_TRANSLATION | BundleAdjustment::REFINE_STRUCTURE); + if(success) + { + if(!_loggingFile.empty()) + sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_00_refine_T_Xi.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); + + // refine only structure and rotations & translations + success = BA.adjust(_sfmData, BundleAdjustment::REFINE_ROTATION | BundleAdjustment::REFINE_TRANSLATION | BundleAdjustment::REFINE_STRUCTURE); + + if(success && !_loggingFile.empty()) + sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_01_refine_RT_Xi.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); + } + + if(success && !_lockAllIntrinsics) + { + // refine all: Structure, motion:{rotations, translations} and optics:{intrinsics} + success = BA.adjust(_sfmData, BundleAdjustment::REFINE_ALL); + if(success && !_loggingFile.empty()) + sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_02_refine_KRT_Xi.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | 
sfmDataIO::STRUCTURE)); + } + + // Remove outliers (max_angle, residual error) + const size_t pointcount_initial = _sfmData.structure.size(); + RemoveOutliers_PixelResidualError(_sfmData, 4.0); + const size_t pointcount_pixelresidual_filter = _sfmData.structure.size(); + RemoveOutliers_AngleError(_sfmData, 2.0); + const size_t pointcount_angular_filter = _sfmData.structure.size(); + ALICEVISION_LOG_DEBUG("Outlier removal (remaining points):\n" + "\t- # landmarks initial: " << pointcount_initial << "\n" + "\t- # landmarks after pixel residual filter: " << pointcount_pixelresidual_filter << "\n" + "\t- # landmarks after angular filter: " << pointcount_angular_filter); + + if(!_loggingFile.empty()) + sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_03_outlier_removed.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); + + // check that poses & intrinsic cover some measures (after outlier removal) + const IndexT minPointPerPose = 12; // 6 min + const IndexT minTrackLength = 3; // 2 min todo param@L + + if (eraseUnstablePosesAndObservations(_sfmData, minPointPerPose, minTrackLength)) + { + // TODO: must ensure that track graph is producing a single connected component + + const size_t pointcount_cleaning = _sfmData.structure.size(); + ALICEVISION_LOG_DEBUG("# landmarks after eraseUnstablePosesAndObservations: " << pointcount_cleaning); + } + + BundleAdjustment::ERefineOptions refineOptions = BundleAdjustment::REFINE_ROTATION | BundleAdjustment::REFINE_TRANSLATION | BundleAdjustment::REFINE_STRUCTURE; + if(!_lockAllIntrinsics) + refineOptions |= BundleAdjustment::REFINE_INTRINSICS_ALL; + success = BA.adjust(_sfmData, refineOptions); + + if(success && !_loggingFile.empty()) + sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_04_outlier_removed.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); + + return success; +} + +void 
ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging::RelativeRotations& vec_relatives_R) +{ + // + // Build the Relative pose graph from matches: + // + /// pairwise view relation between poseIds + typedef std::map PoseWiseMatches; + + // List shared correspondences (pairs) between poses + PoseWiseMatches poseWiseMatches; + for (matching::PairwiseMatches::const_iterator iterMatches = _pairwiseMatches->begin(); + iterMatches != _pairwiseMatches->end(); ++iterMatches) + { + const Pair pair = iterMatches->first; + const View* v1 = _sfmData.getViews().at(pair.first).get(); + const View* v2 = _sfmData.getViews().at(pair.second).get(); + poseWiseMatches[Pair(v1->getPoseId(), v2->getPoseId())].insert(pair); + } + + ALICEVISION_LOG_INFO("Relative pose computation:"); +// boost::progress_display progressBar( poseWiseMatches.size(), std::cout, "\n- Relative pose computation -\n" ); +// #pragma omp parallel for schedule(dynamic) + // Compute the relative pose from pairwise point matches: + for (int i = 0; i < poseWiseMatches.size(); ++i) + { +// #pragma omp critical +// { +// ++progressBar; +// } + { + PoseWiseMatches::const_iterator iter (poseWiseMatches.begin()); + std::advance(iter, i); + const auto& relative_pose_iterator(*iter); + const Pair relative_pose_pair = relative_pose_iterator.first; + const PairSet& match_pairs = relative_pose_iterator.second; + + // If a pair has the same ID, discard it + if (relative_pose_pair.first == relative_pose_pair.second) + { + continue; + } + + // Select common bearing vectors + if (match_pairs.size() > 1) + { + ALICEVISION_LOG_WARNING("Compute relative pose between more than two view is not supported"); + continue; + } + + const Pair pairIterator = *(match_pairs.begin()); + + const IndexT I = pairIterator.first; + const IndexT J = pairIterator.second; + + const View* view_I = _sfmData.views[I].get(); + const View* view_J = _sfmData.views[J].get(); + + // Check that valid cameras are existing for the pair of 
view + if (_sfmData.getIntrinsics().count(view_I->getIntrinsicId()) == 0 || + _sfmData.getIntrinsics().count(view_J->getIntrinsicId()) == 0) + continue; + + // Setup corresponding bearing vector + const matching::MatchesPerDescType & matchesPerDesc = _pairwiseMatches->at(pairIterator); + const std::size_t nbBearing = matchesPerDesc.getNbAllMatches(); + std::size_t iBearing = 0; + Mat x1(2, nbBearing), x2(2, nbBearing); + + for(const auto& matchesPerDescIt: matchesPerDesc) + { + const feature::EImageDescriberType descType = matchesPerDescIt.first; + assert(descType != feature::EImageDescriberType::UNINITIALIZED); + const matching::IndMatches & matches = matchesPerDescIt.second; + + for (const auto & match : matches) + { + x1.col(iBearing) = _normalizedFeaturesPerView->getFeatures(I, descType)[match._i].coords().cast(); + x2.col(iBearing++) = _normalizedFeaturesPerView->getFeatures(J, descType)[match._j].coords().cast(); + } + } + assert(nbBearing == iBearing); + + const IntrinsicBase* cam_I = _sfmData.getIntrinsics().at(view_I->getIntrinsicId()).get(); + const IntrinsicBase* cam_J = _sfmData.getIntrinsics().at(view_J->getIntrinsicId()).get(); + + RelativePoseInfo relativePose_info; + // Compute max authorized error as geometric mean of camera plane tolerated residual error + relativePose_info.initial_residual_tolerance = std::pow( + cam_I->imagePlane_toCameraPlaneError(2.5) * + cam_J->imagePlane_toCameraPlaneError(2.5), + 1./2.); + + // Since we use normalized features, we will use unit image size and intrinsic matrix: + const std::pair imageSize(1., 1.); + const Mat3 K = Mat3::Identity(); + const size_t maxIterations = 256; + + switch(_eRelativeRotationMethod) + { + case RELATIVE_ROTATION_FROM_E: + { + if(!robustRelativeRotation_fromE(K, K, x1, x2, relativePose_info, imageSize, imageSize, maxIterations)) + { + ALICEVISION_LOG_INFO("Relative pose computation: i: " << i << ", (" << I << ", " << J <<") => FAILED"); + continue; + } + } + break; + case 
RELATIVE_ROTATION_FROM_H: + { + RelativeRotationInfo relativeRotation_info; + relativeRotation_info._initialResidualTolerance = std::pow( + cam_I->imagePlane_toCameraPlaneError(2.5) * + cam_J->imagePlane_toCameraPlaneError(2.5), + 1./2.); + if(!robustRelativeRotation_fromH(K, K, x1, x2, relativeRotation_info, imageSize, imageSize, maxIterations)) + { + ALICEVISION_LOG_INFO("Relative pose computation: i: " << i << ", (" << I << ", " << J <<") => FAILED"); + continue; + } + relativePose_info.relativePose = geometry::Pose3(relativeRotation_info._relativeRotation, Vec3::Zero()); + relativePose_info.initial_residual_tolerance = relativeRotation_info._initialResidualTolerance; + relativePose_info.found_residual_precision = relativeRotation_info._foundResidualPrecision; + relativePose_info.vec_inliers = relativeRotation_info._inliers; + } + break; + default: + ALICEVISION_LOG_DEBUG( + "Unknown relative rotation method: " << ERelativeRotationMethod_enumToString(_eRelativeRotationMethod)); + } + + ALICEVISION_LOG_INFO("Relative pose computation: i: " << i << ", (" << I << ", " << J <<") => SUCCESS"); + ALICEVISION_LOG_INFO("Nb inliers: " << relativePose_info.vec_inliers.size() + << ", initial_residual_tolerance: " << relativePose_info.initial_residual_tolerance + << ", found_residual_precision: " << relativePose_info.found_residual_precision); + + const bool refineUsingBA = false; + if(refineUsingBA) + { + // Refine the defined scene + SfMData tinyScene; + tinyScene.views.insert(*_sfmData.getViews().find(view_I->getViewId())); + tinyScene.views.insert(*_sfmData.getViews().find(view_J->getViewId())); + tinyScene.intrinsics.insert(*_sfmData.getIntrinsics().find(view_I->getIntrinsicId())); + tinyScene.intrinsics.insert(*_sfmData.getIntrinsics().find(view_J->getIntrinsicId())); + + // Init poses + const Pose3& poseI = Pose3(Mat3::Identity(), Vec3::Zero()); + const Pose3& poseJ = relativePose_info.relativePose; + + tinyScene.setPose(*view_I, CameraPose(poseI)); + 
tinyScene.setPose(*view_J, CameraPose(poseJ)); + + // Init structure + const Mat34 P1 = cam_I->get_projective_equivalent(poseI); + const Mat34 P2 = cam_J->get_projective_equivalent(poseJ); + Landmarks & landmarks = tinyScene.structure; + + size_t landmarkId = 0; + for(const auto& matchesPerDescIt: matchesPerDesc) + { + const feature::EImageDescriberType descType = matchesPerDescIt.first; + assert(descType != feature::EImageDescriberType::UNINITIALIZED); + if(descType == feature::EImageDescriberType::UNINITIALIZED) + throw std::logic_error("descType UNINITIALIZED"); + const matching::IndMatches & matches = matchesPerDescIt.second; + for (const matching::IndMatch& match: matches) + { + const Vec2 x1_ = _featuresPerView->getFeatures(I, descType)[match._i].coords().cast(); + const Vec2 x2_ = _featuresPerView->getFeatures(J, descType)[match._j].coords().cast(); + Vec3 X; + TriangulateDLT(P1, x1_, P2, x2_, &X); + Observations obs; + obs[view_I->getViewId()] = Observation(x1_, match._i); + obs[view_J->getViewId()] = Observation(x2_, match._j); + Landmark& newLandmark = landmarks[landmarkId++]; + newLandmark.descType = descType; + newLandmark.observations = obs; + newLandmark.X = X; + } + } + // - refine only Structure and Rotations & translations (keep intrinsic constant) + BundleAdjustmentCeres::CeresOptions options(false, false); + options.linearSolverType = ceres::DENSE_SCHUR; + BundleAdjustmentCeres bundle_adjustment_obj(options); + if(bundle_adjustment_obj.adjust(tinyScene, BundleAdjustment::REFINE_ROTATION | BundleAdjustment::REFINE_TRANSLATION | BundleAdjustment::REFINE_STRUCTURE)) + { + // --> to debug: save relative pair geometry on disk + // std::ostringstream os; + // os << relative_pose_pair.first << "_" << relative_pose_pair.second << ".ply"; + // Save(tiny_scene, os.str(), ESfMData(STRUCTURE | EXTRINSICS)); + // + + const geometry::Pose3 poseI = tinyScene.getPose(*view_I).getTransform(); + const geometry::Pose3 poseJ = 
tinyScene.getPose(*view_J).getTransform(); + + const Mat3 R1 = poseI.rotation(); + const Mat3 R2 = poseJ.rotation(); + const Vec3 t1 = poseI.translation(); + const Vec3 t2 = poseJ.translation(); + // Compute relative motion and save it + Mat3 Rrel; + Vec3 trel; + RelativeCameraMotion(R1, t1, R2, t2, &Rrel, &trel); + // Update found relative pose + relativePose_info.relativePose = Pose3(Rrel, -Rrel.transpose() * trel); + } + } + + // #pragma omp critical + { + // Add the relative rotation to the relative 'rotation' pose graph + using namespace aliceVision::rotationAveraging; + vec_relatives_R.emplace_back( + relative_pose_pair.first, relative_pose_pair.second, + relativePose_info.relativePose.rotation(), relativePose_info.vec_inliers.size()); + } + } + } // for all relative pose + + ALICEVISION_LOG_DEBUG("Compute_Relative_Rotations: vec_relatives_R.size(): " << vec_relatives_R.size()); + + for(rotationAveraging::RelativeRotation& rotation: vec_relatives_R) + { + ALICEVISION_LOG_DEBUG("Relative_Rotation:\n" + << "i: " << rotation.i << ", j: " << rotation.j << ", weight: " << rotation.weight << "\n" + << "Rij" << rotation.Rij + ); + } + + // Re-weight rotation in [0,1] + if (vec_relatives_R.size() > 1) + { + std::vector vec_count; + vec_count.reserve(vec_relatives_R.size()); + for(const auto & relative_rotation_info : vec_relatives_R) + { + vec_count.push_back(relative_rotation_info.weight); + } + std::partial_sort(vec_count.begin(), vec_count.begin() + vec_count.size() / 2, vec_count.end()); + // const float thTrustPair = vec_count[vec_count.size() / 2]; + for(auto & relative_rotation_info : vec_relatives_R) + { + relative_rotation_info.weight = std::min(relative_rotation_info.weight, 1.f); + } + } + + // Log input graph to the HTML report + if (!_loggingFile.empty() && !_outputFolder.empty()) + { + // Log a relative view graph + { + std::set set_ViewIds; + std::transform(_sfmData.getViews().begin(), _sfmData.getViews().end(), std::inserter(set_ViewIds, 
set_ViewIds.begin()), stl::RetrieveKey()); + graph::indexedGraph putativeGraph(set_ViewIds, getImagePairs(*_pairwiseMatches)); + graph::exportToGraphvizData((fs::path(_outputFolder) / "global_relative_rotation_view_graph.dot").string(), putativeGraph.g); + } + + // Log a relative pose graph + { + std::set set_pose_ids; + PairSet relative_pose_pairs; + for(const auto& relative_R : vec_relatives_R) + { + const Pair relative_pose_indices(relative_R.i, relative_R.j); + relative_pose_pairs.insert(relative_pose_indices); + set_pose_ids.insert(relative_R.i); + set_pose_ids.insert(relative_R.j); + } + const std::string sGraph_name = "global_relative_rotation_pose_graph"; + graph::indexedGraph putativeGraph(set_pose_ids, relative_pose_pairs); + graph::exportToGraphvizData((fs::path(_outputFolder) / (sGraph_name + ".dot")).string(), putativeGraph.g); + /* + using namespace htmlDocument; + std::ostringstream os; + + os << "
" << "global_relative_rotation_pose_graph" << "
" + << "\n"; + _htmlDocStream->pushInfo(os.str()); + */ + } + } +} + +} // namespace sfm +} // namespace aliceVision + diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp new file mode 100644 index 0000000000..f795772940 --- /dev/null +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp @@ -0,0 +1,114 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2016 AliceVision contributors. +// Copyright (c) 2012 openMVG contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. + +#pragma once + +#include +#include +#include + +#include + +namespace aliceVision{ +namespace sfm{ + +enum ERelativeRotationMethod +{ + RELATIVE_ROTATION_FROM_E, + RELATIVE_ROTATION_FROM_H +}; + +inline std::string ERelativeRotationMethod_enumToString(const ERelativeRotationMethod rotationMethod) +{ + switch(rotationMethod) + { + case ERelativeRotationMethod::RELATIVE_ROTATION_FROM_E: return "essential_matrix"; + case ERelativeRotationMethod::RELATIVE_ROTATION_FROM_H: return "homography_matrix"; + } + throw std::out_of_range("Invalid method name enum"); +} + +inline ERelativeRotationMethod ERelativeRotationMethod_stringToEnum(const std::string& rotationMethodName) +{ + std::string methodName = rotationMethodName; + std::transform(methodName.begin(), methodName.end(), methodName.begin(), ::tolower); + + if(methodName == "essential_matrix") return ERelativeRotationMethod::RELATIVE_ROTATION_FROM_E; + if(methodName == "homography_matrix") return ERelativeRotationMethod::RELATIVE_ROTATION_FROM_H; + + throw std::out_of_range("Invalid method name : '" + rotationMethodName + "'"); +} + +inline std::ostream& operator<<(std::ostream& os, ERelativeRotationMethod rotationMethodName) +{ + os << 
ERelativeRotationMethod_enumToString(rotationMethodName); + return os; +} + +inline std::istream& operator>>(std::istream& in, ERelativeRotationMethod& rotationMethod) +{ + std::string token; + in >> token; + rotationMethod = ERelativeRotationMethod_stringToEnum(token); + return in; +} + +/// Panorama Pipeline Reconstruction Engine. +/// - Method: Based on Global SfM but with no translations between cameras. +class ReconstructionEngine_panorama : public ReconstructionEngine +{ +public: + + ReconstructionEngine_panorama(const sfmData::SfMData& sfmData, + const std::string& outDirectory, + const std::string& loggingFile = ""); + + ~ReconstructionEngine_panorama(); + + void SetFeaturesProvider(feature::FeaturesPerView* featuresPerView); + void SetMatchesProvider(matching::PairwiseMatches* provider); + + void SetRotationAveragingMethod(ERotationAveragingMethod eRotationAveragingMethod); + void SetRelativeRotationMethod(ERelativeRotationMethod eRelativeRotationMethod); + + void setLockAllIntrinsics(bool v) { _lockAllIntrinsics = v; } + + virtual bool process(); + +protected: + /// Compute from relative rotations the global rotations of the camera poses + bool Compute_Global_Rotations(const aliceVision::rotationAveraging::RelativeRotations& vec_relatives_R, + HashMap& map_globalR); + + /// Compute the initial structure of the scene + bool Compute_Initial_Structure(matching::PairwiseMatches& tripletWise_matches); + + /// Adjust the scene (& remove outliers) + bool Adjust(); + +private: + /// Compute relative rotations + void Compute_Relative_Rotations(aliceVision::rotationAveraging::RelativeRotations& vec_relatives_R); + + // Logger + std::shared_ptr _htmlDocStream; + std::string _loggingFile; + + // Parameter + ERotationAveragingMethod _eRotationAveragingMethod; + ERelativeRotationMethod _eRelativeRotationMethod; + bool _lockAllIntrinsics = false; + + // Data provider + feature::FeaturesPerView* _featuresPerView; + matching::PairwiseMatches* _pairwiseMatches; + + 
std::shared_ptr _normalizedFeaturesPerView; +}; + +} // namespace sfm +} // namespace aliceVision From 237b542962c52da4f772e64e81dff439ed345fe0 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Mon, 19 Aug 2019 18:01:19 +0200 Subject: [PATCH 023/174] [software] New software for stitching panorama with feature matching --- src/software/pipeline/CMakeLists.txt | 13 ++ src/software/pipeline/main_panorama.cpp | 264 ++++++++++++++++++++++++ 2 files changed, 277 insertions(+) create mode 100644 src/software/pipeline/main_panorama.cpp diff --git a/src/software/pipeline/CMakeLists.txt b/src/software/pipeline/CMakeLists.txt index d916cffb03..ec46f12dbd 100644 --- a/src/software/pipeline/CMakeLists.txt +++ b/src/software/pipeline/CMakeLists.txt @@ -88,6 +88,19 @@ if(ALICEVISION_BUILD_SFM) ${Boost_LIBRARIES} ) + # Panorama + alicevision_add_software(aliceVision_panorama + SOURCE main_panorama.cpp + FOLDER ${FOLDER_SOFTWARE_PIPELINE} + LINKS aliceVision_system + aliceVision_image + aliceVision_feature + aliceVision_sfm + aliceVision_sfmData + aliceVision_sfmDataIO + ${Boost_LIBRARIES} + ) + # Compute structure from known camera poses alicevision_add_software(aliceVision_computeStructureFromKnownPoses SOURCE main_computeStructureFromKnownPoses.cpp diff --git a/src/software/pipeline/main_panorama.cpp b/src/software/pipeline/main_panorama.cpp new file mode 100644 index 0000000000..6eed6daf6b --- /dev/null +++ b/src/software/pipeline/main_panorama.cpp @@ -0,0 +1,264 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2017 AliceVision contributors. +// Copyright (c) 2012 openMVG contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include + +// These constants define the current software version. +// They must be updated when the command line is changed. +#define ALICEVISION_SOFTWARE_VERSION_MAJOR 1 +#define ALICEVISION_SOFTWARE_VERSION_MINOR 0 + +using namespace aliceVision; + +namespace po = boost::program_options; +namespace fs = boost::filesystem; + +int main(int argc, char **argv) +{ + // command-line parameters + + std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); + std::string sfmDataFilename; + std::vector featuresFolders; + std::vector matchesFolders; + std::string outDirectory; + + // user optional parameters + + std::string outSfMDataFilename = "SfmData.json"; + std::string describerTypesName = feature::EImageDescriberType_enumToString(feature::EImageDescriberType::SIFT); + sfm::ERotationAveragingMethod rotationAveragingMethod = sfm::ROTATION_AVERAGING_L2; + sfm::ERelativeRotationMethod relativeRotationMethod = sfm::RELATIVE_ROTATION_FROM_E; + bool lockAllIntrinsics = false; + + po::options_description allParams( + "Perform detection of cameras for 360° panorama stitching\n" + "Based on Global SfM method but with no translations between cameras\n" + "AliceVision panorama"); + + po::options_description requiredParams("Required parameters"); + requiredParams.add_options() + ("input,i", po::value(&sfmDataFilename)->required(), + "SfMData file.") + ("output,o", po::value(&outDirectory)->required(), + "Path of the output folder.") + ("featuresFolders,f", po::value>(&featuresFolders)->multitoken()->required(), + "Path to folder(s) containing the extracted features.") + ("matchesFolders,m", po::value>(&matchesFolders)->multitoken()->required(), + "Path to folder(s) in which computed matches are stored."); + + po::options_description optionalParams("Optional parameters"); + optionalParams.add_options() + ("outSfMDataFilename", 
po::value(&outSfMDataFilename)->default_value(outSfMDataFilename), + "Filename of the output SfMData file.") + ("describerTypes,d", po::value(&describerTypesName)->default_value(describerTypesName), + feature::EImageDescriberType_informations().c_str()) + ("rotationAveraging", po::value(&rotationAveragingMethod)->default_value(rotationAveragingMethod), + "* 1: L1 minimization\n" + "* 2: L2 minimization") + ("relativeRotation", po::value(&relativeRotationMethod)->default_value(relativeRotationMethod), + "* from essential matrix" + "* from homography matrix") + ("lockAllIntrinsics", po::value(&lockAllIntrinsics)->default_value(lockAllIntrinsics), + "Force lock of all camera intrinsic parameters, so they will not be refined during Bundle Adjustment."); + + po::options_description logParams("Log parameters"); + logParams.add_options() + ("verboseLevel,v", po::value(&verboseLevel)->default_value(verboseLevel), + "verbosity level (fatal, error, warning, info, debug, trace)."); + + allParams.add(requiredParams).add(optionalParams).add(logParams); + + po::variables_map vm; + try + { + po::store(po::parse_command_line(argc, argv, allParams), vm); + + if(vm.count("help") || (argc == 1)) + { + ALICEVISION_COUT(allParams); + return EXIT_SUCCESS; + } + po::notify(vm); + } + catch(boost::program_options::required_option& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + catch(boost::program_options::error& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + + ALICEVISION_COUT("Program called with the following parameters:"); + ALICEVISION_COUT(vm); + + // set verbose level + system::Logger::get()->setLogLevel(verboseLevel); + + if (rotationAveragingMethod < sfm::ROTATION_AVERAGING_L1 || + rotationAveragingMethod > sfm::ROTATION_AVERAGING_L2 ) + { + ALICEVISION_LOG_ERROR("Rotation averaging method is invalid"); + return 
EXIT_FAILURE; + } + + if (relativeRotationMethod < sfm::RELATIVE_ROTATION_FROM_E || + relativeRotationMethod > sfm::RELATIVE_ROTATION_FROM_H ) + { + ALICEVISION_LOG_ERROR("Relative rotation method is invalid"); + return EXIT_FAILURE; + } + + // load input SfMData scene + sfmData::SfMData sfmData; + if(!sfmDataIO::Load(sfmData, sfmDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) + { + ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmDataFilename << "' cannot be read."); + return EXIT_FAILURE; + } + + if(!sfmData.structure.empty()) + { + ALICEVISION_LOG_ERROR("Part computed SfMData are not currently supported in Global SfM." << std::endl << "Please use Incremental SfM. Aborted"); + return EXIT_FAILURE; + } + + if(!sfmData.getRigs().empty()) + { + ALICEVISION_LOG_ERROR("Rigs are not currently supported in Global SfM." << std::endl << "Please use Incremental SfM. Aborted"); + return EXIT_FAILURE; + } + + // get describerTypes + const std::vector describerTypes = feature::EImageDescriberType_stringToEnums(describerTypesName); + + // features reading + feature::FeaturesPerView featuresPerView; + if(!sfm::loadFeaturesPerView(featuresPerView, sfmData, featuresFolders, describerTypes)) + { + ALICEVISION_LOG_ERROR("Invalid features"); + return EXIT_FAILURE; + } + + // matches reading + // Load the match file (try to read the two matches file formats). 
+ matching::PairwiseMatches pairwiseMatches; + if(!sfm::loadPairwiseMatches(pairwiseMatches, sfmData, matchesFolders, describerTypes)) + { + ALICEVISION_LOG_ERROR("Unable to load matches files from: " << matchesFolders); + return EXIT_FAILURE; + } + + if(outDirectory.empty()) + { + ALICEVISION_LOG_ERROR("It is an invalid output folder"); + return EXIT_FAILURE; + } + + if(!fs::exists(outDirectory)) + fs::create_directory(outDirectory); + + // global SfM reconstruction process + aliceVision::system::Timer timer; + sfm::ReconstructionEngine_panorama sfmEngine( + sfmData, + outDirectory, + (fs::path(outDirectory) / "sfm_log.html").string()); + + // configure the featuresPerView & the matches_provider + sfmEngine.SetFeaturesProvider(&featuresPerView); + sfmEngine.SetMatchesProvider(&pairwiseMatches); + + // configure reconstruction parameters + sfmEngine.setLockAllIntrinsics(lockAllIntrinsics); // TODO: rename param + + // configure motion averaging method + sfmEngine.SetRotationAveragingMethod(sfm::ERotationAveragingMethod(rotationAveragingMethod)); + + // configure relative rotation method (from essential or from homography matrix) + sfmEngine.SetRelativeRotationMethod(sfm::ERelativeRotationMethod(relativeRotationMethod)); + + if(!sfmEngine.process()) + return EXIT_FAILURE; + + // get the color for the 3D points + sfmEngine.colorize(); + + // set featuresFolders and matchesFolders relative paths + { + sfmEngine.getSfMData().addFeaturesFolders(featuresFolders); + sfmEngine.getSfMData().addMatchesFolders(matchesFolders); + sfmEngine.getSfMData().setAbsolutePath(outDirectory); + } + + sfmData::SfMData& outSfmData = sfmEngine.getSfMData(); + + ALICEVISION_LOG_INFO("Global structure from motion took (s): " << timer.elapsed()); + ALICEVISION_LOG_INFO("Generating HTML report..."); + + sfm::generateSfMReport(outSfmData, (fs::path(outDirectory) / "sfm_report.html").string()); + + // export to disk computed scene (data & visualizable results) + ALICEVISION_LOG_INFO("Export 
SfMData to disk"); + + sfmDataIO::Save(outSfmData, outSfMDataFilename, sfmDataIO::ESfMData::ALL); + sfmDataIO::Save(outSfmData, (fs::path(outDirectory) / "cloud_and_poses.ply").string(), sfmDataIO::ESfMData::ALL); + + ALICEVISION_LOG_INFO("Structure from Motion results:" << std::endl + << "\t- # input images: " << outSfmData.getViews().size() << std::endl + << "\t- # cameras calibrated: " << outSfmData.getPoses().size() << std::endl + << "\t- # landmarks: " << outSfmData.getLandmarks().size()); + + /* + // Create panorama buffer + + for(auto& viewIt: sfmData.getViews()) + { + IndexT viewId = viewIt.first; + const View& view = *viewIt.second.get(); + if(!sfmData.isPoseAndIntrinsicDefined(view)) + continue; + + const CameraPose camPose = sfmData.getPose(view); + const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); + + std::string imagePath = view.getImagePath(); + // Image RGB(A)f + + for(int y = 0; y <; ++y) + { + for(int x = 0; x <; ++x) + { + // equirectangular to unit vector + // unit vector to camera + // is in camera frustrum? 
+ // camera to image (pixel coord with distortion) + // add_disto + cam2ima + } + } + } + */ + + return EXIT_SUCCESS; +} From bb929c12a0f96f20f3dc31a1c43df271352c019d Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Mon, 19 Aug 2019 18:05:49 +0200 Subject: [PATCH 024/174] [globalSfM] Add max_angular_error for triplet rotation rejection --- .../sfm/pipeline/global/GlobalSfMRotationAveragingSolver.cpp | 5 +++-- .../sfm/pipeline/global/GlobalSfMRotationAveragingSolver.hpp | 1 + .../sfm/pipeline/global/ReconstructionEngine_globalSfM.cpp | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.cpp b/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.cpp index f70f35bbd1..1fe2660c11 100644 --- a/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.cpp +++ b/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.cpp @@ -27,6 +27,7 @@ bool GlobalSfMRotationAveragingSolver::Run( ERotationAveragingMethod eRotationAveragingMethod, ERelativeRotationInferenceMethod eRelativeRotationInferenceMethod, const RelativeRotations& relativeRot_In, + const double max_angular_error, HashMap& map_globalR ) const { @@ -43,8 +44,8 @@ bool GlobalSfMRotationAveragingSolver::Run( //------------------- PairSet pairs = getPairs(relativeRotations); std::vector< graph::Triplet > vec_triplets = graph::tripletListing(pairs); - //-- Rejection triplet that are 'not' identity rotation (error to identity > 5°) - TripletRotationRejection(5.0f, vec_triplets, relativeRotations); + //-- Rejection triplet that are 'not' identity rotation (error to identity > max_angular_error) + TripletRotationRejection(max_angular_error, vec_triplets, relativeRotations); pairs = getPairs(relativeRotations); const std::set set_remainingIds = graph::CleanGraph_KeepLargestBiEdge_Nodes(pairs); diff --git a/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.hpp 
b/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.hpp index 65e3d946ed..cfa034e419 100644 --- a/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.hpp +++ b/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.hpp @@ -79,6 +79,7 @@ class GlobalSfMRotationAveragingSolver bool Run(ERotationAveragingMethod eRotationAveragingMethod, ERelativeRotationInferenceMethod eRelativeRotationInferenceMethod, const rotationAveraging::RelativeRotations& relativeRot_In, + const double max_angular_error, HashMap& map_globalR) const; /** diff --git a/src/aliceVision/sfm/pipeline/global/ReconstructionEngine_globalSfM.cpp b/src/aliceVision/sfm/pipeline/global/ReconstructionEngine_globalSfM.cpp index 6657fbfb61..099698f053 100644 --- a/src/aliceVision/sfm/pipeline/global/ReconstructionEngine_globalSfM.cpp +++ b/src/aliceVision/sfm/pipeline/global/ReconstructionEngine_globalSfM.cpp @@ -197,7 +197,8 @@ bool ReconstructionEngine_globalSfM::Compute_Global_Rotations(const rotationAver const ERelativeRotationInferenceMethod eRelativeRotationInferenceMethod = TRIPLET_ROTATION_INFERENCE_COMPOSITION_ERROR; //TRIPLET_ROTATION_INFERENCE_NONE; GlobalSfMRotationAveragingSolver rotationAveraging_solver; - const bool b_rotationAveraging = rotationAveraging_solver.Run(_eRotationAveragingMethod, eRelativeRotationInferenceMethod, relatives_R, global_rotations); + //-- Rejection triplet that are 'not' identity rotation (error to identity > 5°) + const bool b_rotationAveraging = rotationAveraging_solver.Run(_eRotationAveragingMethod, eRelativeRotationInferenceMethod, relatives_R, 5.0, global_rotations); ALICEVISION_LOG_DEBUG("Found #global_rotations: " << global_rotations.size()); From be120eb2af53042ded69c425c86e225ac8341e21 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Mon, 19 Aug 2019 18:06:47 +0200 Subject: [PATCH 025/174] [sfm] Add log infos --- .../sfm/pipeline/RelativePoseInfo.cpp | 8 ++++++- .../GlobalSfMRotationAveragingSolver.cpp | 21 
+++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/src/aliceVision/sfm/pipeline/RelativePoseInfo.cpp b/src/aliceVision/sfm/pipeline/RelativePoseInfo.cpp index b9c23f612b..12676625ad 100644 --- a/src/aliceVision/sfm/pipeline/RelativePoseInfo.cpp +++ b/src/aliceVision/sfm/pipeline/RelativePoseInfo.cpp @@ -102,8 +102,11 @@ bool robustRelativePose( max_iteration_count, &relativePose_info.essential_matrix, relativePose_info.initial_residual_tolerance); relativePose_info.found_residual_precision = acRansacOut.first; - if (relativePose_info.vec_inliers.size() < SolverType::MINIMUM_SAMPLES * ALICEVISION_MINIMUM_SAMPLES_COEF ) + if (relativePose_info.vec_inliers.size() < SolverType::MINIMUM_SAMPLES * ALICEVISION_MINIMUM_SAMPLES_COEF ) + { + ALICEVISION_LOG_INFO("robustRelativePose: no sufficient coverage (the model does not support enough samples): " << relativePose_info.vec_inliers.size()); return false; // no sufficient coverage (the model does not support enough samples) + } // estimation of the relative poses Mat3 R; @@ -111,7 +114,10 @@ bool robustRelativePose( if (!estimate_Rt_fromE( K1, K2, x1, x2, relativePose_info.essential_matrix, relativePose_info.vec_inliers, &R, &t)) + { + ALICEVISION_LOG_INFO("robustRelativePose: cannot find a valid [R|t] couple that makes the inliers in front of the camera."); return false; // cannot find a valid [R|t] couple that makes the inliers in front of the camera. 
+ } // Store [R|C] for the second camera, since the first camera is [Id|0] relativePose_info.relativePose = geometry::Pose3(R, -R.transpose() * t); diff --git a/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.cpp b/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.cpp index 1fe2660c11..9f9f238ad0 100644 --- a/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.cpp +++ b/src/aliceVision/sfm/pipeline/global/GlobalSfMRotationAveragingSolver.cpp @@ -34,6 +34,8 @@ bool GlobalSfMRotationAveragingSolver::Run( RelativeRotations relativeRotations = relativeRot_In; // We work on a copy, since inference can remove some relative motions + ALICEVISION_LOG_DEBUG("GlobalSfMRotationAveragingSolver: A) relativeRotations.size(): " << relativeRotations.size()); + //-> Test there is only one graph and at least 3 camera? switch(eRelativeRotationInferenceMethod) { @@ -43,7 +45,11 @@ bool GlobalSfMRotationAveragingSolver::Run( // Triplet inference (test over the composition error) //------------------- PairSet pairs = getPairs(relativeRotations); + ALICEVISION_LOG_DEBUG("GlobalSfMRotationAveragingSolver: pairs.size(): " << pairs.size()); + std::vector< graph::Triplet > vec_triplets = graph::tripletListing(pairs); + ALICEVISION_LOG_DEBUG("GlobalSfMRotationAveragingSolver: vec_triplets.size(): " << vec_triplets.size()); + //-- Rejection triplet that are 'not' identity rotation (error to identity > max_angular_error) TripletRotationRejection(max_angular_error, vec_triplets, relativeRotations); @@ -76,6 +82,9 @@ bool GlobalSfMRotationAveragingSolver::Run( //- B. 
solve global rotation computation bool bSuccess = false; std::vector vec_globalR(_reindexForward.size()); + + ALICEVISION_LOG_DEBUG("GlobalSfMRotationAveragingSolver: B) vec_globalR.size(): " << vec_globalR.size()); + switch(eRotationAveragingMethod) { case ROTATION_AVERAGING_L2: @@ -85,11 +94,16 @@ bool GlobalSfMRotationAveragingSolver::Run( _reindexForward.size(), relativeRotations, vec_globalR); + + ALICEVISION_LOG_DEBUG("rotationAveraging::l2::L2RotationAveraging: success: " << bSuccess); //- Non linear refinement of the global rotations if (bSuccess) + { bSuccess = rotationAveraging::l2::L2RotationAveraging_Refine( relativeRotations, vec_globalR); + ALICEVISION_LOG_DEBUG("rotationAveraging::l2::L2RotationAveraging_Refine: success: " << bSuccess); + } // save kept pairs (restore original pose indices using the backward reindexing) for(RelativeRotations::iterator iter = relativeRotations.begin(); iter != relativeRotations.end(); ++iter) @@ -111,6 +125,7 @@ bool GlobalSfMRotationAveragingSolver::Run( bSuccess = rotationAveraging::l1::GlobalRotationsRobust( relativeRotations, vec_globalR, nMainViewID, 0.0f, &vec_inliers); + ALICEVISION_LOG_DEBUG("rotationAveraging::l1::GlobalRotationsRobust: success: " << bSuccess); ALICEVISION_LOG_DEBUG("inliers: " << vec_inliers); // save kept pairs (restore original pose indices using the backward reindexing) @@ -171,6 +186,8 @@ void GlobalSfMRotationAveragingSolver::TripletRotationRejection( const graph::Triplet & triplet = vec_triplets[i]; const IndexT I = triplet.i, J = triplet.j , K = triplet.k; + ALICEVISION_LOG_DEBUG("GlobalSfMRotationAveragingSolver::TripletRotationRejection: i: " << i << ", (" << I << ", " << J << ", " << K << ")."); + //-- Find the three relative rotations const Pair ij(I,J), ji(J,I); const Mat3 RIJ = (map_relatives.count(ij)) ? 
@@ -207,6 +224,10 @@ void GlobalSfMRotationAveragingSolver::TripletRotationRejection( else map_relatives_validated[ik] = map_relatives.at(ik); } + else + { + ALICEVISION_LOG_DEBUG("GlobalSfMRotationAveragingSolver::TripletRotationRejection: i: " << i << ", angularErrorDegree: " << angularErrorDegree << ", max_angular_error: " << max_angular_error); + } } map_relatives = std::move(map_relatives_validated); From 9cdfb960ae47a4988f85211e5eca4256e5fd43bb Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Mon, 26 Aug 2019 17:21:14 +0200 Subject: [PATCH 026/174] [panorama] remove aliceVision namespace --- .../ReconstructionEngine_panorama.cpp | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 451e6b5209..2b1585c53f 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -56,7 +56,7 @@ bool robustRelativeRotation_fromE( const size_t max_iteration_count) { // Use the 5 point solver to estimate E - typedef aliceVision::essential::kernel::FivePointKernel SolverType; + typedef essential::kernel::FivePointKernel SolverType; // Define the AContrario adaptor typedef ACKernelAdaptorEssential< SolverType, @@ -105,8 +105,8 @@ bool robustRelativeRotation_fromE( * @param[in] K2 3x3 calibration matrix of the second view. * @return The 3x3 rotation matrix corresponding to the pure rotation between the views. 
*/ -aliceVision::Mat3 decomposePureRotationHomography(const aliceVision::Mat3 &homography, const aliceVision::Mat3 &K1, - const aliceVision::Mat3 &K2) +aliceVision::Mat3 decomposePureRotationHomography(const Mat3 &homography, const Mat3 &K1, + const Mat3 &K2) { // G is the "calibrated" homography inv(K2) * H * K1 const auto G = K2.inverse() * homography * K1; @@ -117,7 +117,7 @@ aliceVision::Mat3 decomposePureRotationHomography(const aliceVision::Mat3 &homog //@fixme find possible bad cases? // compute and return the closest rotation matrix - Eigen::JacobiSVD usv(rotation, Eigen::ComputeFullU | Eigen::ComputeFullV); + Eigen::JacobiSVD usv(rotation, Eigen::ComputeFullU | Eigen::ComputeFullV); const auto &u = usv.matrixU(); const auto vt = usv.matrixV().transpose(); return u * vt; @@ -133,25 +133,25 @@ aliceVision::Mat3 decomposePureRotationHomography(const aliceVision::Mat3 &homog * @param[out] vec_inliers The inliers satisfying the homography as a list of indices. * @return the status of the estimation. */ -aliceVision::EstimationStatus robustHomographyEstimationAC(const aliceVision::Mat2X &x1, - const aliceVision::Mat2X &x2, +aliceVision::EstimationStatus robustHomographyEstimationAC(const Mat2X &x1, + const Mat2X &x2, const std::pair &imgSize1, const std::pair &imgSize2, - aliceVision::Mat3 &H, + Mat3 &H, std::vector &vec_inliers) { - using KernelType = aliceVision::robustEstimation::ACKernelAdaptor< - aliceVision::homography::kernel::FourPointSolver, - aliceVision::homography::kernel::AsymmetricError, - aliceVision::UnnormalizerI, - aliceVision::Mat3>; + using KernelType = robustEstimation::ACKernelAdaptor< + homography::kernel::FourPointSolver, + homography::kernel::AsymmetricError, + UnnormalizerI, + Mat3>; KernelType kernel(x1, imgSize1.first, imgSize1.second, x2, imgSize2.first, imgSize2.second, false); // configure as point to point error model. 
- const std::pair ACRansacOut = aliceVision::robustEstimation::ACRANSAC(kernel, vec_inliers, + const std::pair ACRansacOut = robustEstimation::ACRANSAC(kernel, vec_inliers, 1024, &H, std::numeric_limits::infinity()); From e2fcd33913649f130d9e34455ad9b78f47ad5613 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Wed, 28 Aug 2019 10:53:58 +0200 Subject: [PATCH 027/174] [panorama] move maxIterations as default parameter --- .../sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 2b1585c53f..0de2ec46ec 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -707,13 +707,12 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging // Since we use normalized features, we will use unit image size and intrinsic matrix: const std::pair imageSize(1., 1.); const Mat3 K = Mat3::Identity(); - const size_t maxIterations = 256; switch(_eRelativeRotationMethod) { case RELATIVE_ROTATION_FROM_E: { - if(!robustRelativeRotation_fromE(K, K, x1, x2, relativePose_info, imageSize, imageSize, maxIterations)) + if(!robustRelativeRotation_fromE(K, K, x1, x2, imageSize, imageSize, relativePose_info)) { ALICEVISION_LOG_INFO("Relative pose computation: i: " << i << ", (" << I << ", " << J <<") => FAILED"); continue; @@ -727,7 +726,7 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging cam_I->imagePlane_toCameraPlaneError(2.5) * cam_J->imagePlane_toCameraPlaneError(2.5), 1./2.); - if(!robustRelativeRotation_fromH(K, K, x1, x2, relativeRotation_info, imageSize, imageSize, maxIterations)) + if(!robustRelativeRotation_fromH(K, K, x1, x2, imageSize, imageSize, relativeRotation_info)) { 
ALICEVISION_LOG_INFO("Relative pose computation: i: " << i << ", (" << I << ", " << J <<") => FAILED"); continue; From d9df8faf7e8d32ecfe97806fb6bf2b6fe05dc7b5 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Wed, 28 Aug 2019 12:22:56 +0200 Subject: [PATCH 028/174] [panorama] move structure RelativeRotation from .cpp to .hpp --- .../ReconstructionEngine_panorama.cpp | 49 ++------------ .../ReconstructionEngine_panorama.hpp | 64 +++++++++++++++++++ 2 files changed, 71 insertions(+), 42 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 0de2ec46ec..154df09002 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -50,9 +50,9 @@ using namespace aliceVision::robustEstimation; bool robustRelativeRotation_fromE( const Mat3 & K1, const Mat3 & K2, const Mat & x1, const Mat & x2, - RelativePoseInfo & relativePose_info, const std::pair & size_ima1, const std::pair & size_ima2, + RelativePoseInfo & relativePose_info, const size_t max_iteration_count) { // Use the 5 point solver to estimate E @@ -163,49 +163,14 @@ aliceVision::EstimationStatus robustHomographyEstimationAC(const Mat2X &x1, return {valid, hasStrongSupport}; } -/** - * @brief A struct containing the information of the relative rotation. - */ -struct RelativeRotationInfo -{ - /** - * @brief Default constructor. - */ - RelativeRotationInfo() = default; - - /// the homography. - aliceVision::Mat3 _homography{}; - /// the relative rotation. - aliceVision::Mat3 _relativeRotation{}; - /// the inliers. - std::vector _inliers{}; - /// initial threshold for the acransac process. - double _initialResidualTolerance{std::numeric_limits::infinity()}; - /// the estimated threshold found by acransac. 
- double _foundResidualPrecision{std::numeric_limits::infinity()}; - -}; - -/** - * @brief Estimate the relative rotation between two views related by a pure rotation. - * @param[in] K1 3x3 calibration matrix of the first view. - * @param[in] K2 3x3 calibration matrix of the second view. - * @param[in] x1 The points on the first image. - * @param[in] x2 The corresponding points on the second image. - * @param[in] imgSize1 The size of the first image. - * @param[in] imgSize2 The size of the second image. - * @param[out] relativeRotationInfo Contains the result of the estimation. - * @param[in] maxIterationCount Max number of iteration for the ransac process. - * @return true if a homography has been estimated. - */ -bool robustRelativeRotation_fromH(const aliceVision::Mat3 &K1, - const aliceVision::Mat3 &K2, - const aliceVision::Mat2X &x1, - const aliceVision::Mat2X &x2, - RelativeRotationInfo &relativeRotationInfo, +bool robustRelativeRotation_fromH(const Mat3 &K1, + const Mat3 &K2, + const Mat2X &x1, + const Mat2X &x2, const std::pair &imgSize1, const std::pair &imgSize2, - const size_t maxIterationCount) + RelativeRotationInfo &relativeRotationInfo, + const size_t max_iteration_count) { std::vector vec_inliers{}; diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp index f795772940..6348f213b4 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp @@ -57,6 +57,70 @@ inline std::istream& operator>>(std::istream& in, ERelativeRotationMethod& rotat return in; } + +/** + * @brief A struct containing the information of the relative rotation. + */ +struct RelativeRotationInfo +{ + /** + * @brief Default constructor. + */ + RelativeRotationInfo() = default; + + /// the homography. + Mat3 _homography{}; + /// the relative rotation. 
+ Mat3 _relativeRotation{}; + /// the inliers. + std::vector _inliers{}; + /// initial threshold for the acransac process. + double _initialResidualTolerance{std::numeric_limits::infinity()}; + /// the estimated threshold found by acransac. + double _foundResidualPrecision{std::numeric_limits::infinity()}; + +}; + + +/** + * @brief Estimate the relative pose between two views. + * @param[in] K1 3x3 calibration matrix of the first view. + * @param[in] K2 3x3 calibration matrix of the second view. + * @param[in] x1 The points on the first image. + * @param[in] x2 The corresponding points on the second image. + * @param[in] imgSize1 The size of the first image. + * @param[in] imgSize2 The size of the second image. + * @param[out] relativePoseInfo Contains the result of the estimation. + * @param[in] maxIterationCount Max number of iteration for the ransac process. + * @return true if a homography has been estimated. + */ +bool robustRelativeRotation_fromE(const Mat3 & K1, const Mat3 & K2, + const Mat & x1, const Mat & x2, + const std::pair & size_ima1, + const std::pair & size_ima2, + RelativePoseInfo & relativePose_info, + const size_t max_iteration_count = 4096); + +/** + * @brief Estimate the relative rotation between two views related by a pure rotation. + * @param[in] K1 3x3 calibration matrix of the first view. + * @param[in] K2 3x3 calibration matrix of the second view. + * @param[in] x1 The points on the first image. + * @param[in] x2 The corresponding points on the second image. + * @param[in] imgSize1 The size of the first image. + * @param[in] imgSize2 The size of the second image. + * @param[out] relativeRotationInfo Contains the result of the estimation. + * @param[in] maxIterationCount Max number of iteration for the ransac process. + * @return true if a homography has been estimated. 
+ */ +bool robustRelativeRotation_fromH(const Mat3 &K1, const Mat3 &K2, + const Mat2X &x1, const Mat2X &x2, + const std::pair &imgSize1, + const std::pair &imgSize2, + RelativeRotationInfo &relativeRotationInfo, + const size_t max_iteration_count = 4096); + + /// Panorama Pipeline Reconstruction Engine. /// - Method: Based on Global SfM but with no translations between cameras. class ReconstructionEngine_panorama : public ReconstructionEngine From c18c79a5d278dbf1ac1502068732398f988ce603 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Wed, 28 Aug 2019 12:23:44 +0200 Subject: [PATCH 029/174] [sfm] include ReconstructionEngine_panorama.hpp --- src/aliceVision/sfm/sfm.hpp | 1 + 1 file changed, 1 insertion(+) diff --git a/src/aliceVision/sfm/sfm.hpp b/src/aliceVision/sfm/sfm.hpp index 697d4487ee..438b4a1235 100644 --- a/src/aliceVision/sfm/sfm.hpp +++ b/src/aliceVision/sfm/sfm.hpp @@ -25,6 +25,7 @@ #include #include #include +#include #include #include #include From 3a507282f3ba9c8e21b9bafa0e6cbc080995b155 Mon Sep 17 00:00:00 2001 From: Anouk Liberge Date: Wed, 28 Aug 2019 12:24:54 +0200 Subject: [PATCH 030/174] [panorama] Add unit test for panorama SfM --- .../sfm/pipeline/panorama/CMakeLists.txt | 2 +- .../pipeline/panorama/panoramaSfM_test.cpp | 121 ++++++++++++++++++ 2 files changed, 122 insertions(+), 1 deletion(-) create mode 100644 src/aliceVision/sfm/pipeline/panorama/panoramaSfM_test.cpp diff --git a/src/aliceVision/sfm/pipeline/panorama/CMakeLists.txt b/src/aliceVision/sfm/pipeline/panorama/CMakeLists.txt index 1476c47cb8..1f876755f4 100644 --- a/src/aliceVision/sfm/pipeline/panorama/CMakeLists.txt +++ b/src/aliceVision/sfm/pipeline/panorama/CMakeLists.txt @@ -1,4 +1,4 @@ -alicevision_add_test(panorama_test.cpp +alicevision_add_test(panoramaSfM_test.cpp NAME "sfm_panorama" LINKS aliceVision_sfm aliceVision_feature diff --git a/src/aliceVision/sfm/pipeline/panorama/panoramaSfM_test.cpp b/src/aliceVision/sfm/pipeline/panorama/panoramaSfM_test.cpp new file 
mode 100644 index 0000000000..c6a5b98f82 --- /dev/null +++ b/src/aliceVision/sfm/pipeline/panorama/panoramaSfM_test.cpp @@ -0,0 +1,121 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2016 AliceVision contributors. +// Copyright (c) 2012 openMVG contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. + +#include +#include +#include +#include + +#include +#include +#include + +#define BOOST_TEST_MODULE PANORAMA_SFM +#include +#include +#include + +using namespace aliceVision; +using namespace aliceVision::camera; +using namespace aliceVision::geometry; +using namespace aliceVision::sfm; +using namespace aliceVision::sfmData; + +// Test summary: +// - Create a rotation matrix between two views +// - Create two matrices of calibration for two views + + +BOOST_AUTO_TEST_CASE(PANORAMA_SFM) +{ + // rotation between the two views + const Mat3 rotation = aliceVision::rotationXYZ(0.01, -0.001, -0.2); + ALICEVISION_LOG_INFO("Ground truth rotation:\n" << rotation); + + // some random 3D points on the unit sphere + const Mat3X pts3d = Mat3X::Random(3, 20).colwise().normalized(); + ALICEVISION_LOG_INFO("points 3d:\n" << pts3d); + + // calibration 1 + Mat3 K1; + K1 << 1200, 0, 960, + 0, 1200, 540, + 0, 0, 1; + ALICEVISION_LOG_INFO("K1:\n" << K1); + + // calibration 2 + Mat3 K2; + K2 << 1100, 0, 960, + 0, 1100, 540, + 0, 0, 1; + ALICEVISION_LOG_INFO("K2:\n" << K2); + + // 2d points on each side as projection of the 3D points + const Mat2X pts1 = (K1 * pts3d).colwise().hnormalized(); + // ALICEVISION_LOG_INFO("pts1:\n" << pts1); + const Mat2X pts2 = (K2 * rotation * pts3d).colwise().hnormalized(); + // ALICEVISION_LOG_INFO("pts2:\n" << pts2); + + // test the uncalibrated version, just pass the 2d points and compute R + + const double epsilon = 1e-4; + + // Relative Rotation from H + { + 
RelativeRotationInfo rotationInfo{}; + ALICEVISION_LOG_INFO("\n\n###########################################\nUncalibrated from H"); + robustRelativeRotation_fromH(K1, K2, pts1, pts2, + std::make_pair(1920, 1080), + std::make_pair(1920, 1080), + rotationInfo); + ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo._relativeRotation); + + EXPECT_MATRIX_NEAR(rotation, rotationInfo._relativeRotation, epsilon); + + + // test the calibrated version, compute normalized points for each view (by multiplying by the inverse of K) and estimate H and R + ALICEVISION_LOG_INFO("\n\n###########################################\nCalibrated from H"); + robustRelativeRotation_fromH(Mat3::Identity(), Mat3::Identity(), + (K1.inverse()*pts1.colwise().homogeneous()).colwise().hnormalized(), + (K2.inverse()*pts2.colwise().homogeneous()).colwise().hnormalized(), + std::make_pair(1920, 1080), + std::make_pair(1920, 1080), + rotationInfo); + ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo._relativeRotation); + + EXPECT_MATRIX_NEAR(rotation, rotationInfo._relativeRotation, epsilon); + } + + /* + // Relative Rotation from E + { + RelativePoseInfo rotationInfo{}; + ALICEVISION_LOG_INFO("\n\n###########################################\nUncalibrated from E"); + robustRelativeRotation_fromE(K1, K2, pts1, pts2, + std::make_pair(1920, 1080), + std::make_pair(1920, 1080), + rotationInfo); + ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo.relativePose.rotation()); + + EXPECT_MATRIX_NEAR(rotation, rotationInfo.relativePose.rotation(), epsilon); + + + + // test the calibrated version, compute normalized points for each view (by multiplying by the inverse of K) and estimate H and R + ALICEVISION_LOG_INFO("\n\n###########################################\nCalibrated from E"); + 
robustRelativeRotation_fromE(Mat3::Identity(), Mat3::Identity(), + (K1.inverse()*pts1.colwise().homogeneous()).colwise().hnormalized(), + (K2.inverse()*pts2.colwise().homogeneous()).colwise().hnormalized(), + std::make_pair(1920, 1080), + std::make_pair(1920, 1080), + rotationInfo); + ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo.relativePose.rotation()); + + EXPECT_MATRIX_NEAR(rotation, rotationInfo.relativePose.rotation(), epsilon); + } + */ +} From e474a82385f1b103ab76d67f814562105139f8c0 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 12:35:19 +0200 Subject: [PATCH 031/174] [software] panoramaEstimation: rename panorama into panoramaEstimation and add reorientation on the first image --- src/aliceVision/sfmData/View.hpp | 9 ++ src/software/pipeline/CMakeLists.txt | 24 +-- ...norama.cpp => main_panoramaEstimation.cpp} | 140 ++++++++++++------ 3 files changed, 119 insertions(+), 54 deletions(-) rename src/software/pipeline/{main_panorama.cpp => main_panoramaEstimation.cpp} (69%) diff --git a/src/aliceVision/sfmData/View.hpp b/src/aliceVision/sfmData/View.hpp index e5afcfedb9..892f83f818 100644 --- a/src/aliceVision/sfmData/View.hpp +++ b/src/aliceVision/sfmData/View.hpp @@ -389,9 +389,18 @@ class View { if(hasDigitMetadata("Orientation")) return static_cast(std::stoi(getMetadata("Orientation"))); + if(hasDigitMetadata("Exif:Orientation")) + return static_cast(std::stoi(getMetadata("Exif:Orientation"))); return EEXIFOrientation::UNKNOWN; } + std::string getMetadataDateTimeOriginal() const + { + if(hasMetadata("Exif:DateTimeOriginal")) + return getMetadata("Exif:DateTimeOriginal"); + return ""; + } + /** * @brief Get the view metadata structure * @return the view metadata diff --git a/src/software/pipeline/CMakeLists.txt b/src/software/pipeline/CMakeLists.txt index ec46f12dbd..666a808d7a 100644 --- a/src/software/pipeline/CMakeLists.txt +++ 
b/src/software/pipeline/CMakeLists.txt @@ -88,18 +88,18 @@ if(ALICEVISION_BUILD_SFM) ${Boost_LIBRARIES} ) - # Panorama - alicevision_add_software(aliceVision_panorama - SOURCE main_panorama.cpp - FOLDER ${FOLDER_SOFTWARE_PIPELINE} - LINKS aliceVision_system - aliceVision_image - aliceVision_feature - aliceVision_sfm - aliceVision_sfmData - aliceVision_sfmDataIO - ${Boost_LIBRARIES} - ) + # Panorama + alicevision_add_software(aliceVision_panoramaEstimation + SOURCE main_panoramaEstimation.cpp + FOLDER ${FOLDER_SOFTWARE_PIPELINE} + LINKS aliceVision_system + aliceVision_image + aliceVision_feature + aliceVision_sfm + aliceVision_sfmData + aliceVision_sfmDataIO + ${Boost_LIBRARIES} + ) # Compute structure from known camera poses alicevision_add_software(aliceVision_computeStructureFromKnownPoses diff --git a/src/software/pipeline/main_panorama.cpp b/src/software/pipeline/main_panoramaEstimation.cpp similarity index 69% rename from src/software/pipeline/main_panorama.cpp rename to src/software/pipeline/main_panoramaEstimation.cpp index 6eed6daf6b..449fc09ef6 100644 --- a/src/software/pipeline/main_panorama.cpp +++ b/src/software/pipeline/main_panoramaEstimation.cpp @@ -10,13 +10,19 @@ #include #include #include +#include #include #include #include +#include #include #include +#include +#include +#include + #include // These constants define the current software version. 
@@ -29,6 +35,13 @@ using namespace aliceVision; namespace po = boost::program_options; namespace fs = boost::filesystem; +inline std::istream& operator>>(std::istream& in, std::pair& out) +{ + in >> out.first; + in >> out.second; + return in; +} + int main(int argc, char **argv) { // command-line parameters @@ -41,16 +54,16 @@ int main(int argc, char **argv) // user optional parameters - std::string outSfMDataFilename = "SfmData.json"; + std::string outSfMDataFilename = "sfmData.json"; std::string describerTypesName = feature::EImageDescriberType_enumToString(feature::EImageDescriberType::SIFT); sfm::ERotationAveragingMethod rotationAveragingMethod = sfm::ROTATION_AVERAGING_L2; sfm::ERelativeRotationMethod relativeRotationMethod = sfm::RELATIVE_ROTATION_FROM_E; bool lockAllIntrinsics = false; + int orientation = 0; po::options_description allParams( - "Perform detection of cameras for 360° panorama stitching\n" - "Based on Global SfM method but with no translations between cameras\n" - "AliceVision panorama"); + "Perform estimation of cameras orientation around a nodal point for 360° panorama.\n" + "AliceVision PanoramaEstimation"); po::options_description requiredParams("Required parameters"); requiredParams.add_options() @@ -67,6 +80,8 @@ int main(int argc, char **argv) optionalParams.add_options() ("outSfMDataFilename", po::value(&outSfMDataFilename)->default_value(outSfMDataFilename), "Filename of the output SfMData file.") + ("orientation", po::value(&orientation)->default_value(orientation), + "Orientation") ("describerTypes,d", po::value(&describerTypesName)->default_value(describerTypesName), feature::EImageDescriberType_informations().c_str()) ("rotationAveraging", po::value(&rotationAveragingMethod)->default_value(rotationAveragingMethod), @@ -131,20 +146,20 @@ int main(int argc, char **argv) } // load input SfMData scene - sfmData::SfMData sfmData; - if(!sfmDataIO::Load(sfmData, sfmDataFilename, 
sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) + sfmData::SfMData inputSfmData; + if(!sfmDataIO::Load(inputSfmData, sfmDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) { ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmDataFilename << "' cannot be read."); return EXIT_FAILURE; } - if(!sfmData.structure.empty()) + if(!inputSfmData.structure.empty()) { ALICEVISION_LOG_ERROR("Part computed SfMData are not currently supported in Global SfM." << std::endl << "Please use Incremental SfM. Aborted"); return EXIT_FAILURE; } - if(!sfmData.getRigs().empty()) + if(!inputSfmData.getRigs().empty()) { ALICEVISION_LOG_ERROR("Rigs are not currently supported in Global SfM." << std::endl << "Please use Incremental SfM. Aborted"); return EXIT_FAILURE; @@ -155,7 +170,7 @@ int main(int argc, char **argv) // features reading feature::FeaturesPerView featuresPerView; - if(!sfm::loadFeaturesPerView(featuresPerView, sfmData, featuresFolders, describerTypes)) + if(!sfm::loadFeaturesPerView(featuresPerView, inputSfmData, featuresFolders, describerTypes)) { ALICEVISION_LOG_ERROR("Invalid features"); return EXIT_FAILURE; @@ -164,7 +179,7 @@ int main(int argc, char **argv) // matches reading // Load the match file (try to read the two matches file formats). 
matching::PairwiseMatches pairwiseMatches; - if(!sfm::loadPairwiseMatches(pairwiseMatches, sfmData, matchesFolders, describerTypes)) + if(!sfm::loadPairwiseMatches(pairwiseMatches, inputSfmData, matchesFolders, describerTypes)) { ALICEVISION_LOG_ERROR("Unable to load matches files from: " << matchesFolders); return EXIT_FAILURE; @@ -182,7 +197,7 @@ int main(int argc, char **argv) // global SfM reconstruction process aliceVision::system::Timer timer; sfm::ReconstructionEngine_panorama sfmEngine( - sfmData, + inputSfmData, outDirectory, (fs::path(outDirectory) / "sfm_log.html").string()); @@ -202,16 +217,16 @@ int main(int argc, char **argv) if(!sfmEngine.process()) return EXIT_FAILURE; - // get the color for the 3D points - sfmEngine.colorize(); - // set featuresFolders and matchesFolders relative paths { sfmEngine.getSfMData().addFeaturesFolders(featuresFolders); sfmEngine.getSfMData().addMatchesFolders(matchesFolders); - sfmEngine.getSfMData().setAbsolutePath(outDirectory); + sfmEngine.getSfMData().setAbsolutePath(outSfMDataFilename); } + // get the color for the 3D points + sfmEngine.colorize(); + sfmData::SfMData& outSfmData = sfmEngine.getSfMData(); ALICEVISION_LOG_INFO("Global structure from motion took (s): " << timer.elapsed()); @@ -219,46 +234,87 @@ int main(int argc, char **argv) sfm::generateSfMReport(outSfmData, (fs::path(outDirectory) / "sfm_report.html").string()); - // export to disk computed scene (data & visualizable results) - ALICEVISION_LOG_INFO("Export SfMData to disk"); - - sfmDataIO::Save(outSfmData, outSfMDataFilename, sfmDataIO::ESfMData::ALL); - sfmDataIO::Save(outSfmData, (fs::path(outDirectory) / "cloud_and_poses.ply").string(), sfmDataIO::ESfMData::ALL); - ALICEVISION_LOG_INFO("Structure from Motion results:" << std::endl << "\t- # input images: " << outSfmData.getViews().size() << std::endl << "\t- # cameras calibrated: " << outSfmData.getPoses().size() << std::endl << "\t- # landmarks: " << outSfmData.getLandmarks().size()); - /* - // 
Create panorama buffer - - for(auto& viewIt: sfmData.getViews()) + auto validViews = outSfmData.getValidViews(); + int nbCameras = outSfmData.getValidViews().size(); + if(nbCameras == 0) { - IndexT viewId = viewIt.first; - const View& view = *viewIt.second.get(); - if(!sfmData.isPoseAndIntrinsicDefined(view)) - continue; - - const CameraPose camPose = sfmData.getPose(view); - const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); + ALICEVISION_LOG_ERROR("Failed to get valid cameras from input images."); + return -1; + } - std::string imagePath = view.getImagePath(); - // Image RGB(A)f + { + std::string firstShot_datetime; + IndexT firstShot_viewId = 0; - for(int y = 0; y <; ++y) + for(auto& viewIt: outSfmData.getViews()) { - for(int x = 0; x <; ++x) + IndexT viewId = viewIt.first; + const sfmData::View& view = *viewIt.second.get(); + if(!outSfmData.isPoseAndIntrinsicDefined(&view)) + continue; + std::string datetime = view.getMetadataDateTimeOriginal(); + ALICEVISION_LOG_INFO("Shot datetime candidate: " << datetime << "."); + if(firstShot_datetime.empty() || datetime < firstShot_datetime) { - // equirectangular to unit vector - // unit vector to camera - // is in camera frustrum? 
- // camera to image (pixel coord with distortion) - // add_disto + cam2ima + firstShot_datetime = datetime; + firstShot_viewId = viewId; + ALICEVISION_LOG_INFO("Update shot datetime: " << firstShot_datetime << "."); } } + ALICEVISION_LOG_INFO("First shot datetime: " << firstShot_datetime << "."); + ALICEVISION_LOG_INFO("Reset orientation from: " << firstShot_viewId << "."); + + double S; + Mat3 R = Mat3::Identity(); + Vec3 t; + + if(orientation == 0) + { + // case sfmData::EEXIFOrientation::RIGHT: + R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) + * Eigen::AngleAxisd(degreeToRadian(90.0), Vec3(0,0,1)) + * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); + } + else if(orientation == 1) + { + // case sfmData::EEXIFOrientation::LEFT: + R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) + * Eigen::AngleAxisd(degreeToRadian(270.0), Vec3(0,0,1)) + * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); + } + else if(orientation == 2) + { + // case sfmData::EEXIFOrientation::UPSIDEDOWN: + R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) + * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); + } + else if(orientation == 3) + { + // case sfmData::EEXIFOrientation::NONE: + R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) + * Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,0,1)) + * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); + } + else if(orientation == 4) + { + sfm::computeNewCoordinateSystemFromSingleCamera(outSfmData, std::to_string(firstShot_viewId), S, R, t); + } + + // We only need to correct the rotation + S = 1.0; + t = Vec3::Zero(); + sfm::applyTransform(outSfmData, S, R, t); } - */ + + // export to disk computed scene (data & visualizable results) + ALICEVISION_LOG_INFO("Export SfMData to disk"); + sfmDataIO::Save(outSfmData, outSfMDataFilename, sfmDataIO::ESfMData::ALL); + sfmDataIO::Save(outSfmData, (fs::path(outDirectory) / 
"cloud_and_poses.ply").string(), sfmDataIO::ESfMData::ALL); return EXIT_SUCCESS; } From 191c1ac5e35a7e42dafc4867d2ef3f4ff14777fc Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 12:36:05 +0200 Subject: [PATCH 032/174] [software] add WIP panoramaStitching --- src/software/pipeline/CMakeLists.txt | 11 + .../pipeline/main_panoramaStitching.cpp | 287 ++++++++++++++++++ 2 files changed, 298 insertions(+) create mode 100644 src/software/pipeline/main_panoramaStitching.cpp diff --git a/src/software/pipeline/CMakeLists.txt b/src/software/pipeline/CMakeLists.txt index 666a808d7a..fa7de3cf89 100644 --- a/src/software/pipeline/CMakeLists.txt +++ b/src/software/pipeline/CMakeLists.txt @@ -100,6 +100,17 @@ if(ALICEVISION_BUILD_SFM) aliceVision_sfmDataIO ${Boost_LIBRARIES} ) + alicevision_add_software(aliceVision_panoramaStitching + SOURCE main_panoramaStitching.cpp + FOLDER ${FOLDER_SOFTWARE_PIPELINE} + LINKS aliceVision_system + aliceVision_image + aliceVision_feature + aliceVision_sfm + aliceVision_sfmData + aliceVision_sfmDataIO + ${Boost_LIBRARIES} + ) # Compute structure from known camera poses alicevision_add_software(aliceVision_computeStructureFromKnownPoses diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp new file mode 100644 index 0000000000..65b42ebd16 --- /dev/null +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -0,0 +1,287 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2017 AliceVision contributors. +// Copyright (c) 2012 openMVG contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include +#include + +#include + +// These constants define the current software version. +// They must be updated when the command line is changed. +#define ALICEVISION_SOFTWARE_VERSION_MAJOR 1 +#define ALICEVISION_SOFTWARE_VERSION_MINOR 0 + +using namespace aliceVision; + +namespace po = boost::program_options; +namespace fs = boost::filesystem; + +float sigmoid(float x, float sigwidth, float sigMid) +{ + return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); +} + +/** + * @brief Function to map equirectangular coordinates onto a world unit vector according a spherical projection + */ +namespace SphericalMapping +{ + Vec3 get3DPoint(const Vec2& pos2d, int width, int height) + { + const double x = pos2d(0) - width; + const double y = height/2.0 - pos2d(1); + + const double longitude = M_PI * 2.0 * x / width; // between -PI and PI + const double latitude = M_PI * y / height; // between -PI/2 and PI/2 + + const double Px = cos(latitude) * cos(longitude); + const double Py = cos(latitude) * sin(longitude); + const double Pz = sin(latitude); + + return Vec3(Px, Py, Pz); + } +} + +inline std::istream& operator>>(std::istream& in, std::pair& out) +{ + in >> out.first; + in >> out.second; + return in; +} + +int main(int argc, char **argv) +{ + // command-line parameters + + std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); + std::string sfmDataFilename; + std::string outputPanorama; + + float scaleFactor = 0.2f; + std::pair panoramaSize = {0, 0}; + bool fisheyeMasking = false; + float fisheyeMaskingMargin = 0.05f; + float transitionSize = 10.0f; + + po::options_description allParams( + "Perform panorama stiching of cameras around a nodal point for 360° panorama creation.\n" + "AliceVision PanoramaStitching"); + + po::options_description requiredParams("Required 
parameters"); + requiredParams.add_options() + ("input,i", po::value(&sfmDataFilename)->required(), + "SfMData file.") + ("output,o", po::value(&outputPanorama)->required(), + "Path of the output folder."); + + po::options_description optionalParams("Optional parameters"); + optionalParams.add_options() + ("scaleFactor", po::value(&scaleFactor)->default_value(scaleFactor), + "Scale factor to resize the output resolution (e.g. 0.5 for downscaling to half resolution).") + ("fisheyeMasking", po::value(&fisheyeMasking)->default_value(fisheyeMasking), + "For fisheye images, skip the invalid pixels on the borders.") + ("fisheyeMaskingMargin", po::value(&fisheyeMaskingMargin)->default_value(fisheyeMaskingMargin), + "Margin for fisheye images (in percentage of the image).") + ("transitionSize", po::value(&transitionSize)->default_value(transitionSize), + "Size of the transition between images (in pixels).") + //("panoramaSize", po::value>(&panoramaSize)->default_value(panoramaSize), + // "Image size of the output panorama image file.") + ; + + po::options_description logParams("Log parameters"); + logParams.add_options() + ("verboseLevel,v", po::value(&verboseLevel)->default_value(verboseLevel), + "verbosity level (fatal, error, warning, info, debug, trace)."); + + allParams.add(requiredParams).add(optionalParams).add(logParams); + + po::variables_map vm; + try + { + po::store(po::parse_command_line(argc, argv, allParams), vm); + + if(vm.count("help") || (argc == 1)) + { + ALICEVISION_COUT(allParams); + return EXIT_SUCCESS; + } + po::notify(vm); + } + catch(boost::program_options::required_option& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + catch(boost::program_options::error& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + + ALICEVISION_COUT("Program called with the following parameters:"); + ALICEVISION_COUT(vm); 
+ + // set verbose level + system::Logger::get()->setLogLevel(verboseLevel); + + // load input SfMData scene + sfmData::SfMData sfmData; + if(!sfmDataIO::Load(sfmData, sfmDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS|sfmDataIO::EXTRINSICS))) + { + ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmDataFilename << "' cannot be read."); + return EXIT_FAILURE; + } + + auto validViews = sfmData.getValidViews(); + int nbCameras = sfmData.getValidViews().size(); + + ALICEVISION_LOG_INFO(nbCameras << " loaded from " << sfmDataFilename); + if(nbCameras == 0) + { + ALICEVISION_LOG_ERROR("Failed to get valid cameras from input images."); + return -1; + } + + if(panoramaSize.first == 0 || panoramaSize.second == 0) + { + if(panoramaSize.first != 0 || panoramaSize.second != 0) + { + int s = std::max(panoramaSize.first, panoramaSize.second); + panoramaSize.first = panoramaSize.second = s; + } + else + { + ALICEVISION_LOG_INFO("Automatic panorama size choice."); + // TODO: better heuristic to decide the output resolution + panoramaSize.first = 0; + for(auto& viewIt: sfmData.getViews()) + { + IndexT viewId = viewIt.first; + const sfmData::View& view = *viewIt.second.get(); + if(!sfmData.isPoseAndIntrinsicDefined(&view)) + continue; + panoramaSize.first += view.getWidth(); + panoramaSize.second = std::max(panoramaSize.second, int(view.getHeight())); + ALICEVISION_LOG_INFO("Update output panorama size: " << panoramaSize.first << ", " << panoramaSize.second); + } + } + } + + panoramaSize.first *= scaleFactor; + panoramaSize.second *= scaleFactor; + + ALICEVISION_LOG_INFO("Output panorama size: " << panoramaSize.first << ", " << panoramaSize.second); + + // Create panorama buffer + image::Image imageOut(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + + for(auto& viewIt: sfmData.getViews()) + { + IndexT viewId = viewIt.first; + const sfmData::View& view = *viewIt.second.get(); + 
if(!sfmData.isPoseAndIntrinsicDefined(&view)) + continue; + + const sfmData::CameraPose camPose = sfmData.getPose(view); + const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); + + std::string imagePath = view.getImagePath(); + + // Image RGB(A)f + image::Image imageIn; + + ALICEVISION_LOG_INFO("Reading " << imagePath); + image::readImage(imagePath, imageIn, image::EImageColorSpace::LINEAR); + + + const float maxRadius = std::min(imageIn.Width(), imageIn.Height()) * 0.5f * (1.0 - fisheyeMaskingMargin); + const float blurWidth = maxRadius * transitionSize; + const float blurMid = maxRadius - transitionSize/2.f; + const Vec2i center(imageIn.Width()/2.f, imageIn.Height()/2.f); + + const image::Sampler2d sampler; + + for(int y = 0; y < imageOut.Height(); ++y) + { + for(int x = 0; x < imageOut.Width(); ++x) + { + // equirectangular to unit vector + // Vec3 ray = SphericalMapping::get3DPoint(Vec2(y,x), imageOut.Height(), imageOut.Width()); + Vec3 ray = SphericalMapping::get3DPoint(Vec2(x,y), imageOut.Width(), imageOut.Height()); + + if(camPose.getTransform().depth(ray) < 0) + { + // point is not in front of the camera + continue; + } + + // unit vector to camera + const Vec2 pix_disto = intrinsic.project(camPose.getTransform(), ray, true); + + if( pix_disto(0) < 0 || pix_disto(0) >= imageIn.Width() || + pix_disto(1) < 0 || pix_disto(1) >= imageIn.Height()) + { + // the pixel is out of the image + continue; + } + + const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); + float contribution = 1.0f; + + if(fisheyeMasking) + { + // TODO: if image is fisheye, skip the undefined part of the image + const float dist = std::sqrt((x-center(0)) * (x-center(0)) + (y-center(1)) * (y-center(1))); + contribution = (dist > maxRadius) ? 
0.0 : sigmoid(dist, transitionSize, blurMid); + } + if(contribution > 0.0f) + { + imageOut(y, x).r() = pixel.r() * contribution; + imageOut(y, x).g() = pixel.g() * contribution; + imageOut(y, x).b() = pixel.b() * contribution; + imageOut(y, x).a() = imageOut(y, x).a() + contribution; + } + } + } + } + + for(int y = 0; y < imageOut.Height(); ++y) + { + for(int x = 0; x < imageOut.Width(); ++x) + { + image::RGBAfColor& pixel = imageOut(y, x); + if(pixel.a() > 0.0001f) + { + pixel.r() /= pixel.a(); + pixel.g() /= pixel.a(); + pixel.b() /= pixel.a(); + // pixel.a() = 1.0f; // TMP: keep the alpha with the contribution for debugging + } + } + } + + image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::AUTO); + + return EXIT_SUCCESS; +} From 99f871bd9356e930bfcc583ff60374687ffb822b Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 12:38:29 +0200 Subject: [PATCH 033/174] [sfm] utils: minor change in alignment and add log --- src/aliceVision/sfm/utils/alignment.cpp | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/src/aliceVision/sfm/utils/alignment.cpp b/src/aliceVision/sfm/utils/alignment.cpp index c9b3327b2c..81deeb1837 100644 --- a/src/aliceVision/sfm/utils/alignment.cpp +++ b/src/aliceVision/sfm/utils/alignment.cpp @@ -175,7 +175,6 @@ void computeNewCoordinateSystemFromSingleCamera(const sfmData::SfMData& sfmData, } } - if(viewId == -1) throw std::invalid_argument("The camera name \"" + camName + "\" is not found in the sfmData."); else if(!sfmData.isPoseAndIntrinsicDefined(viewId)) @@ -184,22 +183,31 @@ void computeNewCoordinateSystemFromSingleCamera(const sfmData::SfMData& sfmData, switch(orientation) { case sfmData::EEXIFOrientation::RIGHT: - out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(90.0), Vec3(0,0,1)) * sfmData.getAbsolutePose(viewId).getTransform().rotation(); + ALICEVISION_LOG_TRACE("computeNewCoordinateSystemFromSingleCamera 
orientation: RIGHT"); + out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) + * Eigen::AngleAxisd(degreeToRadian(90.0), Vec3(0,0,1)) + * sfmData.getAbsolutePose(viewId).getTransform().rotation(); break; case sfmData::EEXIFOrientation::LEFT: - out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(270.0), Vec3(0,0,1)) * sfmData.getAbsolutePose(viewId).getTransform().rotation(); + ALICEVISION_LOG_TRACE("computeNewCoordinateSystemFromSingleCamera orientation: LEFT"); + out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) + * Eigen::AngleAxisd(degreeToRadian(270.0), Vec3(0,0,1)) + * sfmData.getAbsolutePose(viewId).getTransform().rotation(); break; case sfmData::EEXIFOrientation::UPSIDEDOWN: - out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * sfmData.getAbsolutePose(viewId).getTransform().rotation(); + ALICEVISION_LOG_TRACE("computeNewCoordinateSystemFromSingleCamera orientation: UPSIDEDOWN"); + out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) + * sfmData.getAbsolutePose(viewId).getTransform().rotation(); break; case sfmData::EEXIFOrientation::NONE: - out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,0,1)) * sfmData.getAbsolutePose(viewId).getTransform().rotation(); - break; default: - out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,0,1)) * sfmData.getAbsolutePose(viewId).getTransform().rotation(); + ALICEVISION_LOG_TRACE("computeNewCoordinateSystemFromSingleCamera orientation: NONE"); + out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) + * Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,0,1)) + * sfmData.getAbsolutePose(viewId).getTransform().rotation(); break; } - + out_t = - out_R * sfmData.getAbsolutePose(viewId).getTransform().center(); out_S = 1; } From a231d7a9d17ea802cfbe3e449db8ba0581138c0b Mon Sep 17 00:00:00 2001 From: Fabien Castan 
Date: Wed, 18 Sep 2019 12:39:14 +0200 Subject: [PATCH 034/174] [camera] add backproject utility function --- src/aliceVision/camera/IntrinsicBase.hpp | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/aliceVision/camera/IntrinsicBase.hpp b/src/aliceVision/camera/IntrinsicBase.hpp index 5dff0b76a4..82a54058b1 100644 --- a/src/aliceVision/camera/IntrinsicBase.hpp +++ b/src/aliceVision/camera/IntrinsicBase.hpp @@ -124,6 +124,20 @@ struct IntrinsicBase return this->cam2ima( X.head<2>()/X(2) ); } + inline Vec3 backproject(const geometry::Pose3& pose, const Vec2& pt2D, double depth, bool applyUndistortion = true) const + { + Vec2 pt2DCam; + if (applyUndistortion && this->have_disto()) // apply disto & intrinsics + pt2DCam = this->ima2cam(this->remove_disto(pt2D)); + else + pt2DCam = this->ima2cam(pt2D); + + const Vec3 pt2DCamN = pt2DCam.homogeneous().normalized(); + const Vec3 pt3d = depth * pt2DCamN; + const Vec3 output = pose.inverse()(pt3d); + return output; + } + /** * @brief Compute the residual between the 3D projected point X and an image observation x * @param[in] pose The pose From cbb4ed96fc2c0a667e43f1c43fd2e0922e543888 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 15:29:11 +0200 Subject: [PATCH 035/174] [sfm] alignment: fix orientation read from metadata --- src/aliceVision/sfm/utils/alignment.cpp | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/aliceVision/sfm/utils/alignment.cpp b/src/aliceVision/sfm/utils/alignment.cpp index 81deeb1837..e32202537b 100644 --- a/src/aliceVision/sfm/utils/alignment.cpp +++ b/src/aliceVision/sfm/utils/alignment.cpp @@ -145,9 +145,8 @@ void computeNewCoordinateSystemFromSingleCamera(const sfmData::SfMData& sfmData, double& out_S, Mat3& out_R, Vec3& out_t) -{ +{ IndexT viewId = -1; - sfmData::EEXIFOrientation orientation = sfmData::EEXIFOrientation::UNKNOWN; try { @@ -166,7 +165,6 @@ void computeNewCoordinateSystemFromSingleCamera(const sfmData::SfMData& 
sfmData, { std::string path = view.second->getImagePath(); std::size_t found = path.find(camName); - orientation = view.second->getMetadataOrientation(); if (found!=std::string::npos) { viewId = view.second->getViewId(); @@ -180,6 +178,9 @@ void computeNewCoordinateSystemFromSingleCamera(const sfmData::SfMData& sfmData, else if(!sfmData.isPoseAndIntrinsicDefined(viewId)) throw std::invalid_argument("The camera \"" + camName + "\" exists in the sfmData but is not reconstructed."); + sfmData::EEXIFOrientation orientation = sfmData.getView(viewId).getMetadataOrientation(); + ALICEVISION_LOG_TRACE("computeNewCoordinateSystemFromSingleCamera orientation: " << int(orientation)); + switch(orientation) { case sfmData::EEXIFOrientation::RIGHT: @@ -200,8 +201,9 @@ void computeNewCoordinateSystemFromSingleCamera(const sfmData::SfMData& sfmData, * sfmData.getAbsolutePose(viewId).getTransform().rotation(); break; case sfmData::EEXIFOrientation::NONE: - default: ALICEVISION_LOG_TRACE("computeNewCoordinateSystemFromSingleCamera orientation: NONE"); + default: + ALICEVISION_LOG_TRACE("computeNewCoordinateSystemFromSingleCamera orientation: default"); out_R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,0,1)) * sfmData.getAbsolutePose(viewId).getTransform().rotation(); From 258e104d1a7358eee6fa833608337d9fca53d701 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 15:31:25 +0200 Subject: [PATCH 036/174] [software] panoramaStitching: use image metadata orientation in the heuristic for image size choice --- .../pipeline/main_panoramaStitching.cpp | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 65b42ebd16..d61cf62dc2 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -165,6 +165,7 @@ int main(int argc, 
char **argv) if(panoramaSize.first == 0 || panoramaSize.second == 0) { + sfmData::EEXIFOrientation orientation = sfmData.getView(*validViews.begin()).getMetadataOrientation(); if(panoramaSize.first != 0 || panoramaSize.second != 0) { int s = std::max(panoramaSize.first, panoramaSize.second); @@ -181,8 +182,20 @@ int main(int argc, char **argv) const sfmData::View& view = *viewIt.second.get(); if(!sfmData.isPoseAndIntrinsicDefined(&view)) continue; - panoramaSize.first += view.getWidth(); - panoramaSize.second = std::max(panoramaSize.second, int(view.getHeight())); + + if(orientation == sfmData::EEXIFOrientation::RIGHT || + orientation == sfmData::EEXIFOrientation::LEFT || + orientation == sfmData::EEXIFOrientation::RIGHT_REVERSED || + orientation == sfmData::EEXIFOrientation::LEFT_REVERSED) + { + panoramaSize.first += view.getHeight(); + panoramaSize.second = std::max(panoramaSize.second, int(view.getWidth())); + } + else + { + panoramaSize.first += view.getWidth(); + panoramaSize.second = std::max(panoramaSize.second, int(view.getHeight())); + } ALICEVISION_LOG_INFO("Update output panorama size: " << panoramaSize.first << ", " << panoramaSize.second); } } From 39a778339a309a27cfce49569f55bfec1bb956fb Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 15:32:21 +0200 Subject: [PATCH 037/174] [software] panoramaStitching: fix masking and contributions --- .../pipeline/main_panoramaStitching.cpp | 25 ++++++++++++------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index d61cf62dc2..18abdf951e 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -259,21 +259,28 @@ int main(int argc, char **argv) continue; } - const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); float contribution = 1.0f; - if(fisheyeMasking) { - // TODO: if image is fisheye, skip 
the undefined part of the image - const float dist = std::sqrt((x-center(0)) * (x-center(0)) + (y-center(1)) * (y-center(1))); - contribution = (dist > maxRadius) ? 0.0 : sigmoid(dist, transitionSize, blurMid); + const float dist = std::sqrt((pix_disto(0)-center(0)) * (pix_disto(0)-center(0)) + (pix_disto(1)-center(1)) * (pix_disto(1)-center(1))); + if(dist > maxRadius) + { + // contribution = 0.0f; + continue; + } + else + { + contribution = sigmoid(dist, transitionSize, blurMid); + } } + + const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); if(contribution > 0.0f) { - imageOut(y, x).r() = pixel.r() * contribution; - imageOut(y, x).g() = pixel.g() * contribution; - imageOut(y, x).b() = pixel.b() * contribution; - imageOut(y, x).a() = imageOut(y, x).a() + contribution; + imageOut(y, x).r() += pixel.r() * contribution; + imageOut(y, x).g() += pixel.g() * contribution; + imageOut(y, x).b() += pixel.b() * contribution; + imageOut(y, x).a() += contribution; } } } From b6044552779c35a5d6483f7158cb009cb0c23067 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 15:33:03 +0200 Subject: [PATCH 038/174] [software] panoramaStitching: ad debugging param to compute only a subset of images --- src/software/pipeline/main_panoramaStitching.cpp | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 18abdf951e..ae08cbf5cc 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -82,6 +82,8 @@ int main(int argc, char **argv) float fisheyeMaskingMargin = 0.05f; float transitionSize = 10.0f; + int maxNbImages = 0; + po::options_description allParams( "Perform panorama stiching of cameras around a nodal point for 360° panorama creation.\n" "AliceVision PanoramaStitching"); @@ -103,6 +105,8 @@ int main(int argc, char **argv) "Margin for fisheye images (in 
percentage of the image).") ("transitionSize", po::value(&transitionSize)->default_value(transitionSize), "Size of the transition between images (in pixels).") + ("maxNbImages", po::value(&maxNbImages)->default_value(maxNbImages), + "For debug only: Max number of images to merge.") //("panoramaSize", po::value>(&panoramaSize)->default_value(panoramaSize), // "Image size of the output panorama image file.") ; @@ -209,6 +213,7 @@ int main(int argc, char **argv) // Create panorama buffer image::Image imageOut(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + int imageIndex = 0; for(auto& viewIt: sfmData.getViews()) { IndexT viewId = viewIt.first; @@ -216,17 +221,21 @@ int main(int argc, char **argv) if(!sfmData.isPoseAndIntrinsicDefined(&view)) continue; + if(maxNbImages != 0 && imageIndex >= maxNbImages) + break; + ++imageIndex; + const sfmData::CameraPose camPose = sfmData.getPose(view); const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); std::string imagePath = view.getImagePath(); - // Image RGB(A)f + // Image RGB image::Image imageIn; ALICEVISION_LOG_INFO("Reading " << imagePath); image::readImage(imagePath, imageIn, image::EImageColorSpace::LINEAR); - + ALICEVISION_LOG_INFO(" - " << imageIn.Width() << "x" << imageIn.Height()); const float maxRadius = std::min(imageIn.Width(), imageIn.Height()) * 0.5f * (1.0 - fisheyeMaskingMargin); const float blurWidth = maxRadius * transitionSize; From a9d51d1ed1fd0a674fe1515e45142fc11188ce48 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 15:34:42 +0200 Subject: [PATCH 039/174] [software] panoramaEstimation: remove colorize and change some log --- .../pipeline/main_panoramaEstimation.cpp | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/src/software/pipeline/main_panoramaEstimation.cpp b/src/software/pipeline/main_panoramaEstimation.cpp index 449fc09ef6..88e4ddecf9 100644 --- 
a/src/software/pipeline/main_panoramaEstimation.cpp +++ b/src/software/pipeline/main_panoramaEstimation.cpp @@ -194,7 +194,7 @@ int main(int argc, char **argv) if(!fs::exists(outDirectory)) fs::create_directory(outDirectory); - // global SfM reconstruction process + // Panorama reconstruction process aliceVision::system::Timer timer; sfm::ReconstructionEngine_panorama sfmEngine( inputSfmData, @@ -224,17 +224,14 @@ int main(int argc, char **argv) sfmEngine.getSfMData().setAbsolutePath(outSfMDataFilename); } - // get the color for the 3D points - sfmEngine.colorize(); - sfmData::SfMData& outSfmData = sfmEngine.getSfMData(); - ALICEVISION_LOG_INFO("Global structure from motion took (s): " << timer.elapsed()); + ALICEVISION_LOG_INFO("Panorama solve took (s): " << timer.elapsed()); ALICEVISION_LOG_INFO("Generating HTML report..."); sfm::generateSfMReport(outSfmData, (fs::path(outDirectory) / "sfm_report.html").string()); - ALICEVISION_LOG_INFO("Structure from Motion results:" << std::endl + ALICEVISION_LOG_INFO("Panorama results:" << std::endl << "\t- # input images: " << outSfmData.getViews().size() << std::endl << "\t- # cameras calibrated: " << outSfmData.getPoses().size() << std::endl << "\t- # landmarks: " << outSfmData.getLandmarks().size()); @@ -258,50 +255,52 @@ int main(int argc, char **argv) if(!outSfmData.isPoseAndIntrinsicDefined(&view)) continue; std::string datetime = view.getMetadataDateTimeOriginal(); - ALICEVISION_LOG_INFO("Shot datetime candidate: " << datetime << "."); + ALICEVISION_LOG_TRACE("Shot datetime candidate: " << datetime << "."); if(firstShot_datetime.empty() || datetime < firstShot_datetime) { firstShot_datetime = datetime; firstShot_viewId = viewId; - ALICEVISION_LOG_INFO("Update shot datetime: " << firstShot_datetime << "."); + ALICEVISION_LOG_TRACE("Update shot datetime: " << firstShot_datetime << "."); } } ALICEVISION_LOG_INFO("First shot datetime: " << firstShot_datetime << "."); - ALICEVISION_LOG_INFO("Reset orientation from: " << 
firstShot_viewId << "."); + ALICEVISION_LOG_TRACE("Reset orientation to view: " << firstShot_viewId << "."); double S; Mat3 R = Mat3::Identity(); Vec3 t; + ALICEVISION_LOG_INFO("orientation: " << orientation); if(orientation == 0) { - // case sfmData::EEXIFOrientation::RIGHT: + ALICEVISION_LOG_INFO("Orientation: RIGHT"); R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(90.0), Vec3(0,0,1)) * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); } else if(orientation == 1) { - // case sfmData::EEXIFOrientation::LEFT: + ALICEVISION_LOG_INFO("Orientation: LEFT"); R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(270.0), Vec3(0,0,1)) * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); } else if(orientation == 2) { - // case sfmData::EEXIFOrientation::UPSIDEDOWN: + ALICEVISION_LOG_INFO("Orientation: UPSIDEDOWN"); R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); } else if(orientation == 3) { - // case sfmData::EEXIFOrientation::NONE: + ALICEVISION_LOG_INFO("Orientation: NONE"); R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,0,1)) * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); } else if(orientation == 4) { + ALICEVISION_LOG_INFO("Orientation: FROM IMAGES"); sfm::computeNewCoordinateSystemFromSingleCamera(outSfmData, std::to_string(firstShot_viewId), S, R, t); } From 4562124deb21bc7b3e982e8c00f072f7688c7171 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 15:48:58 +0200 Subject: [PATCH 040/174] [software] panoramaEstimation: change debug param code values --- .../pipeline/main_panoramaEstimation.cpp | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/software/pipeline/main_panoramaEstimation.cpp 
b/src/software/pipeline/main_panoramaEstimation.cpp index 88e4ddecf9..6ff5c4b722 100644 --- a/src/software/pipeline/main_panoramaEstimation.cpp +++ b/src/software/pipeline/main_panoramaEstimation.cpp @@ -272,37 +272,37 @@ int main(int argc, char **argv) ALICEVISION_LOG_INFO("orientation: " << orientation); if(orientation == 0) + { + ALICEVISION_LOG_INFO("Orientation: FROM IMAGES"); + sfm::computeNewCoordinateSystemFromSingleCamera(outSfmData, std::to_string(firstShot_viewId), S, R, t); + } + else if(orientation == 1) { ALICEVISION_LOG_INFO("Orientation: RIGHT"); R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(90.0), Vec3(0,0,1)) * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); } - else if(orientation == 1) + else if(orientation == 2) { ALICEVISION_LOG_INFO("Orientation: LEFT"); R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(270.0), Vec3(0,0,1)) * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); } - else if(orientation == 2) + else if(orientation == 3) { ALICEVISION_LOG_INFO("Orientation: UPSIDEDOWN"); R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); } - else if(orientation == 3) + else if(orientation == 4) { ALICEVISION_LOG_INFO("Orientation: NONE"); R = Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(180.0), Vec3(0,0,1)) * outSfmData.getAbsolutePose(firstShot_viewId).getTransform().rotation(); } - else if(orientation == 4) - { - ALICEVISION_LOG_INFO("Orientation: FROM IMAGES"); - sfm::computeNewCoordinateSystemFromSingleCamera(outSfmData, std::to_string(firstShot_viewId), S, R, t); - } // We only need to correct the rotation S = 1.0; From 5f7f08e73c7efe181378a6a297c0b118436749dc Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 17:23:43 +0200 Subject: [PATCH 041/174] [software] 
panoramaStitching: change debug option to stitch only a subset of images --- .../pipeline/main_panoramaStitching.cpp | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index ae08cbf5cc..dbbd94607b 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -82,7 +82,8 @@ int main(int argc, char **argv) float fisheyeMaskingMargin = 0.05f; float transitionSize = 10.0f; - int maxNbImages = 0; + int debugSubsetStart = 0; + int debugSubsetSize = 0; po::options_description allParams( "Perform panorama stiching of cameras around a nodal point for 360° panorama creation.\n" @@ -105,8 +106,10 @@ int main(int argc, char **argv) "Margin for fisheye images (in percentage of the image).") ("transitionSize", po::value(&transitionSize)->default_value(transitionSize), "Size of the transition between images (in pixels).") - ("maxNbImages", po::value(&maxNbImages)->default_value(maxNbImages), - "For debug only: Max number of images to merge.") + ("debugSubsetStart", po::value(&debugSubsetStart)->default_value(debugSubsetStart), + "For debug only: Index of first image of the subset to merge.") + ("debugSubsetSize", po::value(&debugSubsetSize)->default_value(debugSubsetSize), + "For debug only: Number of images in the subset to merge.") //("panoramaSize", po::value>(&panoramaSize)->default_value(panoramaSize), // "Image size of the output panorama image file.") ; @@ -221,8 +224,18 @@ int main(int argc, char **argv) if(!sfmData.isPoseAndIntrinsicDefined(&view)) continue; - if(maxNbImages != 0 && imageIndex >= maxNbImages) - break; + if(debugSubsetSize != 0) + { + if(imageIndex < debugSubsetStart) + { + ++imageIndex; + continue; + } + else if(imageIndex >= debugSubsetStart + debugSubsetSize) + { + break; + } + } ++imageIndex; const sfmData::CameraPose camPose = sfmData.getPose(view); @@ -249,7 
+262,6 @@ int main(int argc, char **argv) for(int x = 0; x < imageOut.Width(); ++x) { // equirectangular to unit vector - // Vec3 ray = SphericalMapping::get3DPoint(Vec2(y,x), imageOut.Height(), imageOut.Width()); Vec3 ray = SphericalMapping::get3DPoint(Vec2(x,y), imageOut.Width(), imageOut.Height()); if(camPose.getTransform().depth(ray) < 0) @@ -305,7 +317,7 @@ int main(int argc, char **argv) pixel.r() /= pixel.a(); pixel.g() /= pixel.a(); pixel.b() /= pixel.a(); - // pixel.a() = 1.0f; // TMP: keep the alpha with the contribution for debugging + pixel.a() = 1.0f; // TMP: comment to keep the alpha with the number of contribution for debugging } } } From 55b73a88d41ad219ace0960f07d38ee2364b9fa1 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 18 Sep 2019 20:59:40 +0200 Subject: [PATCH 042/174] [software] panoramaEstimation: add WIP option to refine --- .../ReconstructionEngine_panorama.cpp | 95 ++++++------------- .../ReconstructionEngine_panorama.hpp | 1 + .../pipeline/main_panoramaEstimation.cpp | 19 +++- 3 files changed, 47 insertions(+), 68 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 154df09002..75e1e90ebc 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -414,7 +414,6 @@ bool ReconstructionEngine_panorama::Compute_Initial_Structure(matching::Pairwise // Use triplet validated matches tracksBuilder.build(tripletWise_matches); #endif - tracksBuilder.filter(3); TracksMap map_selectedTracks; // reconstructed track (visibility per 3D point) tracksBuilder.exportToSTL(map_selectedTracks); @@ -438,24 +437,22 @@ bool ReconstructionEngine_panorama::Compute_Initial_Structure(matching::Pairwise const PointFeature & pt = _featuresPerView->getFeatures(imaIndex, track.descType)[featIndex]; obs[imaIndex] = 
Observation(pt.coords().cast(), featIndex); - /* { // back project a feature from the first observation const sfmData::View * view = _sfmData.views.at(imaIndex).get(); if (_sfmData.isPoseAndIntrinsicDefined(view)) { - const IntrinsicBase * cam = _sfmData.getIntrinsics().at(view->getIntrinsicId()).get(); + const IntrinsicBase& cam = *_sfmData.getIntrinsics().at(view->getIntrinsicId()).get(); const Pose3 pose = _sfmData.getPose(*view).getTransform(); - Vec3 v = cam.backproject(pose, pt.coords().cast(), 1.0, true); + Vec3 v = cam.backproject(pose, pt.coords().cast(), 10.0, true); pos3d.push_back(v); } - }*/ + } } - /* newLandmark.X = Vec3::Zero(); for(const Vec3& p: pos3d) newLandmark.X += p; - newLandmark.X /= pos.size();*/ + newLandmark.X /= pos3d.size(); } ALICEVISION_LOG_DEBUG("Track stats"); @@ -487,25 +484,12 @@ bool ReconstructionEngine_panorama::Compute_Initial_Structure(matching::Pairwise } } - // Compute 3D position of the landmark of the structure by triangulation of the observations + // Export initial structure + if (!_loggingFile.empty()) { - aliceVision::system::Timer timer; -/* - const IndexT trackCountBefore = _sfmData.getLandmarks().size(); - StructureComputation_blind structure_estimator(true); - structure_estimator.triangulate(_sfmData); - - ALICEVISION_LOG_DEBUG("#removed tracks (invalid triangulation): " << - trackCountBefore - IndexT(_sfmData.getLandmarks().size())); - ALICEVISION_LOG_DEBUG(" Triangulation took (s): " << timer.elapsed()); -*/ - // Export initial structure - if (!_loggingFile.empty()) - { - sfmDataIO::Save(_sfmData, - (fs::path(_loggingFile).parent_path() / "initial_structure.ply").string(), - sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); - } + sfmDataIO::Save(_sfmData, + (fs::path(_loggingFile).parent_path() / "initial_structure.ply").string(), + sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); } return !_sfmData.structure.empty(); } @@ -519,60 +503,39 @@ bool 
ReconstructionEngine_panorama::Adjust() BundleAdjustmentCeres BA(options); // - refine only Structure and translations - bool success = BA.adjust(_sfmData, BundleAdjustment::REFINE_TRANSLATION | BundleAdjustment::REFINE_STRUCTURE); + bool success = BA.adjust(_sfmData, BundleAdjustment::REFINE_STRUCTURE); // BundleAdjustment::REFINE_ROTATION | if(success) { - if(!_loggingFile.empty()) - sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_00_refine_T_Xi.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); - - // refine only structure and rotations & translations - success = BA.adjust(_sfmData, BundleAdjustment::REFINE_ROTATION | BundleAdjustment::REFINE_TRANSLATION | BundleAdjustment::REFINE_STRUCTURE); - - if(success && !_loggingFile.empty()) - sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_01_refine_RT_Xi.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); + ALICEVISION_LOG_DEBUG("Rotations successfully refined."); + } + else + { + ALICEVISION_LOG_DEBUG("Failed to refine the rotations only."); } if(success && !_lockAllIntrinsics) { - // refine all: Structure, motion:{rotations, translations} and optics:{intrinsics} - success = BA.adjust(_sfmData, BundleAdjustment::REFINE_ALL); - if(success && !_loggingFile.empty()) - sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_02_refine_KRT_Xi.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); + success = BA.adjust(_sfmData, BundleAdjustment::REFINE_STRUCTURE | BundleAdjustment::REFINE_INTRINSICS_FOCAL | BundleAdjustment::REFINE_INTRINSICS_DISTORTION); // BundleAdjustment::REFINE_ROTATION | REFINE_INTRINSICS_OPTICALCENTER_ALWAYS + if(success) + { + ALICEVISION_LOG_DEBUG("Rotations and intrinsics successfully refined."); + } + else + { + ALICEVISION_LOG_DEBUG("Failed to refine the rotations/intrinsics."); + } } - // Remove outliers (max_angle, residual 
error) + /* + // Remove outliers (residual error) const size_t pointcount_initial = _sfmData.structure.size(); RemoveOutliers_PixelResidualError(_sfmData, 4.0); const size_t pointcount_pixelresidual_filter = _sfmData.structure.size(); - RemoveOutliers_AngleError(_sfmData, 2.0); - const size_t pointcount_angular_filter = _sfmData.structure.size(); + ALICEVISION_LOG_DEBUG("Outlier removal (remaining points):\n" "\t- # landmarks initial: " << pointcount_initial << "\n" - "\t- # landmarks after pixel residual filter: " << pointcount_pixelresidual_filter << "\n" - "\t- # landmarks after angular filter: " << pointcount_angular_filter); - - if(!_loggingFile.empty()) - sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_03_outlier_removed.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); - - // check that poses & intrinsic cover some measures (after outlier removal) - const IndexT minPointPerPose = 12; // 6 min - const IndexT minTrackLength = 3; // 2 min todo param@L - - if (eraseUnstablePosesAndObservations(_sfmData, minPointPerPose, minTrackLength)) - { - // TODO: must ensure that track graph is producing a single connected component - - const size_t pointcount_cleaning = _sfmData.structure.size(); - ALICEVISION_LOG_DEBUG("# landmarks after eraseUnstablePosesAndObservations: " << pointcount_cleaning); - } - - BundleAdjustment::ERefineOptions refineOptions = BundleAdjustment::REFINE_ROTATION | BundleAdjustment::REFINE_TRANSLATION | BundleAdjustment::REFINE_STRUCTURE; - if(!_lockAllIntrinsics) - refineOptions |= BundleAdjustment::REFINE_INTRINSICS_ALL; - success = BA.adjust(_sfmData, refineOptions); - - if(success && !_loggingFile.empty()) - sfmDataIO::Save(_sfmData, (fs::path(_loggingFile).parent_path() / "structure_04_outlier_removed.ply").string(), sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); + "\t- # landmarks after pixel residual filter: " << pointcount_pixelresidual_filter); + */ return 
success; } diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp index 6348f213b4..bfe39b2bc8 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp @@ -148,6 +148,7 @@ class ReconstructionEngine_panorama : public ReconstructionEngine bool Compute_Global_Rotations(const aliceVision::rotationAveraging::RelativeRotations& vec_relatives_R, HashMap& map_globalR); +public: /// Compute the initial structure of the scene bool Compute_Initial_Structure(matching::PairwiseMatches& tripletWise_matches); diff --git a/src/software/pipeline/main_panoramaEstimation.cpp b/src/software/pipeline/main_panoramaEstimation.cpp index 6ff5c4b722..c67272c0f3 100644 --- a/src/software/pipeline/main_panoramaEstimation.cpp +++ b/src/software/pipeline/main_panoramaEstimation.cpp @@ -58,6 +58,7 @@ int main(int argc, char **argv) std::string describerTypesName = feature::EImageDescriberType_enumToString(feature::EImageDescriberType::SIFT); sfm::ERotationAveragingMethod rotationAveragingMethod = sfm::ROTATION_AVERAGING_L2; sfm::ERelativeRotationMethod relativeRotationMethod = sfm::RELATIVE_ROTATION_FROM_E; + bool refine = true; bool lockAllIntrinsics = false; int orientation = 0; @@ -80,8 +81,6 @@ int main(int argc, char **argv) optionalParams.add_options() ("outSfMDataFilename", po::value(&outSfMDataFilename)->default_value(outSfMDataFilename), "Filename of the output SfMData file.") - ("orientation", po::value(&orientation)->default_value(orientation), - "Orientation") ("describerTypes,d", po::value(&describerTypesName)->default_value(describerTypesName), feature::EImageDescriberType_informations().c_str()) ("rotationAveraging", po::value(&rotationAveragingMethod)->default_value(rotationAveragingMethod), @@ -90,6 +89,10 @@ int main(int argc, char **argv) ("relativeRotation", 
po::value(&relativeRotationMethod)->default_value(relativeRotationMethod), "* from essential matrix" "* from homography matrix") + ("orientation", po::value(&orientation)->default_value(orientation), + "Orientation") + ("refine", po::value(&refine)->default_value(refine), + "Refine cameras with a Bundle Adjustment") ("lockAllIntrinsics", po::value(&lockAllIntrinsics)->default_value(lockAllIntrinsics), "Force lock of all camera intrinsic parameters, so they will not be refined during Bundle Adjustment."); @@ -217,6 +220,9 @@ int main(int argc, char **argv) if(!sfmEngine.process()) return EXIT_FAILURE; + sfmEngine.Compute_Initial_Structure(pairwiseMatches); + sfmEngine.colorize(); + // set featuresFolders and matchesFolders relative paths { sfmEngine.getSfMData().addFeaturesFolders(featuresFolders); @@ -224,6 +230,15 @@ int main(int argc, char **argv) sfmEngine.getSfMData().setAbsolutePath(outSfMDataFilename); } + if(refine) + { + sfmDataIO::Save(sfmEngine.getSfMData(), (fs::path(outDirectory) / "BA_before.abc").string(), sfmDataIO::ESfMData::ALL); + + sfmEngine.Adjust(); + + sfmDataIO::Save(sfmEngine.getSfMData(), (fs::path(outDirectory) / "BA_after.abc").string(), sfmDataIO::ESfMData::ALL); + } + sfmData::SfMData& outSfmData = sfmEngine.getSfMData(); ALICEVISION_LOG_INFO("Panorama solve took (s): " << timer.elapsed()); From 5bcf34d99264c2e1f65bddfd7bc51931c06fb246 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Tue, 24 Sep 2019 12:47:08 +0200 Subject: [PATCH 043/174] modify pretty print such that it is not used for Eigen types. 
--- .gitignore | 1 + src/aliceVision/prettyprint.hpp | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index ecab7c43f8..8d649169ff 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,4 @@ Production *.tgz *.tar* +.vscode/settings.json diff --git a/src/aliceVision/prettyprint.hpp b/src/aliceVision/prettyprint.hpp index 01846d73a1..557be4773f 100644 --- a/src/aliceVision/prettyprint.hpp +++ b/src/aliceVision/prettyprint.hpp @@ -427,12 +427,20 @@ bucket_print(const T & m, typename T::size_type n) // Main magic entry point: An overload snuck into namespace std. // Can we do better? +namespace Eigen { +template +class EigenBase; +} + namespace std { // Prints a container to the stream using default delimiters + template + struct is_eigen_object : std::is_base_of, T> + {}; template - inline typename enable_if< ::pretty_print::is_container::value, + inline typename enable_if< ::pretty_print::is_container::value && !is_eigen_object::value, basic_ostream &>::type operator<<(basic_ostream & stream, const T & container) { From a35fc75fef90d51601303ac03aa2c183df0f6040 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Tue, 24 Sep 2019 12:49:57 +0200 Subject: [PATCH 044/174] Function transforming equirectangular coordinates to spherical coordinates was erroneous. 
--- src/software/pipeline/main_panoramaStitching.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index dbbd94607b..63c3e23137 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -53,9 +53,9 @@ namespace SphericalMapping const double longitude = M_PI * 2.0 * x / width; // between -PI and PI const double latitude = M_PI * y / height; // between -PI/2 and PI/2 - const double Px = cos(latitude) * cos(longitude); - const double Py = cos(latitude) * sin(longitude); - const double Pz = sin(latitude); + const double Px = cos(latitude) * sin(longitude); + const double Py = sin(latitude); + const double Pz = cos(latitude) * cos(longitude); return Vec3(Px, Py, Pz); } From 6fea9d124b295126771bb042fac6e9df5ea11acf Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Tue, 24 Sep 2019 12:55:06 +0200 Subject: [PATCH 045/174] Doing a cout on a static array doesn't work on my compiler. I just did the cout explicit. 
--- src/software/convert/main_convertLDRToHDR.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 68204b6157..35cf14b4c1 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -288,7 +288,7 @@ void recoverSourceImage(const image::Image& hdrImage, hdr::rgb float offset[3]; for(int i=0; i<3; ++i) offset[i] = std::abs(meanRecovered[i] - meanVal[i]); - ALICEVISION_LOG_INFO("Offset between target source image and recovered from hdr = " << offset); + ALICEVISION_LOG_INFO("Offset between target source image and recovered from hdr = " << offset[0] << " " << offset[1] << " " << offset[2]); } From 87b63a711e82183d9441576fedab21385b9fbb67 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Tue, 24 Sep 2019 13:39:36 +0200 Subject: [PATCH 046/174] DLT input coordinates have to be normalized before use. --- .../multiview/homographyKernelSolver.cpp | 93 ++++++++++++++----- .../multiview/homographyKernelSolver.hpp | 18 ++-- 2 files changed, 80 insertions(+), 31 deletions(-) diff --git a/src/aliceVision/multiview/homographyKernelSolver.cpp b/src/aliceVision/multiview/homographyKernelSolver.cpp index 17921982ba..9d88b9fa22 100644 --- a/src/aliceVision/multiview/homographyKernelSolver.cpp +++ b/src/aliceVision/multiview/homographyKernelSolver.cpp @@ -7,6 +7,7 @@ // You can obtain one at https://mozilla.org/MPL/2.0/. #include "homographyKernelSolver.hpp" +#include namespace aliceVision { namespace homography { @@ -14,37 +15,62 @@ namespace kernel { /// Setup the Direct Linear Transform. /// Use template in order to support fixed or dynamic sized matrix. 
-/// Allow solve H as homogeneous(x2) = H homogeneous(x1) +/// Allow solve H as homogeneous(p2) = H homogeneous(p1) template -void BuildActionMatrix(Matrix & L, const Mat &x, const Mat &y) { +void BuildActionMatrix(Matrix & L, const Mat &p1, const Mat &p2) { - const Mat::Index n = x.cols(); + const Mat::Index n = p1.cols(); for (Mat::Index i = 0; i < n; ++i) { Mat::Index j = 2 * i; - L(j, 0) = x(0, i); - L(j, 1) = x(1, i); + L(j, 0) = p1(0, i); + L(j, 1) = p1(1, i); L(j, 2) = 1.0; - L(j, 6) = -y(0, i) * x(0, i); - L(j, 7) = -y(0, i) * x(1, i); - L(j, 8) = -y(0, i); + L(j, 6) = -p2(0, i) * p1(0, i); + L(j, 7) = -p2(0, i) * p1(1, i); + L(j, 8) = -p2(0, i); ++j; - L(j, 3) = x(0, i); - L(j, 4) = x(1, i); + L(j, 3) = p1(0, i); + L(j, 4) = p1(1, i); L(j, 5) = 1.0; - L(j, 6) = -y(1, i) * x(0, i); - L(j, 7) = -y(1, i) * x(1, i); - L(j, 8) = -y(1, i); + L(j, 6) = -p2(1, i) * p1(0, i); + L(j, 7) = -p2(1, i) * p1(1, i); + L(j, 8) = -p2(1, i); } } -void FourPointSolver::Solve(const Mat &x, const Mat &y, vector *Hs) { - assert(2 == x.rows()); - assert(4 <= x.cols()); - assert(x.rows() == y.rows()); - assert(x.cols() == y.cols()); +void FourPointSolver::Solve(const Mat &p1, const Mat &p2, vector *Hs) { + assert(2 == p1.rows()); + assert(4 <= p2.cols()); + assert(p1.rows() == p2.rows()); + assert(p1.cols() == p2.cols()); - Mat::Index n = x.cols(); + Mat::Index n = p1.cols(); + + /* Normalize input */ + Mat p1_local = p1; + Mat p2_local = p2; + + auto p1x = p1_local.block(0, 0, 1, n); + auto p1y = p1_local.block(1, 0, 1, n); + auto p2x = p2_local.block(0, 0, 1, n); + auto p2y = p2_local.block(1, 0, 1, n); + + double p1x_mean = p1x.mean(); + double p1y_mean = p1y.mean(); + double p2x_mean = p2x.mean(); + double p2y_mean = p2y.mean(); + + p1x.array() -= p1x_mean; + p1y.array() -= p1y_mean; + p2x.array() -= p2x_mean; + p2y.array() -= p2y_mean; + + double p1_dist = sqrt((p1x * p1x.transpose() + p1y * p1y.transpose())(0,0)); + double p2_dist = sqrt((p2x * p2x.transpose() + p2y * 
p2y.transpose())(0,0)); + + p1_local /= p1_dist; + p2_local /= p2_dist; Vec9 h; if (n == 4) { @@ -52,15 +78,38 @@ void FourPointSolver::Solve(const Mat &x, const Mat &y, vector *Hs) { // Eigen and the compiler doing the maximum of optimization. typedef Eigen::Matrix Mat16_9; Mat16_9 L = Mat::Zero(16, 9); - BuildActionMatrix(L, x, y); + BuildActionMatrix(L, p1_local, p2_local); Nullspace(&L, &h); } else { MatX9 L = Mat::Zero(n * 2, 9); - BuildActionMatrix(L, x, y); + BuildActionMatrix(L, p1_local, p2_local); Nullspace(&L, &h); } - Mat3 H = Map(h.data()); // map the linear vector as the H matrix + + /*Build G matrix from vector*/ + Mat3 G = Map(h.data()); + + /* + p2_local = G * p1_local + Kp2 * p2 = G * Kp1 * p1 + p2 = Kp2^-1 * G * Kp1 * p1 + H = Kp2^-1 * G * Kp1 + */ + Eigen::Matrix3d Kp1 = Eigen::Matrix3d::Identity(); + Kp1(0, 0) = 1.0 / p1_dist; + Kp1(0, 2) = - p1x_mean / p1_dist; + Kp1(1, 1) = 1.0 / p1_dist; + Kp1(1, 2) = - p1y_mean / p1_dist; + + Eigen::Matrix3d Kp2 = Eigen::Matrix3d::Identity(); + Kp2(0, 0) = 1.0 / p2_dist; + Kp2(0, 2) = - p2x_mean / p2_dist; + Kp2(1, 1) = 1.0 / p2_dist; + Kp2(1, 2) = - p2y_mean / p2_dist; + + Eigen::Matrix3d H = Kp2.inverse() * G * Kp1; + Hs->push_back(H); } diff --git a/src/aliceVision/multiview/homographyKernelSolver.hpp b/src/aliceVision/multiview/homographyKernelSolver.hpp index e69840830f..5e91b5c600 100644 --- a/src/aliceVision/multiview/homographyKernelSolver.hpp +++ b/src/aliceVision/multiview/homographyKernelSolver.hpp @@ -23,24 +23,24 @@ struct FourPointSolver { enum { MINIMUM_SAMPLES = 4 }; enum { MAX_MODELS = 1 }; /** - * Computes the homography that transforms x to y with the Direct Linear + * Computes the homography that transforms p1 to p2 with the Direct Linear * Transform (DLT). * - * \param x A 2xN matrix of column vectors. - * \param y A 2xN matrix of column vectors. + * \param p1 A 2xN matrix of column vectors. + * \param p2 A 2xN matrix of column vectors. 
* \param Hs A vector into which the computed homography is stored. * - * The estimated homography should approximately hold the condition y = H x. + * The estimated homography should approximately hold the condition p2 = H p1. */ - static void Solve(const Mat &x, const Mat &y, vector *Hs); + static void Solve(const Mat &p1, const Mat &p2, vector *Hs); }; // Should be distributed as Chi-squared with k = 2. struct AsymmetricError { - static double Error(const Mat &H, const Vec2 &x1, const Vec2 &x2) { - Vec3 x2h_est = H * EuclideanToHomogeneous(x1); - Vec2 x2_est = x2h_est.head<2>() / x2h_est[2]; - return (x2 - x2_est).squaredNorm(); + static double Error(const Mat &H, const Vec2 &p1, const Vec2 &p2) { + Vec3 p2h_est = H * EuclideanToHomogeneous(p1); + Vec2 p2_est = p2h_est.head<2>() / p2h_est[2]; + return (p2 - p2_est).squaredNorm(); } }; From 4a6c18ec9bfaf03be510d5ddbe37f8b722b1e726 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Tue, 24 Sep 2019 14:03:17 +0200 Subject: [PATCH 047/174] Remove concept of landmarks from panorama estimation --- src/software/pipeline/main_panoramaEstimation.cpp | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/software/pipeline/main_panoramaEstimation.cpp b/src/software/pipeline/main_panoramaEstimation.cpp index c67272c0f3..900178c39a 100644 --- a/src/software/pipeline/main_panoramaEstimation.cpp +++ b/src/software/pipeline/main_panoramaEstimation.cpp @@ -220,9 +220,6 @@ int main(int argc, char **argv) if(!sfmEngine.process()) return EXIT_FAILURE; - sfmEngine.Compute_Initial_Structure(pairwiseMatches); - sfmEngine.colorize(); - // set featuresFolders and matchesFolders relative paths { sfmEngine.getSfMData().addFeaturesFolders(featuresFolders); @@ -248,8 +245,7 @@ int main(int argc, char **argv) ALICEVISION_LOG_INFO("Panorama results:" << std::endl << "\t- # input images: " << outSfmData.getViews().size() << std::endl - << "\t- # cameras calibrated: " << outSfmData.getPoses().size() << std::endl - << "\t- # 
landmarks: " << outSfmData.getLandmarks().size()); + << "\t- # cameras calibrated: " << outSfmData.getPoses().size()); auto validViews = outSfmData.getValidViews(); int nbCameras = outSfmData.getValidViews().size(); From 956fd993418d9ac2170cdc32ce0f633abd2b2c94 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Tue, 24 Sep 2019 14:59:06 +0200 Subject: [PATCH 048/174] Add the constraint2D concept. Describe explicitely a match between 2 views (No indirection with a landmark) . --- src/aliceVision/sfmData/Constraint2D.hpp | 45 ++++++++++++++++++++++++ src/aliceVision/sfmData/SfMData.cpp | 12 +++++++ src/aliceVision/sfmData/SfMData.hpp | 13 +++++++ src/aliceVision/sfmDataIO/sfmDataIO.hpp | 5 +-- 4 files changed, 73 insertions(+), 2 deletions(-) create mode 100644 src/aliceVision/sfmData/Constraint2D.hpp diff --git a/src/aliceVision/sfmData/Constraint2D.hpp b/src/aliceVision/sfmData/Constraint2D.hpp new file mode 100644 index 0000000000..306802d864 --- /dev/null +++ b/src/aliceVision/sfmData/Constraint2D.hpp @@ -0,0 +1,45 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2016 AliceVision contributors. +// Copyright (c) 2012 openMVG contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. + +#pragma once + +#include + +namespace aliceVision { +namespace sfmData { + +/** + * @brief Constraint2D is a 3D point with its 2d observations. 
+ */ +struct Constraint2D +{ + Constraint2D() = default; + + Constraint2D(IndexT view_first = UndefinedIndexT, const Observation & observation_first = Observation(), + IndexT view_second = UndefinedIndexT, const Observation & observation_second = Observation()) + : ViewFirst(view_first) + , ObservationFirst(observation_first) + , ViewSecond(view_second) + , ObservationSecond(observation_second) + {} + + IndexT ViewFirst; + IndexT ViewSecond; + Observation ObservationFirst; + Observation ObservationSecond; + + bool operator==(const Constraint2D& other) const + { + return (ViewFirst == other.ViewFirst) && + (ViewSecond == other.ViewSecond) && + (ObservationFirst == other.ObservationFirst) && + (ObservationSecond == other.ObservationSecond); + } +}; + +} // namespace sfmData +} // namespace aliceVision diff --git a/src/aliceVision/sfmData/SfMData.cpp b/src/aliceVision/sfmData/SfMData.cpp index 7c5052e475..63d057c6a5 100644 --- a/src/aliceVision/sfmData/SfMData.cpp +++ b/src/aliceVision/sfmData/SfMData.cpp @@ -83,6 +83,15 @@ bool SfMData::operator==(const SfMData& other) const { if(control_points != other.control_points) return false; + + Constraints2D::const_iterator constraint2dIt = constraints2d.begin(); + Constraints2D::const_iterator otherconstraint2dIt = other.constraints2d.begin(); + for(; constraint2dIt != constraints2d.end() && otherconstraint2dIt != other.constraints2d.end(); ++constraint2dIt, ++otherconstraint2dIt) + { + if(!(*constraint2dIt == *otherconstraint2dIt)) + return false; + } + // Root path can be reseted during exports return true; @@ -248,6 +257,9 @@ void SfMData::combine(const SfMData& sfmData) // control points control_points.insert(sfmData.control_points.begin(), sfmData.control_points.end()); + + // constraints + constraints2d.insert(constraints2d.end(), sfmData.constraints2d.begin(), sfmData.constraints2d.end()); } } // namespace sfmData diff --git a/src/aliceVision/sfmData/SfMData.hpp b/src/aliceVision/sfmData/SfMData.hpp index 
c4cc864626..ea73be7dd6 100644 --- a/src/aliceVision/sfmData/SfMData.hpp +++ b/src/aliceVision/sfmData/SfMData.hpp @@ -9,6 +9,7 @@ #include #include +#include #include #include #include @@ -41,6 +42,9 @@ using PosesUncertainty = HashMap; /// Define uncertainty per landmark using LandmarksUncertainty = HashMap; +///Define a collection of constraints +using Constraints2D = std::vector; + /** * @brief SfMData container * Store structure and camera properties @@ -60,6 +64,8 @@ class SfMData PosesUncertainty _posesUncertainty; /// Uncertainty per landmark LandmarksUncertainty _landmarksUncertainty; + /// 2D Constraints + Constraints2D constraints2d; // Operators @@ -102,6 +108,13 @@ class SfMData const Landmarks& getLandmarks() const {return structure;} Landmarks& getLandmarks() {return structure;} + /** + * @brief Get Constraints2D + * @return Constraints2D + */ + const Constraints2D& getConstraints2D() const {return constraints2d;} + Constraints2D& getConstraints2D() {return constraints2d;} + /** * @brief Get control points * @return control points diff --git a/src/aliceVision/sfmDataIO/sfmDataIO.hpp b/src/aliceVision/sfmDataIO/sfmDataIO.hpp index 77535e22f5..cb9ad51a23 100644 --- a/src/aliceVision/sfmDataIO/sfmDataIO.hpp +++ b/src/aliceVision/sfmDataIO/sfmDataIO.hpp @@ -23,10 +23,11 @@ enum ESfMData CONTROL_POINTS = 64, LANDMARKS_UNCERTAINTY = 128, POSES_UNCERTAINTY = 256, + CONSTRAINTS2D = 512, UNCERTAINTY = LANDMARKS_UNCERTAINTY | POSES_UNCERTAINTY, - ALL_DENSE = VIEWS | EXTRINSICS | INTRINSICS | STRUCTURE | OBSERVATIONS | CONTROL_POINTS, - ALL = VIEWS | EXTRINSICS | INTRINSICS | STRUCTURE | OBSERVATIONS_WITH_FEATURES | CONTROL_POINTS | UNCERTAINTY + ALL_DENSE = VIEWS | EXTRINSICS | INTRINSICS | STRUCTURE | OBSERVATIONS | CONTROL_POINTS | CONSTRAINTS2D, + ALL = VIEWS | EXTRINSICS | INTRINSICS | STRUCTURE | OBSERVATIONS_WITH_FEATURES | CONTROL_POINTS | UNCERTAINTY | CONSTRAINTS2D }; /// check that each pose have a valid intrinsic and pose id in the existing View ids 
From 9a8763529da833ba33c88a47087b5df0bc5336e4 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Wed, 25 Sep 2019 08:01:49 +0200 Subject: [PATCH 049/174] add some override keywords to remove warnings --- src/aliceVision/camera/Pinhole.hpp | 32 +++++++++++----------- src/aliceVision/camera/PinholeBrown.hpp | 16 +++++------ src/aliceVision/camera/PinholeFisheye.hpp | 16 +++++------ src/aliceVision/camera/PinholeFisheye1.hpp | 16 +++++------ src/aliceVision/camera/PinholeRadial.hpp | 16 +++++------ 5 files changed, 48 insertions(+), 48 deletions(-) diff --git a/src/aliceVision/camera/Pinhole.hpp b/src/aliceVision/camera/Pinhole.hpp index 05373af6ec..949d48dcd6 100644 --- a/src/aliceVision/camera/Pinhole.hpp +++ b/src/aliceVision/camera/Pinhole.hpp @@ -49,12 +49,12 @@ class Pinhole : public IntrinsicBase virtual ~Pinhole() {} - virtual Pinhole* clone() const { return new Pinhole(*this); } - virtual void assign(const IntrinsicBase& other) { *this = dynamic_cast(other); } + virtual Pinhole* clone() const override { return new Pinhole(*this); } + virtual void assign(const IntrinsicBase& other) override { *this = dynamic_cast(other); } - virtual bool isValid() const { return focal() > 0 && IntrinsicBase::isValid(); } + virtual bool isValid() const override { return focal() > 0 && IntrinsicBase::isValid(); } - virtual EINTRINSIC getType() const { return PINHOLE_CAMERA; } + virtual EINTRINSIC getType() const override { return PINHOLE_CAMERA; } std::string getTypeStr() const { return EINTRINSIC_enumToString(getType()); } double getFocalLengthPix() const { return _K(0,0); } @@ -74,36 +74,36 @@ class Pinhole : public IntrinsicBase inline Vec2 principal_point() const {return Vec2(_K(0,2), _K(1,2));} // Get bearing vector of p point (image coord) - Vec3 operator () (const Vec2& p) const + Vec3 operator () (const Vec2& p) const override { Vec3 p3(p(0),p(1),1.0); return (_Kinv * p3).normalized(); } // Transform a point from the camera plane to the image plane - Vec2 cam2ima(const 
Vec2& p) const + Vec2 cam2ima(const Vec2& p) const override { return focal() * p + principal_point(); } // Transform a point from the image plane to the camera plane - Vec2 ima2cam(const Vec2& p) const + Vec2 ima2cam(const Vec2& p) const override { return ( p - principal_point() ) / focal(); } - virtual bool have_disto() const { return false; } + virtual bool have_disto() const override { return false; } - virtual Vec2 add_disto(const Vec2& p) const { return p; } + virtual Vec2 add_disto(const Vec2& p) const override { return p; } - virtual Vec2 remove_disto(const Vec2& p) const { return p; } + virtual Vec2 remove_disto(const Vec2& p) const override { return p; } - virtual double imagePlane_toCameraPlaneError(double value) const + virtual double imagePlane_toCameraPlaneError(double value) const override { return value / focal(); } - virtual Mat34 get_projective_equivalent(const geometry::Pose3 & pose) const + virtual Mat34 get_projective_equivalent(const geometry::Pose3 & pose) const override { Mat34 P; P_From_KRt(K(), pose.rotation(), pose.translation(), &P); @@ -111,7 +111,7 @@ class Pinhole : public IntrinsicBase } // Data wrapper for non linear optimization (get data) - std::vector getParams() const + std::vector getParams() const override { std::vector params = {_K(0,0), _K(0,2), _K(1,2)}; params.insert(params.end(), _distortionParams.begin(), _distortionParams.end()); @@ -143,7 +143,7 @@ class Pinhole : public IntrinsicBase } // Data wrapper for non linear optimization (update from data) - bool updateFromParams(const std::vector& params) + bool updateFromParams(const std::vector& params) override { if (params.size() != (3 + _distortionParams.size())) return false; @@ -155,10 +155,10 @@ class Pinhole : public IntrinsicBase } /// Return the un-distorted pixel (with removed distortion) - virtual Vec2 get_ud_pixel(const Vec2& p) const {return p;} + virtual Vec2 get_ud_pixel(const Vec2& p) const override {return p;} /// Return the distorted pixel (with added 
distortion) - virtual Vec2 get_d_pixel(const Vec2& p) const {return p;} + virtual Vec2 get_d_pixel(const Vec2& p) const override {return p;} private: // Focal & principal point are embed into the calibration matrix K diff --git a/src/aliceVision/camera/PinholeBrown.hpp b/src/aliceVision/camera/PinholeBrown.hpp index 04f8bba2f1..e07dc4ad0a 100644 --- a/src/aliceVision/camera/PinholeBrown.hpp +++ b/src/aliceVision/camera/PinholeBrown.hpp @@ -31,14 +31,14 @@ class PinholeBrownT2 : public Pinhole { } - PinholeBrownT2* clone() const { return new PinholeBrownT2(*this); } - void assign(const IntrinsicBase& other) { *this = dynamic_cast(other); } + PinholeBrownT2* clone() const override { return new PinholeBrownT2(*this); } + void assign(const IntrinsicBase& other) override { *this = dynamic_cast(other); } - EINTRINSIC getType() const { return PINHOLE_CAMERA_BROWN; } + EINTRINSIC getType() const override { return PINHOLE_CAMERA_BROWN; } - virtual bool have_disto() const { return true;} + virtual bool have_disto() const override { return true;} - virtual Vec2 add_disto(const Vec2 & p) const{ + virtual Vec2 add_disto(const Vec2 & p) const override{ return (p + distoFunction(_distortionParams, p)); } @@ -46,7 +46,7 @@ class PinholeBrownT2 : public Pinhole // Heikkila J (2000) Geometric Camera Calibration Using Circular Control Points. // IEEE Trans. Pattern Anal. Mach. 
Intell., 22:1066-1077 - virtual Vec2 remove_disto(const Vec2 & p) const{ + virtual Vec2 remove_disto(const Vec2 & p) const override{ const double epsilon = 1e-8; //criteria to stop the iteration Vec2 p_u = p; @@ -59,13 +59,13 @@ class PinholeBrownT2 : public Pinhole } /// Return the un-distorted pixel (with removed distortion) - virtual Vec2 get_ud_pixel(const Vec2& p) const + virtual Vec2 get_ud_pixel(const Vec2& p) const override { return cam2ima( remove_disto(ima2cam(p)) ); } /// Return the distorted pixel (with added distortion) - virtual Vec2 get_d_pixel(const Vec2& p) const + virtual Vec2 get_d_pixel(const Vec2& p) const override { return cam2ima( add_disto(ima2cam(p)) ); } diff --git a/src/aliceVision/camera/PinholeFisheye.hpp b/src/aliceVision/camera/PinholeFisheye.hpp index da9362c33b..1883f5123b 100644 --- a/src/aliceVision/camera/PinholeFisheye.hpp +++ b/src/aliceVision/camera/PinholeFisheye.hpp @@ -33,14 +33,14 @@ class PinholeFisheye : public Pinhole { } - PinholeFisheye* clone() const { return new PinholeFisheye(*this); } - void assign(const IntrinsicBase& other) { *this = dynamic_cast(other); } + PinholeFisheye* clone() const override { return new PinholeFisheye(*this); } + void assign(const IntrinsicBase& other) override { *this = dynamic_cast(other); } - EINTRINSIC getType() const { return PINHOLE_CAMERA_FISHEYE; } + EINTRINSIC getType() const override { return PINHOLE_CAMERA_FISHEYE; } - virtual bool have_disto() const { return true;} + virtual bool have_disto() const override { return true;} - virtual Vec2 add_disto(const Vec2 & p) const + virtual Vec2 add_disto(const Vec2 & p) const override { const std::vector& distortionParams = getDistortionParams(); const double eps = 1e-8; @@ -62,7 +62,7 @@ class PinholeFisheye : public Pinhole return p*cdist; } - virtual Vec2 remove_disto(const Vec2 & p) const + virtual Vec2 remove_disto(const Vec2 & p) const override { const double eps = 1e-8; double scale = 1.0; @@ -89,13 +89,13 @@ class PinholeFisheye : 
public Pinhole } /// Return the un-distorted pixel (with removed distortion) - virtual Vec2 get_ud_pixel(const Vec2& p) const + virtual Vec2 get_ud_pixel(const Vec2& p) const override { return cam2ima( remove_disto(ima2cam(p)) ); } /// Return the distorted pixel (with added distortion) - virtual Vec2 get_d_pixel(const Vec2& p) const + virtual Vec2 get_d_pixel(const Vec2& p) const override { return cam2ima( add_disto(ima2cam(p)) ); } diff --git a/src/aliceVision/camera/PinholeFisheye1.hpp b/src/aliceVision/camera/PinholeFisheye1.hpp index 3f946cd948..a742480fb1 100644 --- a/src/aliceVision/camera/PinholeFisheye1.hpp +++ b/src/aliceVision/camera/PinholeFisheye1.hpp @@ -34,14 +34,14 @@ class PinholeFisheye1 : public Pinhole { } - PinholeFisheye1* clone() const { return new PinholeFisheye1(*this); } - void assign(const IntrinsicBase& other) { *this = dynamic_cast(other); } + PinholeFisheye1* clone() const override { return new PinholeFisheye1(*this); } + void assign(const IntrinsicBase& other) override { *this = dynamic_cast(other); } - EINTRINSIC getType() const { return PINHOLE_CAMERA_FISHEYE1; } + EINTRINSIC getType() const override { return PINHOLE_CAMERA_FISHEYE1; } - virtual bool have_disto() const { return true;} + virtual bool have_disto() const override { return true;} - virtual Vec2 add_disto(const Vec2 & p) const + virtual Vec2 add_disto(const Vec2 & p) const override { const double k1 = _distortionParams.at(0); const double r = std::hypot(p(0), p(1)); @@ -49,7 +49,7 @@ class PinholeFisheye1 : public Pinhole return p * coef; } - virtual Vec2 remove_disto(const Vec2 & p) const + virtual Vec2 remove_disto(const Vec2 & p) const override { const double k1 = _distortionParams.at(0); const double r = std::hypot(p(0), p(1)); @@ -58,13 +58,13 @@ class PinholeFisheye1 : public Pinhole } /// Return the un-distorted pixel (with removed distortion) - virtual Vec2 get_ud_pixel(const Vec2& p) const + virtual Vec2 get_ud_pixel(const Vec2& p) const override { return 
cam2ima( remove_disto(ima2cam(p)) ); } /// Return the distorted pixel (with added distortion) - virtual Vec2 get_d_pixel(const Vec2& p) const + virtual Vec2 get_d_pixel(const Vec2& p) const override { return cam2ima( add_disto(ima2cam(p)) ); } diff --git a/src/aliceVision/camera/PinholeRadial.hpp b/src/aliceVision/camera/PinholeRadial.hpp index 991382e2a5..ddcf112e9c 100644 --- a/src/aliceVision/camera/PinholeRadial.hpp +++ b/src/aliceVision/camera/PinholeRadial.hpp @@ -126,15 +126,15 @@ class PinholeRadialK3 : public Pinhole { } - PinholeRadialK3* clone() const { return new PinholeRadialK3(*this); } - void assign(const IntrinsicBase& other) { *this = dynamic_cast(other); } + PinholeRadialK3* clone() const override { return new PinholeRadialK3(*this); } + void assign(const IntrinsicBase& other) override { *this = dynamic_cast(other); } - EINTRINSIC getType() const { return PINHOLE_CAMERA_RADIAL3; } + EINTRINSIC getType() const override { return PINHOLE_CAMERA_RADIAL3; } - virtual bool have_disto() const { return true; } + virtual bool have_disto() const override { return true; } /// Add distortion to the point p (assume p is in the camera frame [normalized coordinates]) - virtual Vec2 add_disto(const Vec2 & p) const + virtual Vec2 add_disto(const Vec2 & p) const override { const double k1 = _distortionParams[0], k2 = _distortionParams[1], k3 = _distortionParams[2]; @@ -147,7 +147,7 @@ class PinholeRadialK3 : public Pinhole } /// Remove distortion (return p' such that disto(p') = p) - virtual Vec2 remove_disto(const Vec2& p) const { + virtual Vec2 remove_disto(const Vec2& p) const override { // Compute the radius from which the point p comes from thanks to a bisection // Minimize disto(radius(p')^2) == actual Squared(radius(p)) @@ -159,13 +159,13 @@ class PinholeRadialK3 : public Pinhole } /// Return the un-distorted pixel (with removed distortion) - virtual Vec2 get_ud_pixel(const Vec2& p) const + virtual Vec2 get_ud_pixel(const Vec2& p) const override { return 
cam2ima( remove_disto(ima2cam(p)) ); } /// Return the distorted pixel (with added distortion) - virtual Vec2 get_d_pixel(const Vec2& p) const + virtual Vec2 get_d_pixel(const Vec2& p) const override { return cam2ima( add_disto(ima2cam(p)) ); } From 030f79fb48d47ac2320d786029c4c8fc3d5b761b Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Wed, 25 Sep 2019 08:30:55 +0200 Subject: [PATCH 050/174] create some skeleton for ceres cost function for 2D constraints --- src/aliceVision/sfm/BundleAdjustmentCeres.cpp | 21 ++++++- .../sfm/ResidualErrorConstraintFunctor.hpp | 59 +++++++++++++++++++ 2 files changed, 79 insertions(+), 1 deletion(-) create mode 100644 src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp diff --git a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp index d3f6361081..0436e9902b 100644 --- a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp +++ b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp @@ -7,17 +7,19 @@ #include #include +#include #include #include #include - #include #include #include + + namespace fs = boost::filesystem; namespace aliceVision { @@ -80,6 +82,23 @@ ceres::CostFunction* createRigCostFunctionFromIntrinsics(const IntrinsicBase* in } } +/** + * @brief Create the appropriate cost functor according the provided input camera intrinsic model + * @param[in] intrinsicPtr The intrinsic pointer + * @param[in] observation The corresponding observation + * @return cost functor + */ +ceres::CostFunction* createConstraintsCostFunctionFromIntrinsics(const IntrinsicBase* intrinsicPtr, const Vec2& observation_first, const Vec2& observation_second) +{ + switch(intrinsicPtr->getType()) + { + case PINHOLE_CAMERA: + return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_Pinhole(observation_first.data(), observation_second.data())); + default: + throw std::logic_error("Cannot create cost function, unrecognized intrinsic type in BA."); + } +} + void 
BundleAdjustmentCeres::CeresOptions::setDenseBA() { // default configuration use a DENSE representation diff --git a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp new file mode 100644 index 0000000000..4a5c1ad85b --- /dev/null +++ b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp @@ -0,0 +1,59 @@ +#pragma once + +#include + +#include + +namespace aliceVision { +namespace sfm { + +/** + * @brief Ceres functor to use a pair of pinhole on a pure rotation 2D constraint. + * + * Data parameter blocks are the following <2,3,3,6,6> + * - 2 => dimension of the residuals, + * - 3 => the intrinsic data block for the first view [focal, principal point x, principal point y], + * - 3 => the intrinsic data block for the second view [focal, principal point x, principal point y], + * - 3 => the camera extrinsic data block for the first view (camera orientation only as angle axis) + * - 3 => the camera extrinsic data block for the second view (camera orientation only as angle axis) + * + */ +struct ResidualErrorConstraintFunctor_Pinhole +{ + ResidualErrorConstraintFunctor_Pinhole(const double* const pos_2dpoint_first, const double* const pos_2dpoint_second) + { + m_pos_2dpoint_first[0] = pos_2dpoint_first[0]; + m_pos_2dpoint_first[1] = pos_2dpoint_first[1]; + m_pos_2dpoint_second[0] = pos_2dpoint_second[0]; + m_pos_2dpoint_second[1] = pos_2dpoint_second[1]; + } + + /** + * @param[in] cam_K: Camera intrinsics( focal, principal point [x,y] ) + * @param[in] cam_Rt: Camera parameterized using one block of 6 parameters [R;t]: + * - 3 for rotation(angle axis), 3 for translation + * @param[in] pos_3dpoint + * @param[out] out_residuals + */ + template + bool operator()( + const T* const cam_K1, + const T* const cam_K2, + const T* const cam_R1, + const T* const cam_R2, + T* out_residuals) const + { + //Eigen::Matrix R1; + + + + return true; + } + + double m_pos_2dpoint_first[2]; // The 2D observation in first view + 
double m_pos_2dpoint_second[2]; // The 2D observation in second view +}; + + +} // namespace sfm +} // namespace aliceVision From 7673f2892778c06a9f12919f0ea51ca430d94ab5 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Wed, 25 Sep 2019 14:12:43 +0200 Subject: [PATCH 051/174] Adding panorama 2D constraints for bundle adjustment --- src/aliceVision/sfm/BundleAdjustmentCeres.cpp | 41 ++++- src/aliceVision/sfm/BundleAdjustmentCeres.hpp | 8 + .../sfm/ResidualErrorConstraintFunctor.hpp | 174 ++++++++++++++++-- .../ReconstructionEngine_panorama.cpp | 92 +++------ 4 files changed, 232 insertions(+), 83 deletions(-) diff --git a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp index 0436e9902b..7d38d790cc 100644 --- a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp +++ b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp @@ -90,10 +90,14 @@ ceres::CostFunction* createRigCostFunctionFromIntrinsics(const IntrinsicBase* in */ ceres::CostFunction* createConstraintsCostFunctionFromIntrinsics(const IntrinsicBase* intrinsicPtr, const Vec2& observation_first, const Vec2& observation_second) { + + switch(intrinsicPtr->getType()) { case PINHOLE_CAMERA: - return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_Pinhole(observation_first.data(), observation_second.data())); + return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_Pinhole(observation_first.homogeneous(), observation_second.homogeneous())); + /*case PINHOLE_CAMERA: + return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_PinholeRadialK1(observation_first.homogeneous(), observation_second.homogeneous()));*/ default: throw std::logic_error("Cannot create cost function, unrecognized intrinsic type in BA."); } @@ -598,6 +602,36 @@ void BundleAdjustmentCeres::addLandmarksToProblem(const sfmData::SfMData& sfmDat } } +void BundleAdjustmentCeres::addConstraints2DToProblem(const sfmData::SfMData& sfmData, ERefineOptions refineOptions, 
ceres::Problem& problem) +{ + // set a LossFunction to be less penalized by false measurements. + // note: set it to NULL if you don't want use a lossFunction. + ceres::LossFunction* lossFunction = new ceres::HuberLoss(Square(4.0)); // TODO: make the LOSS function and the parameter an option + + for (const auto & constraint : sfmData.getConstraints2D()) { + const sfmData::View& view_1 = sfmData.getView(constraint.ViewFirst); + const sfmData::View& view_2 = sfmData.getView(constraint.ViewSecond); + + assert(getPoseState(view_1.getPoseId()) != EParameterState::IGNORED); + assert(getIntrinsicState(view_1.getIntrinsicId()) != EParameterState::IGNORED); + assert(getPoseState(view_2.getPoseId()) != EParameterState::IGNORED); + assert(getIntrinsicState(view_2.getIntrinsicId()) != EParameterState::IGNORED); + + double * poseBlockPtr_1 = _posesBlocks.at(view_1.getPoseId()).data(); + double * poseBlockPtr_2 = _posesBlocks.at(view_2.getPoseId()).data(); + + double * intrinsicBlockPtr_1 = _intrinsicsBlocks.at(view_1.getIntrinsicId()).data(); + double * intrinsicBlockPtr_2 = _intrinsicsBlocks.at(view_2.getIntrinsicId()).data(); + + //For the moment assume a unique camera + assert(intrinsicBlockPtr_1 == intrinsicBlockPtr_2); + + ceres::CostFunction* costFunction = createConstraintsCostFunctionFromIntrinsics(sfmData.getIntrinsicPtr(view_1.getIntrinsicId()), constraint.ObservationFirst.x, constraint.ObservationSecond.x); + + problem.AddResidualBlock(costFunction, lossFunction, intrinsicBlockPtr_1, poseBlockPtr_1, poseBlockPtr_2); + } +} + void BundleAdjustmentCeres::createProblem(const sfmData::SfMData& sfmData, ERefineOptions refineOptions, ceres::Problem& problem) @@ -617,6 +651,9 @@ void BundleAdjustmentCeres::createProblem(const sfmData::SfMData& sfmData, // add SfM landmarks to the Ceres problem addLandmarksToProblem(sfmData, refineOptions, problem); + + // add 2D constraints to the Ceres problem + addConstraints2DToProblem(sfmData, refineOptions, problem); } void 
BundleAdjustmentCeres::updateFromSolution(sfmData::SfMData& sfmData, ERefineOptions refineOptions) const @@ -737,7 +774,7 @@ bool BundleAdjustmentCeres::adjust(sfmData::SfMData& sfmData, ERefineOptions ref // print summary if(_ceresOptions.summary) - ALICEVISION_LOG_DEBUG(summary.FullReport()); + ALICEVISION_LOG_INFO(summary.FullReport()); // solution is not usable if(!summary.IsSolutionUsable()) diff --git a/src/aliceVision/sfm/BundleAdjustmentCeres.hpp b/src/aliceVision/sfm/BundleAdjustmentCeres.hpp index 0b63fa6208..89da1ba9cb 100644 --- a/src/aliceVision/sfm/BundleAdjustmentCeres.hpp +++ b/src/aliceVision/sfm/BundleAdjustmentCeres.hpp @@ -206,6 +206,14 @@ class BundleAdjustmentCeres : public BundleAdjustment */ void addLandmarksToProblem(const sfmData::SfMData& sfmData, ERefineOptions refineOptions, ceres::Problem& problem); + /** + * @brief Create a residual block for each 2D constraints + * @param[in] sfmData The input SfMData contains all the information about the reconstruction, notably the intrinsics + * @param[in] refineOptions The chosen refine flag + * @param[out] problem The Ceres bundle adjustement problem + */ + void addConstraints2DToProblem(const sfmData::SfMData& sfmData, ERefineOptions refineOptions, ceres::Problem& problem); + /** * @brief Create the Ceres bundle adjustement problem with: * - extrincics and intrinsics parameters blocks. diff --git a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp index 4a5c1ad85b..c1a91f12e2 100644 --- a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp +++ b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp @@ -10,22 +10,52 @@ namespace sfm { /** * @brief Ceres functor to use a pair of pinhole on a pure rotation 2D constraint. 
* - * Data parameter blocks are the following <2,3,3,6,6> + * Data parameter blocks are the following <2,3,6,6> * - 2 => dimension of the residuals, * - 3 => the intrinsic data block for the first view [focal, principal point x, principal point y], - * - 3 => the intrinsic data block for the second view [focal, principal point x, principal point y], - * - 3 => the camera extrinsic data block for the first view (camera orientation only as angle axis) - * - 3 => the camera extrinsic data block for the second view (camera orientation only as angle axis) + * - 3 => the camera extrinsic data block for the first view + * - 3 => the camera extrinsic data block for the second view * */ struct ResidualErrorConstraintFunctor_Pinhole { - ResidualErrorConstraintFunctor_Pinhole(const double* const pos_2dpoint_first, const double* const pos_2dpoint_second) + ResidualErrorConstraintFunctor_Pinhole(const Vec3 & pos_2dpoint_first, const Vec3 & pos_2dpoint_second) + : m_pos_2dpoint_first(pos_2dpoint_first), m_pos_2dpoint_second(pos_2dpoint_second) { - m_pos_2dpoint_first[0] = pos_2dpoint_first[0]; - m_pos_2dpoint_first[1] = pos_2dpoint_first[1]; - m_pos_2dpoint_second[0] = pos_2dpoint_second[0]; - m_pos_2dpoint_second[1] = pos_2dpoint_second[1]; + } + + // Enum to map intrinsics parameters between aliceVision & ceres camera data parameter block. 
+ enum { + OFFSET_FOCAL_LENGTH = 0, + OFFSET_PRINCIPAL_POINT_X = 1, + OFFSET_PRINCIPAL_POINT_Y = 2 + }; + + template + void lift(const T* const cam_K, const Vec3 pt, Eigen::Vector & out) const + { + const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; + const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; + const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; + + + out(0) = (pt(0) - principal_point_x) / focal; + out(1) = (pt(1) - principal_point_y) / focal; + out(2) = static_cast(1.0); + } + + template + void unlift(const T* const cam_K, const Eigen::Vector & pt, Eigen::Vector & out) const + { + const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; + const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; + const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; + + Eigen::Vector proj_pt = pt / pt(2); + + out(0) = proj_pt(0) * focal + principal_point_x; + out(1) = proj_pt(1) * focal + principal_point_y; + out(2) = static_cast(1.0); } /** @@ -37,21 +67,137 @@ struct ResidualErrorConstraintFunctor_Pinhole */ template bool operator()( - const T* const cam_K1, - const T* const cam_K2, + const T* const cam_K, const T* const cam_R1, const T* const cam_R2, T* out_residuals) const { - //Eigen::Matrix R1; + Eigen::Matrix oneRo, twoRo, twoRone; + Eigen::Vector pt3d_1; + + lift(cam_K, m_pos_2dpoint_first, pt3d_1); + + ceres::AngleAxisToRotationMatrix(cam_R1, oneRo.data()); + ceres::AngleAxisToRotationMatrix(cam_R2, twoRo.data()); + + twoRone = twoRo * oneRo.transpose(); + + Eigen::Vector pt3d_2_est = twoRone * pt3d_1; + + Eigen::Vector pt2d_2_est; + unlift(cam_K, pt3d_2_est, pt2d_2_est); + + Eigen::Vector residual = pt2d_2_est - m_pos_2dpoint_second; + + out_residuals[0] = residual(0); + out_residuals[1] = residual(1); + + return true; + } + + Vec3 m_pos_2dpoint_first; // The 2D observation in first view + Vec3 m_pos_2dpoint_second; // The 2D observation in second view +}; + +/** + * @brief Ceres functor to use a pair of pinhole on a pure rotation 2D constraint. 
+ * + * Data parameter blocks are the following <2,4,6,6> + * - 2 => dimension of the residuals, + * - 4 => the intrinsic data block for the first view [focal, principal point x, principal point y], + * - 3 => the camera extrinsic data block for the first view + * - 3 => the camera extrinsic data block for the second view + * + */ +struct ResidualErrorConstraintFunctor_PinholeRadialK1 +{ + ResidualErrorConstraintFunctor_PinholeRadialK1(const Vec3 & pos_2dpoint_first, const Vec3 & pos_2dpoint_second) + : m_pos_2dpoint_first(pos_2dpoint_first), m_pos_2dpoint_second(pos_2dpoint_second) + { + } + + // Enum to map intrinsics parameters between aliceVision & ceres camera data parameter block. + enum { + OFFSET_FOCAL_LENGTH = 0, + OFFSET_PRINCIPAL_POINT_X = 1, + OFFSET_PRINCIPAL_POINT_Y = 2, + OFFSET_DISTO_K1 = 3 + }; + + template + void lift(const T* const cam_K, const Vec3 pt, Eigen::Vector & out) const + { + /*const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; + const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; + const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; + const T& k1 = cam_K[OFFSET_DISTO_K1]; + + // Apply distortion (xd,yd) = disto(x_u,y_u) + const T r2 = x_u*x_u + y_u*y_u; + const T r_coeff = (T(1) + k1*r2); + const T x_d = x_u * r_coeff; + const T y_d = y_u * r_coeff; + + out(0) = (pt(0) - principal_point_x) / focal; + out(1) = (pt(1) - principal_point_y) / focal; + out(2) = static_cast(1.0);*/ + } + + template + void unlift(const T* const cam_K, const Eigen::Vector & pt, Eigen::Vector & out) const + { + const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; + const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; + const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; + + Eigen::Vector proj_pt = pt / pt(2); + + out(0) = proj_pt(0) * focal + principal_point_x; + out(1) = proj_pt(1) * focal + principal_point_y; + out(2) = static_cast(1.0); + } + + /** + * @param[in] cam_K: Camera intrinsics( focal, principal point [x,y] ) + * @param[in] 
cam_Rt: Camera parameterized using one block of 6 parameters [R;t]: + * - 3 for rotation(angle axis), 3 for translation + * @param[in] pos_3dpoint + * @param[out] out_residuals + */ + template + bool operator()( + const T* const cam_K, + const T* const cam_R1, + const T* const cam_R2, + T* out_residuals) const + { + Eigen::Matrix oneRo, twoRo, twoRone; + Eigen::Vector pt3d_1; + + lift(cam_K, m_pos_2dpoint_first, pt3d_1); + + ceres::AngleAxisToRotationMatrix(cam_R1, oneRo.data()); + ceres::AngleAxisToRotationMatrix(cam_R2, twoRo.data()); + + twoRone = twoRo * oneRo.transpose(); + + Eigen::Vector pt3d_2_est = twoRone * pt3d_1; + + Eigen::Vector pt2d_2_est; + unlift(cam_K, pt3d_2_est, pt2d_2_est); + + Eigen::Vector residual = pt2d_2_est - m_pos_2dpoint_second; + + out_residuals[0] = residual(0); + out_residuals[1] = residual(1); return true; } - double m_pos_2dpoint_first[2]; // The 2D observation in first view - double m_pos_2dpoint_second[2]; // The 2D observation in second view + Vec3 m_pos_2dpoint_first; // The 2D observation in first view + Vec3 m_pos_2dpoint_second; // The 2D observation in second view }; diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 75e1e90ebc..1f1055da7e 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -559,6 +559,8 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging poseWiseMatches[Pair(v1->getPoseId(), v2->getPoseId())].insert(pair); } + sfm::Constraints2D constraints2d = _sfmData.getConstraints2D(); + ALICEVISION_LOG_INFO("Relative pose computation:"); // boost::progress_display progressBar( poseWiseMatches.size(), std::cout, "\n- Relative pose computation -\n" ); // #pragma omp parallel for schedule(dynamic) @@ -675,76 +677,32 @@ void 
ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging << ", initial_residual_tolerance: " << relativePose_info.initial_residual_tolerance << ", found_residual_precision: " << relativePose_info.found_residual_precision); - const bool refineUsingBA = false; - if(refineUsingBA) + + /*Sort all inliers by increasing ids*/ + std::sort(relativePose_info.vec_inliers.begin(), relativePose_info.vec_inliers.end()); + + size_t index = 0; + size_t index_inlier = 0; + for(const auto& matchesPerDescIt: matchesPerDesc) { - // Refine the defined scene - SfMData tinyScene; - tinyScene.views.insert(*_sfmData.getViews().find(view_I->getViewId())); - tinyScene.views.insert(*_sfmData.getViews().find(view_J->getViewId())); - tinyScene.intrinsics.insert(*_sfmData.getIntrinsics().find(view_I->getIntrinsicId())); - tinyScene.intrinsics.insert(*_sfmData.getIntrinsics().find(view_J->getIntrinsicId())); - - // Init poses - const Pose3& poseI = Pose3(Mat3::Identity(), Vec3::Zero()); - const Pose3& poseJ = relativePose_info.relativePose; - - tinyScene.setPose(*view_I, CameraPose(poseI)); - tinyScene.setPose(*view_J, CameraPose(poseJ)); - - // Init structure - const Mat34 P1 = cam_I->get_projective_equivalent(poseI); - const Mat34 P2 = cam_J->get_projective_equivalent(poseJ); - Landmarks & landmarks = tinyScene.structure; - - size_t landmarkId = 0; - for(const auto& matchesPerDescIt: matchesPerDesc) + const feature::EImageDescriberType descType = matchesPerDescIt.first; + const matching::IndMatches & matches = matchesPerDescIt.second; + + for (const auto & match : matches) { - const feature::EImageDescriberType descType = matchesPerDescIt.first; - assert(descType != feature::EImageDescriberType::UNINITIALIZED); - if(descType == feature::EImageDescriberType::UNINITIALIZED) - throw std::logic_error("descType UNINITIALIZED"); - const matching::IndMatches & matches = matchesPerDescIt.second; - for (const matching::IndMatch& match: matches) - { - const Vec2 x1_ = 
_featuresPerView->getFeatures(I, descType)[match._i].coords().cast(); - const Vec2 x2_ = _featuresPerView->getFeatures(J, descType)[match._j].coords().cast(); - Vec3 X; - TriangulateDLT(P1, x1_, P2, x2_, &X); - Observations obs; - obs[view_I->getViewId()] = Observation(x1_, match._i); - obs[view_J->getViewId()] = Observation(x2_, match._j); - Landmark& newLandmark = landmarks[landmarkId++]; - newLandmark.descType = descType; - newLandmark.observations = obs; - newLandmark.X = X; + size_t next_inlier = relativePose_info.vec_inliers[index_inlier]; + if (index == next_inlier) { + + Vec2 pt1 = _featuresPerView->getFeatures(I, descType)[match._i].coords().cast(); + Vec2 pt2 = _featuresPerView->getFeatures(J, descType)[match._j].coords().cast(); + + sfm::Constraint2D constraint(I, sfm::Observation(pt1, 0), J, sfm::Observation(pt2, 0)); + constraints2d.push_back(constraint); + + index_inlier++; } - } - // - refine only Structure and Rotations & translations (keep intrinsic constant) - BundleAdjustmentCeres::CeresOptions options(false, false); - options.linearSolverType = ceres::DENSE_SCHUR; - BundleAdjustmentCeres bundle_adjustment_obj(options); - if(bundle_adjustment_obj.adjust(tinyScene, BundleAdjustment::REFINE_ROTATION | BundleAdjustment::REFINE_TRANSLATION | BundleAdjustment::REFINE_STRUCTURE)) - { - // --> to debug: save relative pair geometry on disk - // std::ostringstream os; - // os << relative_pose_pair.first << "_" << relative_pose_pair.second << ".ply"; - // Save(tiny_scene, os.str(), ESfMData(STRUCTURE | EXTRINSICS)); - // - - const geometry::Pose3 poseI = tinyScene.getPose(*view_I).getTransform(); - const geometry::Pose3 poseJ = tinyScene.getPose(*view_J).getTransform(); - - const Mat3 R1 = poseI.rotation(); - const Mat3 R2 = poseJ.rotation(); - const Vec3 t1 = poseI.translation(); - const Vec3 t2 = poseJ.translation(); - // Compute relative motion and save it - Mat3 Rrel; - Vec3 trel; - RelativeCameraMotion(R1, t1, R2, t2, &Rrel, &trel); - // Update found 
relative pose - relativePose_info.relativePose = Pose3(Rrel, -Rrel.transpose() * trel); + + index++; } } From fdeb437c90f47d9543f79f95b210254a9358b5ff Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Wed, 25 Sep 2019 14:35:21 +0200 Subject: [PATCH 052/174] remove useless code --- .../ReconstructionEngine_panorama.cpp | 145 +----------------- .../ReconstructionEngine_panorama.hpp | 3 - 2 files changed, 5 insertions(+), 143 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 1f1055da7e..83268fb7bf 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -391,119 +391,15 @@ bool ReconstructionEngine_panorama::Compute_Global_Rotations(const rotationAvera return b_rotationAveraging; } -/// Compute the initial structure of the scene -bool ReconstructionEngine_panorama::Compute_Initial_Structure(matching::PairwiseMatches& tripletWise_matches) -{ - // Build tracks from selected triplets (Union of all the validated triplet tracks (_tripletWise_matches)) - { - using namespace aliceVision::track; - TracksBuilder tracksBuilder; -#ifdef USE_ALL_VALID_MATCHES // not used by default - matching::PairwiseMatches pose_supported_matches; - for (const auto & pairwiseMatchesIt : *_pairwiseMatches) - { - const View * vI = _sfm_data.getViews().at(pairwiseMatchesIt.first.first).get(); - const View * vJ = _sfm_data.getViews().at(pairwiseMatchesIt.first.second).get(); - if (_sfm_data.isPoseAndIntrinsicDefined(vI) && _sfm_data.isPoseAndIntrinsicDefined(vJ)) - { - pose_supported_matches.insert(pairwiseMatchesIt); - } - } - tracksBuilder.Build(pose_supported_matches); -#else - // Use triplet validated matches - tracksBuilder.build(tripletWise_matches); -#endif - TracksMap map_selectedTracks; // reconstructed track (visibility per 3D point) - 
tracksBuilder.exportToSTL(map_selectedTracks); - - // Fill sfm_data with the computed tracks (no 3D yet) - Landmarks & structure = _sfmData.structure; - IndexT idx(0); - for (TracksMap::const_iterator itTracks = map_selectedTracks.begin(); - itTracks != map_selectedTracks.end(); - ++itTracks, ++idx) - { - const Track & track = itTracks->second; - Landmark& newLandmark = structure[idx]; - newLandmark.descType = track.descType; - Observations & obs = newLandmark.observations; - std::vector pos3d; - pos3d.reserve(track.featPerView.size()); - for (Track::FeatureIdPerView::const_iterator it = track.featPerView.begin(); it != track.featPerView.end(); ++it) - { - const size_t imaIndex = it->first; - const size_t featIndex = it->second; - const PointFeature & pt = _featuresPerView->getFeatures(imaIndex, track.descType)[featIndex]; - obs[imaIndex] = Observation(pt.coords().cast(), featIndex); - - { - // back project a feature from the first observation - const sfmData::View * view = _sfmData.views.at(imaIndex).get(); - if (_sfmData.isPoseAndIntrinsicDefined(view)) - { - const IntrinsicBase& cam = *_sfmData.getIntrinsics().at(view->getIntrinsicId()).get(); - const Pose3 pose = _sfmData.getPose(*view).getTransform(); - Vec3 v = cam.backproject(pose, pt.coords().cast(), 10.0, true); - pos3d.push_back(v); - } - } - } - newLandmark.X = Vec3::Zero(); - for(const Vec3& p: pos3d) - newLandmark.X += p; - newLandmark.X /= pos3d.size(); - } - - ALICEVISION_LOG_DEBUG("Track stats"); - { - std::ostringstream osTrack; - //-- Display stats: - // - number of images - // - number of tracks - std::set set_imagesId; - tracksUtilsMap::imageIdInTracks(map_selectedTracks, set_imagesId); - osTrack << "------------------" << "\n" - << "-- Tracks Stats --" << "\n" - << " Tracks number: " << tracksBuilder.nbTracks() << "\n" - << " Images Id: " << "\n"; - std::copy(set_imagesId.begin(), - set_imagesId.end(), - std::ostream_iterator(osTrack, ", ")); - osTrack << "\n------------------" << "\n"; - - 
std::map map_Occurence_TrackLength; - tracksUtilsMap::tracksLength(map_selectedTracks, map_Occurence_TrackLength); - osTrack << "TrackLength, Occurrence" << "\n"; - for (std::map::const_iterator iter = map_Occurence_TrackLength.begin(); - iter != map_Occurence_TrackLength.end(); ++iter) { - osTrack << "\t" << iter->first << "\t" << iter->second << "\n"; - } - osTrack << "\n"; - ALICEVISION_LOG_DEBUG(osTrack.str()); - } - } - - // Export initial structure - if (!_loggingFile.empty()) - { - sfmDataIO::Save(_sfmData, - (fs::path(_loggingFile).parent_path() / "initial_structure.ply").string(), - sfmDataIO::ESfMData(sfmDataIO::EXTRINSICS | sfmDataIO::STRUCTURE)); - } - return !_sfmData.structure.empty(); -} - // Adjust the scene (& remove outliers) bool ReconstructionEngine_panorama::Adjust() { - // refine sfm scene (in a 3 iteration process (free the parameters regarding their incertainty order)): BundleAdjustmentCeres::CeresOptions options; - options.useParametersOrdering = false; // disable parameters ordering - + options.useParametersOrdering = false; + options.summary = true; + BundleAdjustmentCeres BA(options); - // - refine only Structure and translations - bool success = BA.adjust(_sfmData, BundleAdjustment::REFINE_STRUCTURE); // BundleAdjustment::REFINE_ROTATION | + bool success = BA.adjust(_sfmData, BundleAdjustment::REFINE_ROTATION | BundleAdjustment::REFINE_INTRINSICS_FOCAL | BundleAdjustment::REFINE_INTRINSICS_OPTICALCENTER_IF_ENOUGH_DATA); if(success) { ALICEVISION_LOG_DEBUG("Rotations successfully refined."); @@ -513,30 +409,6 @@ bool ReconstructionEngine_panorama::Adjust() ALICEVISION_LOG_DEBUG("Failed to refine the rotations only."); } - if(success && !_lockAllIntrinsics) - { - success = BA.adjust(_sfmData, BundleAdjustment::REFINE_STRUCTURE | BundleAdjustment::REFINE_INTRINSICS_FOCAL | BundleAdjustment::REFINE_INTRINSICS_DISTORTION); // BundleAdjustment::REFINE_ROTATION | REFINE_INTRINSICS_OPTICALCENTER_ALWAYS - if(success) - { - 
ALICEVISION_LOG_DEBUG("Rotations and intrinsics successfully refined."); - } - else - { - ALICEVISION_LOG_DEBUG("Failed to refine the rotations/intrinsics."); - } - } - - /* - // Remove outliers (residual error) - const size_t pointcount_initial = _sfmData.structure.size(); - RemoveOutliers_PixelResidualError(_sfmData, 4.0); - const size_t pointcount_pixelresidual_filter = _sfmData.structure.size(); - - ALICEVISION_LOG_DEBUG("Outlier removal (remaining points):\n" - "\t- # landmarks initial: " << pointcount_initial << "\n" - "\t- # landmarks after pixel residual filter: " << pointcount_pixelresidual_filter); - */ - return success; } @@ -559,18 +431,11 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging poseWiseMatches[Pair(v1->getPoseId(), v2->getPoseId())].insert(pair); } - sfm::Constraints2D constraints2d = _sfmData.getConstraints2D(); + sfm::Constraints2D & constraints2d = _sfmData.getConstraints2D(); ALICEVISION_LOG_INFO("Relative pose computation:"); -// boost::progress_display progressBar( poseWiseMatches.size(), std::cout, "\n- Relative pose computation -\n" ); -// #pragma omp parallel for schedule(dynamic) - // Compute the relative pose from pairwise point matches: for (int i = 0; i < poseWiseMatches.size(); ++i) { -// #pragma omp critical -// { -// ++progressBar; -// } { PoseWiseMatches::const_iterator iter (poseWiseMatches.begin()); std::advance(iter, i); diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp index bfe39b2bc8..64f3789b98 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.hpp @@ -149,9 +149,6 @@ class ReconstructionEngine_panorama : public ReconstructionEngine HashMap& map_globalR); public: - /// Compute the initial structure of the scene - bool Compute_Initial_Structure(matching::PairwiseMatches& 
tripletWise_matches); - /// Adjust the scene (& remove outliers) bool Adjust(); From 382ce56cdcd08bcc986279216613772fe3df44ab Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Wed, 25 Sep 2019 14:55:04 +0200 Subject: [PATCH 053/174] add panorama size estimation algorithm --- .../pipeline/main_panoramaStitching.cpp | 137 +++++++++++++----- 1 file changed, 103 insertions(+), 34 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 63c3e23137..8da7c5fee8 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -68,6 +68,108 @@ inline std::istream& operator>>(std::istream& in, std::pair& out) return in; } +bool estimate_panorama_size(const sfmData::SfMData & sfmData, float fisheyeMaskingMargin, std::pair & panoramaSize) { + + panoramaSize.first = 256; + panoramaSize.second = 128; + + image::Image BufferCoords(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + { + int imageIndex = 0; + std::vector determinants; + + for(auto& viewIt: sfmData.getViews()) + { + IndexT viewId = viewIt.first; + const sfmData::View& view = *viewIt.second.get(); + if(!sfmData.isPoseAndIntrinsicDefined(&view)) + continue; + + + const sfmData::CameraPose camPose = sfmData.getPose(view); + const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); + + + const float maxRadius = std::min(intrinsic.w(), intrinsic.h()) * 0.5f * (1.0 - fisheyeMaskingMargin); + const Vec2i center(intrinsic.w()/2.f, intrinsic.h()/2.f); + + for(int y = 0; y < panoramaSize.second; ++y) + { + for(int x = 0; x < panoramaSize.first; ++x) + { + BufferCoords(y, x).a() = 0.0; + // equirectangular to unit vector + Vec3 ray = SphericalMapping::get3DPoint(Vec2(x,y), panoramaSize.first, panoramaSize.second); + + if(camPose.getTransform().depth(ray) < 0) + { + // point is not in front of the camera + continue; + } + + // 
unit vector to camera + const Vec2 pix_disto = intrinsic.project(camPose.getTransform(), ray, true); + + if( pix_disto(0) < 0 || pix_disto(0) >= intrinsic.w() || + pix_disto(1) < 0 || pix_disto(1) >= intrinsic.h()) + { + //the pixel is out of the image + continue; + } + + const float dist = std::sqrt((pix_disto(0)-center(0)) * (pix_disto(0)-center(0)) + (pix_disto(1)-center(1)) * (pix_disto(1)-center(1))); + if (dist > maxRadius * 0.8) + { + continue; + } + + BufferCoords(y, x).r() = pix_disto(0); + BufferCoords(y, x).g() = pix_disto(1); + BufferCoords(y, x).a() = 1.0; + } + } + + for(int y = 1; y < panoramaSize.second - 1; ++y) + { + for(int x = 1; x < panoramaSize.first - 1; ++x) + { + double x00 = BufferCoords(y, x).r(); + double x10 = BufferCoords(y, x + 1).r(); + double x01 = BufferCoords(y + 1, x).r(); + + double y00 = BufferCoords(y, x).g(); + double y10 = BufferCoords(y, x + 1).g(); + double y01 = BufferCoords(y + 1, x).g(); + + double m00 = BufferCoords(y, x).a(); + double m10 = BufferCoords(y, x + 1).a(); + double m01 = BufferCoords(y + 1, x).a(); + + if (m00 != 1.0 || m10 != 1.0 || m01 != 1.0) { + continue; + } + + double dxx = x10 - x00; + double dxy = x01 - x00; + double dyx = y10 - y00; + double dyy = y01 - y00; + + float det = std::abs(dxx*dyy - dxy*dyx); + determinants.push_back(det); + } + } + } + + std::nth_element(determinants.begin(), determinants.begin() + determinants.size() / 5, determinants.end()); + double scale = sqrt(determinants[determinants.size() / 5]); + + panoramaSize.first *= scale; + panoramaSize.second *= scale; + } + + return true; +} + int main(int argc, char **argv) { // command-line parameters @@ -172,40 +274,7 @@ int main(int argc, char **argv) if(panoramaSize.first == 0 || panoramaSize.second == 0) { - sfmData::EEXIFOrientation orientation = sfmData.getView(*validViews.begin()).getMetadataOrientation(); - if(panoramaSize.first != 0 || panoramaSize.second != 0) - { - int s = std::max(panoramaSize.first, panoramaSize.second); 
- panoramaSize.first = panoramaSize.second = s; - } - else - { - ALICEVISION_LOG_INFO("Automatic panorama size choice."); - // TODO: better heuristic to decide the output resolution - panoramaSize.first = 0; - for(auto& viewIt: sfmData.getViews()) - { - IndexT viewId = viewIt.first; - const sfmData::View& view = *viewIt.second.get(); - if(!sfmData.isPoseAndIntrinsicDefined(&view)) - continue; - - if(orientation == sfmData::EEXIFOrientation::RIGHT || - orientation == sfmData::EEXIFOrientation::LEFT || - orientation == sfmData::EEXIFOrientation::RIGHT_REVERSED || - orientation == sfmData::EEXIFOrientation::LEFT_REVERSED) - { - panoramaSize.first += view.getHeight(); - panoramaSize.second = std::max(panoramaSize.second, int(view.getWidth())); - } - else - { - panoramaSize.first += view.getWidth(); - panoramaSize.second = std::max(panoramaSize.second, int(view.getHeight())); - } - ALICEVISION_LOG_INFO("Update output panorama size: " << panoramaSize.first << ", " << panoramaSize.second); - } - } + estimate_panorama_size(sfmData, fisheyeMasking, panoramaSize); } panoramaSize.first *= scaleFactor; From 7f5df6cfa88472a4854107a90f0a4015f1a6e354 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Thu, 26 Sep 2019 10:00:20 +0200 Subject: [PATCH 054/174] Adding distortion estimation in radial1 --- src/aliceVision/sfm/BundleAdjustmentCeres.cpp | 8 +- src/aliceVision/sfm/BundleAdjustmentCeres.hpp | 1 + .../sfm/ResidualErrorConstraintFunctor.hpp | 79 +++++++++++++++---- .../ReconstructionEngine_panorama.cpp | 18 ++++- 4 files changed, 84 insertions(+), 22 deletions(-) diff --git a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp index 7d38d790cc..aef5421748 100644 --- a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp +++ b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp @@ -90,14 +90,12 @@ ceres::CostFunction* createRigCostFunctionFromIntrinsics(const IntrinsicBase* in */ ceres::CostFunction* 
createConstraintsCostFunctionFromIntrinsics(const IntrinsicBase* intrinsicPtr, const Vec2& observation_first, const Vec2& observation_second) { - - switch(intrinsicPtr->getType()) { case PINHOLE_CAMERA: return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_Pinhole(observation_first.homogeneous(), observation_second.homogeneous())); - /*case PINHOLE_CAMERA: - return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_PinholeRadialK1(observation_first.homogeneous(), observation_second.homogeneous()));*/ + case PINHOLE_CAMERA_RADIAL1: + return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_PinholeRadialK1(observation_first.homogeneous(), observation_second.homogeneous())); default: throw std::logic_error("Cannot create cost function, unrecognized intrinsic type in BA."); } @@ -271,6 +269,7 @@ void BundleAdjustmentCeres::setSolverOptions(ceres::Solver::Options& solverOptio solverOptions.minimizer_progress_to_stdout = _ceresOptions.verbose; solverOptions.logging_type = ceres::SILENT; solverOptions.num_threads = _ceresOptions.nbThreads; + #if CERES_VERSION_MAJOR < 2 solverOptions.num_linear_solver_threads = _ceresOptions.nbThreads; #endif @@ -627,7 +626,6 @@ void BundleAdjustmentCeres::addConstraints2DToProblem(const sfmData::SfMData& sf assert(intrinsicBlockPtr_1 == intrinsicBlockPtr_2); ceres::CostFunction* costFunction = createConstraintsCostFunctionFromIntrinsics(sfmData.getIntrinsicPtr(view_1.getIntrinsicId()), constraint.ObservationFirst.x, constraint.ObservationSecond.x); - problem.AddResidualBlock(costFunction, lossFunction, intrinsicBlockPtr_1, poseBlockPtr_1, poseBlockPtr_2); } } diff --git a/src/aliceVision/sfm/BundleAdjustmentCeres.hpp b/src/aliceVision/sfm/BundleAdjustmentCeres.hpp index 89da1ba9cb..a4c5d89bf5 100644 --- a/src/aliceVision/sfm/BundleAdjustmentCeres.hpp +++ b/src/aliceVision/sfm/BundleAdjustmentCeres.hpp @@ -288,6 +288,7 @@ class BundleAdjustmentCeres : public BundleAdjustment /// rig 
sub-poses blocks wrapper /// block: ceres angleAxis(3) + translation(3) HashMap>> _rigBlocks; + }; } // namespace sfm diff --git a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp index c1a91f12e2..60448b0681 100644 --- a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp +++ b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp @@ -1,12 +1,25 @@ #pragma once #include +#include #include + + namespace aliceVision { namespace sfm { +template +double getJetValue(const T & val) { + return val.a; +} + +template <> +double getJetValue(const double & val) { + return val; +} + /** * @brief Ceres functor to use a pair of pinhole on a pure rotation 2D constraint. * @@ -32,7 +45,7 @@ struct ResidualErrorConstraintFunctor_Pinhole }; template - void lift(const T* const cam_K, const Vec3 pt, Eigen::Vector & out) const + void lift(const T* const cam_K, const Vec3 pt, Eigen::Vector< T, 3> & out) const { const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; @@ -110,6 +123,8 @@ struct ResidualErrorConstraintFunctor_Pinhole * - 3 => the camera extrinsic data block for the second view * */ + + struct ResidualErrorConstraintFunctor_PinholeRadialK1 { ResidualErrorConstraintFunctor_PinholeRadialK1(const Vec3 & pos_2dpoint_first, const Vec3 & pos_2dpoint_second) @@ -125,23 +140,46 @@ struct ResidualErrorConstraintFunctor_PinholeRadialK1 OFFSET_DISTO_K1 = 3 }; + static double distoFunctor(const std::vector & params, double r2) + { + const double k1 = params[0]; + return r2 * Square(1.+r2*k1); + } + template void lift(const T* const cam_K, const Vec3 pt, Eigen::Vector & out) const { - /*const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; + const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; const T& k1 = cam_K[OFFSET_DISTO_K1]; - // Apply distortion (xd,yd) = 
disto(x_u,y_u) - const T r2 = x_u*x_u + y_u*y_u; - const T r_coeff = (T(1) + k1*r2); - const T x_d = x_u * r_coeff; - const T y_d = y_u * r_coeff; - - out(0) = (pt(0) - principal_point_x) / focal; - out(1) = (pt(1) - principal_point_y) / focal; - out(2) = static_cast(1.0);*/ + //Unshift then unscale back to meters + T xd = (pt(0) - principal_point_x) / focal; + T yd = (pt(1) - principal_point_y) / focal; + T distorted_radius = sqrt(xd*xd + yd*yd); + + /*A hack to obtain undistorted point even if using automatic diff*/ + double xd_real, yd_real, k1_real; + xd_real = getJetValue(xd); + yd_real = getJetValue(yd); + k1_real = getJetValue(k1); + + /*get rescaler*/ + std::vector distortionParams = {k1_real}; + double distorted_radius_square = xd_real * xd_real + yd_real * yd_real; + double rescaler = ::sqrt(camera::radial_distortion::bisection_Radius_Solve(distortionParams, distorted_radius_square, distoFunctor));; + + if (distorted_radius < 1e-12) { + out(0) = xd; + out(1) = yd; + out(2) = static_cast(1.0); + } + else { + out(0) = (xd / distorted_radius) * static_cast(rescaler); + out(1) = (yd / distorted_radius) * static_cast(rescaler); + out(2) = static_cast(1.0); + } } template @@ -150,11 +188,20 @@ struct ResidualErrorConstraintFunctor_PinholeRadialK1 const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; + const T& k1 = cam_K[OFFSET_DISTO_K1]; + //Project on plane Eigen::Vector proj_pt = pt / pt(2); - out(0) = proj_pt(0) * focal + principal_point_x; - out(1) = proj_pt(1) * focal + principal_point_y; + //Apply distortion + const T r2 = proj_pt(0)*proj_pt(0) + proj_pt(1)*proj_pt(1); + const T r_coeff = (T(1) + k1*r2); + const T x_d = proj_pt(0) * r_coeff; + const T y_d = proj_pt(1) * r_coeff; + + //Scale and shift + out(0) = x_d * focal + principal_point_x; + out(1) = y_d * focal + principal_point_y; out(2) = static_cast(1.0); } @@ -176,18 +223,22 @@ 
struct ResidualErrorConstraintFunctor_PinholeRadialK1 Eigen::Vector pt3d_1; + //From pixel to meters lift(cam_K, m_pos_2dpoint_first, pt3d_1); + //Build relative rotation ceres::AngleAxisToRotationMatrix(cam_R1, oneRo.data()); ceres::AngleAxisToRotationMatrix(cam_R2, twoRo.data()); - twoRone = twoRo * oneRo.transpose(); + //Transform point Eigen::Vector pt3d_2_est = twoRone * pt3d_1; + //Project back to image space in pixels Eigen::Vector pt2d_2_est; unlift(cam_K, pt3d_2_est, pt2d_2_est); + //Compute residual Eigen::Vector residual = pt2d_2_est - m_pos_2dpoint_second; out_residuals[0] = residual(0); diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 83268fb7bf..8719000725 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -398,15 +398,27 @@ bool ReconstructionEngine_panorama::Adjust() options.useParametersOrdering = false; options.summary = true; + /*Minimize only rotation first*/ BundleAdjustmentCeres BA(options); - bool success = BA.adjust(_sfmData, BundleAdjustment::REFINE_ROTATION | BundleAdjustment::REFINE_INTRINSICS_FOCAL | BundleAdjustment::REFINE_INTRINSICS_OPTICALCENTER_IF_ENOUGH_DATA); + bool success = BA.adjust(_sfmData, BundleAdjustment::REFINE_ROTATION); if(success) { - ALICEVISION_LOG_DEBUG("Rotations successfully refined."); + ALICEVISION_LOG_INFO("Rotations successfully refined."); } else { - ALICEVISION_LOG_DEBUG("Failed to refine the rotations only."); + ALICEVISION_LOG_INFO("Failed to refine the rotations only."); + } + + /*Minimize All then*/ + success = BA.adjust(_sfmData, BundleAdjustment::REFINE_ROTATION | BundleAdjustment::REFINE_INTRINSICS_ALL); + if(success) + { + ALICEVISION_LOG_INFO("Bundle successfully refined."); + } + else + { + ALICEVISION_LOG_INFO("Failed to refine Everything."); } return success; From 
804a4afd0c0c675cf02159cd1e6131c8c5c75614 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Thu, 26 Sep 2019 14:19:22 +0200 Subject: [PATCH 055/174] adding check on reprojection --- src/aliceVision/camera/IntrinsicBase.hpp | 6 ++++ src/aliceVision/camera/Pinhole.hpp | 34 +++++++++++++++++++ src/aliceVision/camera/PinholeRadial.hpp | 17 +++++----- .../pipeline/main_panoramaStitching.cpp | 12 ++++--- 4 files changed, 56 insertions(+), 13 deletions(-) diff --git a/src/aliceVision/camera/IntrinsicBase.hpp b/src/aliceVision/camera/IntrinsicBase.hpp index 82a54058b1..1663fcf0db 100644 --- a/src/aliceVision/camera/IntrinsicBase.hpp +++ b/src/aliceVision/camera/IntrinsicBase.hpp @@ -349,6 +349,12 @@ struct IntrinsicBase return false; } + /** + * @brief Return true if this ray should be visible in the image + * @return true if this ray is visible theorically + */ + virtual bool isVisibleRay(const Vec3 & ray) const = 0; + /** * @brief Generate an unique Hash from the camera parameters (used for grouping) * @return Unique Hash from the camera parameters diff --git a/src/aliceVision/camera/Pinhole.hpp b/src/aliceVision/camera/Pinhole.hpp index 949d48dcd6..01e6145735 100644 --- a/src/aliceVision/camera/Pinhole.hpp +++ b/src/aliceVision/camera/Pinhole.hpp @@ -154,6 +154,40 @@ class Pinhole : public IntrinsicBase return true; } + /** + * @brief Return true if this ray should be visible in the image + * @return true if this ray is visible theorically + */ + virtual bool isVisibleRay(const Vec3 & ray) const override { + + if (ray(2) < 0) { + return false; + } + + Vec2 proj = ray.head(2) / ray(2); + + double xmin; + double ymin; + double xmax; + double ymax; + + Vec2 p1 = remove_disto(ima2cam(Vec2(0,0))); + Vec2 p2 = remove_disto(ima2cam(Vec2(_w,0))); + Vec2 p3 = remove_disto(ima2cam(Vec2(_w,_h))); + Vec2 p4 = remove_disto(ima2cam(Vec2(0,_h))); + + xmin = std::min(p4(0), (std::min(p3(0), std::min(p1(0), p2(0))))); + ymin = std::min(p4(1), (std::min(p3(1), std::min(p1(1), p2(1))))); 
+ xmax = std::max(p4(0), (std::max(p3(0), std::max(p1(0), p2(0))))); + ymax = std::max(p4(1), (std::max(p3(1), std::max(p1(1), p2(1))))); + + if (proj(0) < xmin || proj(0) > xmax || proj(1) < ymin || proj(1) > ymax) { + return false; + } + + return true; + } + /// Return the un-distorted pixel (with removed distortion) virtual Vec2 get_ud_pixel(const Vec2& p) const override {return p;} diff --git a/src/aliceVision/camera/PinholeRadial.hpp b/src/aliceVision/camera/PinholeRadial.hpp index ddcf112e9c..2ef440dfa2 100644 --- a/src/aliceVision/camera/PinholeRadial.hpp +++ b/src/aliceVision/camera/PinholeRadial.hpp @@ -60,15 +60,15 @@ class PinholeRadialK1 : public Pinhole { } - PinholeRadialK1* clone() const { return new PinholeRadialK1(*this); } - void assign(const IntrinsicBase& other) { *this = dynamic_cast(other); } + PinholeRadialK1* clone() const override { return new PinholeRadialK1(*this); } + void assign(const IntrinsicBase& other) override { *this = dynamic_cast(other); } - EINTRINSIC getType() const { return PINHOLE_CAMERA_RADIAL1; } + EINTRINSIC getType() const override { return PINHOLE_CAMERA_RADIAL1; } - virtual bool have_disto() const { return true; } + virtual bool have_disto() const override { return true; } /// Add distortion to the point p (assume p is in the camera frame [normalized coordinates]) - virtual Vec2 add_disto(const Vec2 & p) const { + virtual Vec2 add_disto(const Vec2 & p) const override{ const double k1 = _distortionParams.at(0); @@ -79,7 +79,7 @@ class PinholeRadialK1 : public Pinhole } /// Remove distortion (return p' such that disto(p') = p) - virtual Vec2 remove_disto(const Vec2& p) const { + virtual Vec2 remove_disto(const Vec2& p) const override { // Compute the radius from which the point p comes from thanks to a bisection // Minimize disto(radius(p')^2) == actual Squared(radius(p)) @@ -91,17 +91,18 @@ class PinholeRadialK1 : public Pinhole } /// Return the un-distorted pixel (with removed distortion) - virtual Vec2 
get_ud_pixel(const Vec2& p) const + virtual Vec2 get_ud_pixel(const Vec2& p) const override { return cam2ima( remove_disto(ima2cam(p)) ); } /// Return the distorted pixel (with added distortion) - virtual Vec2 get_d_pixel(const Vec2& p) const + virtual Vec2 get_d_pixel(const Vec2& p) const override { return cam2ima( add_disto(ima2cam(p)) ); } + private: /// Functor to solve Square(disto(radius(p'))) = r^2 diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 8da7c5fee8..60ecbf16c0 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -274,7 +274,7 @@ int main(int argc, char **argv) if(panoramaSize.first == 0 || panoramaSize.second == 0) { - estimate_panorama_size(sfmData, fisheyeMasking, panoramaSize); + estimate_panorama_size(sfmData, fisheyeMaskingMargin, panoramaSize); } panoramaSize.first *= scaleFactor; @@ -331,11 +331,11 @@ int main(int argc, char **argv) for(int x = 0; x < imageOut.Width(); ++x) { // equirectangular to unit vector - Vec3 ray = SphericalMapping::get3DPoint(Vec2(x,y), imageOut.Width(), imageOut.Height()); + Vec3 ray = SphericalMapping::get3DPoint(Vec2(x,y), imageOut.Width(), imageOut.Height()); - if(camPose.getTransform().depth(ray) < 0) - { - // point is not in front of the camera + //Check that this ray should be visible + Vec3 transformedRay = camPose.getTransform()(ray); + if (!intrinsic.isVisibleRay(transformedRay)) { continue; } @@ -364,6 +364,8 @@ int main(int argc, char **argv) } } + + const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); if(contribution > 0.0f) { From 664e2c7908e929018b0102eeb3e129ae9a9de97a Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Thu, 26 Sep 2019 16:02:26 +0200 Subject: [PATCH 056/174] Add fisheye cost function for constraint ... 
--- src/aliceVision/camera/PinholeFisheye.hpp | 8 + src/aliceVision/sfm/BundleAdjustmentCeres.cpp | 2 + .../sfm/ResidualErrorConstraintFunctor.hpp | 177 ++++++++++++++++++ 3 files changed, 187 insertions(+) diff --git a/src/aliceVision/camera/PinholeFisheye.hpp b/src/aliceVision/camera/PinholeFisheye.hpp index 1883f5123b..abcc7f8e60 100644 --- a/src/aliceVision/camera/PinholeFisheye.hpp +++ b/src/aliceVision/camera/PinholeFisheye.hpp @@ -99,6 +99,14 @@ class PinholeFisheye : public Pinhole { return cam2ima( add_disto(ima2cam(p)) ); } + + virtual bool isVisibleRay(const Vec3 & ray) const override { + if (ray(2) < 0.0) { + return false; + } + + return true; + } }; } // namespace camera diff --git a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp index aef5421748..7cd978226e 100644 --- a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp +++ b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp @@ -96,6 +96,8 @@ ceres::CostFunction* createConstraintsCostFunctionFromIntrinsics(const Intrinsic return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_Pinhole(observation_first.homogeneous(), observation_second.homogeneous())); case PINHOLE_CAMERA_RADIAL1: return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_PinholeRadialK1(observation_first.homogeneous(), observation_second.homogeneous())); + case PINHOLE_CAMERA_FISHEYE: + return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_PinholeFisheye(observation_first.homogeneous(), observation_second.homogeneous())); default: throw std::logic_error("Cannot create cost function, unrecognized intrinsic type in BA."); } diff --git a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp index 60448b0681..8036e0589c 100644 --- a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp +++ b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp @@ -252,5 +252,182 @@ struct 
ResidualErrorConstraintFunctor_PinholeRadialK1 }; +/** + * @brief Ceres functor to use a pair of fisheye on a pure rotation 2D constraint. + * + * Data parameter blocks are the following <2,7,3,3> + * - 2 => dimension of the residuals, + * - 7 => the intrinsic data block for the first view, + * - 3 => the camera extrinsic data block for the first view + * - 3 => the camera extrinsic data block for the second view + * + */ + +struct ResidualErrorConstraintFunctor_PinholeFisheye +{ + ResidualErrorConstraintFunctor_PinholeFisheye(const Vec3 & pos_2dpoint_first, const Vec3 & pos_2dpoint_second) + : m_pos_2dpoint_first(pos_2dpoint_first), m_pos_2dpoint_second(pos_2dpoint_second) + { + } + + enum { + OFFSET_FOCAL_LENGTH = 0, + OFFSET_PRINCIPAL_POINT_X = 1, + OFFSET_PRINCIPAL_POINT_Y = 2, + OFFSET_DISTO_K1 = 3, + OFFSET_DISTO_K2 = 4, + OFFSET_DISTO_K3 = 5, + OFFSET_DISTO_K4 = 6, + }; + + template + void lift(const T* const cam_K, const Vec3 pt, Eigen::Vector & out) const + { + const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; + const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; + const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; + const T& k1 = cam_K[OFFSET_DISTO_K1]; + const T& k2 = cam_K[OFFSET_DISTO_K2]; + const T& k3 = cam_K[OFFSET_DISTO_K3]; + const T& k4 = cam_K[OFFSET_DISTO_K4]; + + //Unshift then unscale back to meters + T xd = (pt(0) - principal_point_x) / focal; + T yd = (pt(1) - principal_point_y) / focal; + T distorted_radius = sqrt(xd*xd + yd*yd); + + /*A hack to obtain undistorted point even if using automatic diff*/ + double xd_real, yd_real, k1_real, k2_real, k3_real, k4_real; + xd_real = getJetValue(xd); + yd_real = getJetValue(yd); + k1_real = getJetValue(k1); + k2_real = getJetValue(k2); + k3_real = getJetValue(k3); + k4_real = getJetValue(k4); + + double scale = 1.0; + { + const double eps = 1e-8; + const double theta_dist = sqrt(xd_real * xd_real + yd_real * yd_real); + if (theta_dist > eps) + { + double theta = theta_dist; + for
(int j = 0; j < 10; ++j) + { + const double theta2 = theta*theta; + const double theta4 = theta2*theta2; + const double theta6 = theta4*theta2; + const double theta8 = theta6*theta2; + + double theta_fix = (theta * (1.0 + k1_real * theta2 + k2_real * theta4 + k3_real * theta6 + k4_real * theta8) - theta_dist) / (1.0 + 3.0 * k1_real * theta2 + 5.0 * k2_real * theta4 + 7.0 * k3_real * theta6 + 9.0 *k4_real * theta8); + + theta = theta - theta_fix; + } + + scale = std::tan(theta); + } + } + + if (distorted_radius < 1e-12) { + out(0) = xd; + out(1) = yd; + out(2) = static_cast(1.0); + } + else { + out(0) = (xd / distorted_radius) * static_cast(scale); + out(1) = (yd / distorted_radius) * static_cast(scale); + out(2) = static_cast(1.0); + } + } + + template + void unlift(const T* const cam_K, const Eigen::Vector & pt, Eigen::Vector & out) const + { + const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; + const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; + const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; + const T& k1 = cam_K[OFFSET_DISTO_K1]; + const T& k2 = cam_K[OFFSET_DISTO_K2]; + const T& k3 = cam_K[OFFSET_DISTO_K3]; + const T& k4 = cam_K[OFFSET_DISTO_K4]; + + //Project on plane + Eigen::Vector proj_pt = pt / pt(2); + + //Apply distortion + const T x_u = proj_pt(0); + const T y_u = proj_pt(1); + const T dist = sqrt(x_u*x_u + y_u*y_u); + const T theta = atan(dist); + const T theta2 = theta*theta; + const T theta3 = theta2*theta; + const T theta4 = theta2*theta2; + const T theta5 = theta4*theta; + const T theta6 = theta3*theta3; + const T theta7 = theta6*theta; + const T theta8 = theta4*theta4; + const T theta9 = theta8*theta; + + const T theta_dist = theta + k1*theta3 + k2*theta5 + k3*theta7 + k4*theta9; + + const T inv_r = dist > T(1e-8) ? T(1.0)/dist : T(1.0); + const T cdist = dist > T(1e-8) ? 
theta_dist * inv_r : T(1); + + const T x_d = x_u * cdist; + const T y_d = y_u * cdist; + + //Scale and shift + out(0) = x_d * focal + principal_point_x; + out(1) = y_d * focal + principal_point_y; + out(2) = static_cast(1.0); + } + + /** + * @param[in] cam_K: Camera intrinsics( focal, principal point [x,y] ) + * @param[in] cam_Rt: Camera parameterized using one block of 6 parameters [R;t]: + * - 3 for rotation(angle axis), 3 for translation + * @param[in] pos_3dpoint + * @param[out] out_residuals + */ + template + bool operator()( + const T* const cam_K, + const T* const cam_R1, + const T* const cam_R2, + T* out_residuals) const + { + Eigen::Matrix oneRo, twoRo, twoRone; + + Eigen::Vector pt3d_1; + + //From pixel to meters + lift(cam_K, m_pos_2dpoint_first, pt3d_1); + + //Build relative rotation + ceres::AngleAxisToRotationMatrix(cam_R1, oneRo.data()); + ceres::AngleAxisToRotationMatrix(cam_R2, twoRo.data()); + twoRone = twoRo * oneRo.transpose(); + + //Transform point + Eigen::Vector pt3d_2_est = twoRone * pt3d_1; + + //Project back to image space in pixels + Eigen::Vector pt2d_2_est; + unlift(cam_K, pt3d_2_est, pt2d_2_est); + + //Compute residual + Eigen::Vector residual = pt2d_2_est - m_pos_2dpoint_second; + + out_residuals[0] = residual(0); + out_residuals[1] = residual(1); + + return true; + } + + Vec3 m_pos_2dpoint_first; // The 2D observation in first view + Vec3 m_pos_2dpoint_second; // The 2D observation in second view +}; + + } // namespace sfm } // namespace aliceVision From fce190999a8d0c069c988497955807d780a1b43e Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Fri, 27 Sep 2019 09:58:46 +0200 Subject: [PATCH 057/174] Adding a skeleton for equidistant camera --- src/aliceVision/camera/Equidistant.hpp | 184 +++++++++++++++++++++++++ 1 file changed, 184 insertions(+) create mode 100644 src/aliceVision/camera/Equidistant.hpp diff --git a/src/aliceVision/camera/Equidistant.hpp b/src/aliceVision/camera/Equidistant.hpp new file mode 100644 index 
0000000000..6dcd1d6bfa --- /dev/null +++ b/src/aliceVision/camera/Equidistant.hpp @@ -0,0 +1,184 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2016 AliceVision contributors. +// Copyright (c) 2012 openMVG contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. + +#pragma once + +#include +#include +#include +#include +#include + +#include +#include + + +namespace aliceVision { +namespace camera { + +/// Define an equidistant camera (store a K 3x3 matrix) +/// with intrinsic parameters defining the K calibration matrix +class Equidistant : public IntrinsicBase +{ + public: + + Equidistant() = default; + + Equidistant( + unsigned int w, unsigned int h, + const Mat3 K) + :IntrinsicBase(w,h) + { + _K = K; + _Kinv = _K.inverse(); + } + + Equidistant( + unsigned int w, unsigned int h, + double focal_length_pix, + double ppx, double ppy, const std::vector& distortionParams = {}) + : IntrinsicBase(w,h) + , _distortionParams(distortionParams) + { + setK(focal_length_pix, ppx, ppy); + } + + virtual ~Equidistant() {} + + virtual Equidistant* clone() const override { return new Equidistant(*this); } + virtual void assign(const IntrinsicBase& other) override { *this = dynamic_cast(other); } + + virtual bool isValid() const override { return focal() > 0 && IntrinsicBase::isValid(); } + + virtual EINTRINSIC getType() const override { return EQUIDISTANT_CAMERA; } + std::string getTypeStr() const { return EINTRINSIC_enumToString(getType()); } + + double getFocalLengthPix() const { return _K(0,0); } + + Vec2 getPrincipalPoint() const { return Vec2(_K(0,2), _K(1,2)); } + + const Mat3& K() const { return _K; } + const Mat3& Kinv() const { return _Kinv; } + void setK(double focal_length_pix, double ppx, double ppy) + { + _K << focal_length_pix, 0., ppx, 0., focal_length_pix, ppy, 0., 0., 1.; + _Kinv 
= _K.inverse(); + } + void setK(const Mat3 &K) { _K = K;} + /// Return the value of the focal in pixels + inline double focal() const {return _K(0,0);} + inline Vec2 principal_point() const {return Vec2(_K(0,2), _K(1,2));} + + // Get bearing vector of p point (image coord) + Vec3 operator () (const Vec2& p) const override + { + Vec3 p3(p(0),p(1),1.0); + return (_Kinv * p3).normalized(); + } + + // Transform a point from the camera plane to the image plane + Vec2 cam2ima(const Vec2& p) const override + { + return focal() * p + principal_point(); + } + + // Transform a point from the image plane to the camera plane + Vec2 ima2cam(const Vec2& p) const override + { + return ( p - principal_point() ) / focal(); + } + + virtual bool have_disto() const override { return false; } + + virtual Vec2 add_disto(const Vec2& p) const override { return p; } + + virtual Vec2 remove_disto(const Vec2& p) const override { return p; } + + virtual double imagePlane_toCameraPlaneError(double value) const override + { + return value / focal(); + } + + virtual Mat34 get_projective_equivalent(const geometry::Pose3 & pose) const override + { + Mat34 P; + P_From_KRt(K(), pose.rotation(), pose.translation(), &P); + return P; + } + + // Data wrapper for non linear optimization (get data) + std::vector getParams() const override + { + std::vector params = {_K(0,0), _K(0,2), _K(1,2)}; + params.insert(params.end(), _distortionParams.begin(), _distortionParams.end()); + return params; + } + + bool hasDistortion() const override + { + for(double d: _distortionParams) + if(d != 0.0) + return true; + return false; + } + + const std::vector& getDistortionParams() const + { + return _distortionParams; + } + + void setDistortionParams(const std::vector& distortionParams) + { + if(distortionParams.size() != _distortionParams.size()) + { + std::stringstream s; + s << "Pinhole::setDistortionParams: wrong number of distortion parameters (expected: " << _distortionParams.size() << ", given:" << 
distortionParams.size() << ")."; + throw std::runtime_error(s.str()); + } + _distortionParams = distortionParams; + } + + // Data wrapper for non linear optimization (update from data) + bool updateFromParams(const std::vector& params) override + { + if (params.size() != (3 + _distortionParams.size())) + return false; + + this->setK(params[0], params[1], params[2]); + setDistortionParams({params.begin() + 3, params.end()}); + + return true; + } + + /** + * @brief Return true if this ray should be visible in the image + * @return true if this ray is visible theorically + */ + virtual bool isVisibleRay(const Vec3 & ray) const override { + + if (ray(2) < 0) { + return false; + } + + return true; + } + + /// Return the un-distorted pixel (with removed distortion) + virtual Vec2 get_ud_pixel(const Vec2& p) const override {return p;} + + /// Return the distorted pixel (with added distortion) + virtual Vec2 get_d_pixel(const Vec2& p) const override {return p;} + +private: + // Focal & principal point are embed into the calibration matrix K + Mat3 _K, _Kinv; +protected: + std::vector _distortionParams; +}; + +} // namespace camera +} // namespace aliceVision From f927ee3336cc9db21b2264b5cb0fe680b3b485d4 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Mon, 30 Sep 2019 08:27:03 +0200 Subject: [PATCH 058/174] Reader for ninja xml file --- .vscode/c_cpp_properties.json | 16 ++ src/software/pipeline/CMakeLists.txt | 11 ++ .../pipeline/main_panoramaExternalInfo.cpp | 186 ++++++++++++++++++ 3 files changed, 213 insertions(+) create mode 100644 .vscode/c_cpp_properties.json create mode 100644 src/software/pipeline/main_panoramaExternalInfo.cpp diff --git a/.vscode/c_cpp_properties.json b/.vscode/c_cpp_properties.json new file mode 100644 index 0000000000..836216394a --- /dev/null +++ b/.vscode/c_cpp_properties.json @@ -0,0 +1,16 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": 
"/usr/bin/clang", + "intelliSenseMode": "clang-x64", + "configurationProvider": "vector-of-bool.cmake-tools", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + } + ], + "version": 4 +} \ No newline at end of file diff --git a/src/software/pipeline/CMakeLists.txt b/src/software/pipeline/CMakeLists.txt index fa7de3cf89..05cac968e4 100644 --- a/src/software/pipeline/CMakeLists.txt +++ b/src/software/pipeline/CMakeLists.txt @@ -111,6 +111,17 @@ if(ALICEVISION_BUILD_SFM) aliceVision_sfmDataIO ${Boost_LIBRARIES} ) + alicevision_add_software(aliceVision_panoramaExternalInfo + SOURCE main_panoramaExternalInfo.cpp + FOLDER ${FOLDER_SOFTWARE_PIPELINE} + LINKS aliceVision_system + aliceVision_image + aliceVision_feature + aliceVision_sfm + aliceVision_sfmData + aliceVision_sfmDataIO + ${Boost_LIBRARIES} + ) # Compute structure from known camera poses alicevision_add_software(aliceVision_computeStructureFromKnownPoses diff --git a/src/software/pipeline/main_panoramaExternalInfo.cpp b/src/software/pipeline/main_panoramaExternalInfo.cpp new file mode 100644 index 0000000000..d23bf65635 --- /dev/null +++ b/src/software/pipeline/main_panoramaExternalInfo.cpp @@ -0,0 +1,186 @@ +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +// These constants define the current software version. +// They must be updated when the command line is changed. 
+#define ALICEVISION_SOFTWARE_VERSION_MAJOR 2 +#define ALICEVISION_SOFTWARE_VERSION_MINOR 0 + +using namespace aliceVision; +namespace po = boost::program_options; +namespace fs = boost::filesystem; +namespace pt = boost::property_tree; + +int main(int argc, char * argv[]) { + std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); + std::string externalInfoFilename = ""; + std::string sfmInputDataFilename = ""; + std::string sfmOutputDataFilename = ""; + + /***** + * DESCRIBE COMMAND LINE PARAMETERS + */ + po::options_description allParams( + "Parse external information about cameras used in a panorama.\n" + "AliceVision PanoramaExternalInfo"); + + po::options_description requiredParams("Required parameters"); + requiredParams.add_options() + ("config,c", po::value(&externalInfoFilename)->required(), "External info xml file.") + ("input,i", po::value(&sfmInputDataFilename)->required(), "SfMData file input.") + ("output,o", po::value(&sfmOutputDataFilename)->required(), "SfMData file output.") + ; + + po::options_description optionalParams("Optional parameters"); + + po::options_description logParams("Log parameters"); + logParams.add_options() + ("verboseLevel,v", po::value(&verboseLevel)->default_value(verboseLevel), + "verbosity level (fatal, error, warning, info, debug, trace)."); + + allParams.add(requiredParams).add(optionalParams).add(logParams); + + + /** + * READ COMMAND LINE + */ + po::variables_map vm; + try + { + po::store(po::parse_command_line(argc, argv, allParams), vm); + + if(vm.count("help") || (argc == 1)) + { + ALICEVISION_COUT(allParams); + return EXIT_SUCCESS; + } + po::notify(vm); + } + catch(boost::program_options::required_option& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + catch(boost::program_options::error& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + 
return EXIT_FAILURE; + } + + ALICEVISION_COUT("Program called with the following parameters:"); + ALICEVISION_COUT(vm); + + /** + * set verbose level + **/ + system::Logger::get()->setLogLevel(verboseLevel); + + + pt::ptree tree; + + try { + pt::read_xml(externalInfoFilename, tree); + } + catch (...) { + ALICEVISION_CERR("Error parsing input file"); + } + + pt::ptree lens = tree.get_child("papywizard.header.lens"); + pt::ptree shoot = tree.get_child("papywizard.shoot"); + + std::string lensType = lens.get(".type"); + double lensFocal = lens.get("focal"); + + /*Make sure we control everything for debug purpose*/ + if (lensType != "rectilinear") { + std::cout << "Lens type is unusual !" << std::endl; + return EXIT_FAILURE; + } + + std::map rotations; + + for (auto it : shoot) { + + int id = it.second.get(".id"); + int bracket = it.second.get(".bracket"); + + if (rotations.find(id) != rotations.end()) { + std::cout << "Multiple information with a same id !" << std::endl; + return EXIT_FAILURE; + } + + double yaw_degree = it.second.get("position..yaw"); + double pitch_degree = it.second.get("position..pitch"); + double roll_degree = it.second.get("position..roll"); + + double yaw = degreeToRadian(yaw_degree); + double pitch = degreeToRadian(pitch_degree); + double roll = degreeToRadian(roll_degree); + + Eigen::AngleAxis Myaw(yaw, Eigen::Vector3d::UnitZ()); + Eigen::AngleAxis Mpitch(pitch, Eigen::Vector3d::UnitY()); + Eigen::AngleAxis Mroll(roll, Eigen::Vector3d::UnitX()); + + Eigen::Matrix3d R = Myaw.toRotationMatrix() * Mpitch.toRotationMatrix() * Mroll.toRotationMatrix(); + + rotations[id] = R; + } + + std::vector> names_with_id; + + /** + * Update sfm accordingly + */ + sfmData::SfMData sfmData; + if(!sfmDataIO::Load(sfmData, sfmInputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS|sfmDataIO::EXTRINSICS))) + { + ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmInputDataFilename << "' cannot be read."); + return EXIT_FAILURE; + } + + if 
(sfmData.getViews().size() != rotations.size()) { + ALICEVISION_LOG_ERROR("The input SfMData has an incorrect number of views."); + return EXIT_FAILURE; + } + + /** + * HEURISTIC : + * The xml file describe rotations for view ids which are not correlated with Alicevision view id + * Let assume that the order of xml views ids is the lexicographic order of the image names. + */ + for (auto v : sfmData.getViews()) { + names_with_id.push_back(std::make_pair(v.second->getImagePath(), v.first)); + } + std::sort(names_with_id.begin(), names_with_id.end()); + + size_t index = 0; + for (auto &item_rotation: rotations) { + + IndexT viewIdx = names_with_id[index].second; + + sfmData::CameraPose pose(geometry::Pose3(item_rotation.second, Eigen::Vector3d::Zero())); + + sfmData.setAbsolutePose(viewIdx, pose); + + index++; + } + + + if (!sfmDataIO::Save(sfmData, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS | sfmDataIO::INTRINSICS | sfmDataIO::EXTRINSICS))) { + ALICEVISION_LOG_ERROR("The output SfMData file '" << sfmOutputDataFilename << "' cannot be write."); + return EXIT_FAILURE; + } + + return 0; +} \ No newline at end of file From db13568fb0df4258c7a77c0e3995af07b27d1bd5 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Tue, 1 Oct 2019 08:44:37 +0200 Subject: [PATCH 059/174] prior on rotation --- .../ReconstructionEngine_panorama.cpp | 63 ++++++++++--------- .../pipeline/main_panoramaEstimation.cpp | 2 +- .../pipeline/main_panoramaExternalInfo.cpp | 37 ++++++++--- .../pipeline/main_panoramaStitching.cpp | 12 ++++ 4 files changed, 75 insertions(+), 39 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 8719000725..d9222f8b58 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -434,8 +434,7 @@ void 
ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging // List shared correspondences (pairs) between poses PoseWiseMatches poseWiseMatches; - for (matching::PairwiseMatches::const_iterator iterMatches = _pairwiseMatches->begin(); - iterMatches != _pairwiseMatches->end(); ++iterMatches) + for (matching::PairwiseMatches::const_iterator iterMatches = _pairwiseMatches->begin(); iterMatches != _pairwiseMatches->end(); ++iterMatches) { const Pair pair = iterMatches->first; const View* v1 = _sfmData.getViews().at(pair.first).get(); @@ -446,6 +445,7 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging sfm::Constraints2D & constraints2d = _sfmData.getConstraints2D(); ALICEVISION_LOG_INFO("Relative pose computation:"); + /*For each pair of views, compute the relative pose*/ for (int i = 0; i < poseWiseMatches.size(); ++i) { { @@ -469,19 +469,18 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging } const Pair pairIterator = *(match_pairs.begin()); - const IndexT I = pairIterator.first; const IndexT J = pairIterator.second; - const View* view_I = _sfmData.views[I].get(); const View* view_J = _sfmData.views[J].get(); - // Check that valid cameras are existing for the pair of view - if (_sfmData.getIntrinsics().count(view_I->getIntrinsicId()) == 0 || - _sfmData.getIntrinsics().count(view_J->getIntrinsicId()) == 0) + //Check that valid cameras are existing for the pair of view + if (_sfmData.getIntrinsics().count(view_I->getIntrinsicId()) == 0 || _sfmData.getIntrinsics().count(view_J->getIntrinsicId()) == 0) { continue; + } - // Setup corresponding bearing vector + + /* Build a list of pairs in meters*/ const matching::MatchesPerDescType & matchesPerDesc = _pairwiseMatches->at(pairIterator); const std::size_t nbBearing = matchesPerDesc.getNbAllMatches(); std::size_t iBearing = 0; @@ -506,10 +505,7 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging RelativePoseInfo 
relativePose_info; // Compute max authorized error as geometric mean of camera plane tolerated residual error - relativePose_info.initial_residual_tolerance = std::pow( - cam_I->imagePlane_toCameraPlaneError(2.5) * - cam_J->imagePlane_toCameraPlaneError(2.5), - 1./2.); + relativePose_info.initial_residual_tolerance = std::pow(cam_I->imagePlane_toCameraPlaneError(2.5) * cam_J->imagePlane_toCameraPlaneError(2.5), 1./2.); // Since we use normalized features, we will use unit image size and intrinsic matrix: const std::pair imageSize(1., 1.); @@ -529,15 +525,14 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging case RELATIVE_ROTATION_FROM_H: { RelativeRotationInfo relativeRotation_info; - relativeRotation_info._initialResidualTolerance = std::pow( - cam_I->imagePlane_toCameraPlaneError(2.5) * - cam_J->imagePlane_toCameraPlaneError(2.5), - 1./2.); + relativeRotation_info._initialResidualTolerance = std::pow(cam_I->imagePlane_toCameraPlaneError(2.5) * cam_J->imagePlane_toCameraPlaneError(2.5), 1./2.); + if(!robustRelativeRotation_fromH(K, K, x1, x2, imageSize, imageSize, relativeRotation_info)) { ALICEVISION_LOG_INFO("Relative pose computation: i: " << i << ", (" << I << ", " << J <<") => FAILED"); continue; } + relativePose_info.relativePose = geometry::Pose3(relativeRotation_info._relativeRotation, Vec3::Zero()); relativePose_info.initial_residual_tolerance = relativeRotation_info._initialResidualTolerance; relativePose_info.found_residual_precision = relativeRotation_info._foundResidualPrecision; @@ -550,10 +545,27 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging } ALICEVISION_LOG_INFO("Relative pose computation: i: " << i << ", (" << I << ", " << J <<") => SUCCESS"); - ALICEVISION_LOG_INFO("Nb inliers: " << relativePose_info.vec_inliers.size() - << ", initial_residual_tolerance: " << relativePose_info.initial_residual_tolerance - << ", found_residual_precision: " << 
relativePose_info.found_residual_precision); + ALICEVISION_LOG_INFO("Nb inliers: " << relativePose_info.vec_inliers.size() << ", initial_residual_tolerance: " << relativePose_info.initial_residual_tolerance << ", found_residual_precision: " << relativePose_info.found_residual_precision); + + /* + If an existing prior on rotation exists, then make sure the found detected rotation is not stupid + */ + if (_sfmData.isPoseAndIntrinsicDefined(view_I) && _sfmData.isPoseAndIntrinsicDefined(view_J)) { + CameraPose iTo = _sfmData.getAbsolutePose(view_I->getPoseId()); + CameraPose jTo = _sfmData.getAbsolutePose(view_J->getPoseId()); + Eigen::Matrix3d iRo = iTo.getTransform().rotation(); + Eigen::Matrix3d jRo = jTo.getTransform().rotation(); + Eigen::Matrix3d jRi = jRo * iRo.transpose(); + + Eigen::Matrix3d jRi_est = relativePose_info.relativePose.rotation(); + + Eigen::AngleAxisd checker; + checker.fromRotationMatrix(jRi_est * jRi.transpose()); + if (std::abs(radianToDegree(checker.angle())) > 5) { + continue; + } + } /*Sort all inliers by increasing ids*/ std::sort(relativePose_info.vec_inliers.begin(), relativePose_info.vec_inliers.end()); @@ -587,24 +599,19 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging { // Add the relative rotation to the relative 'rotation' pose graph using namespace aliceVision::rotationAveraging; - vec_relatives_R.emplace_back( - relative_pose_pair.first, relative_pose_pair.second, - relativePose_info.relativePose.rotation(), relativePose_info.vec_inliers.size()); + vec_relatives_R.emplace_back(relative_pose_pair.first, relative_pose_pair.second, relativePose_info.relativePose.rotation(), relativePose_info.vec_inliers.size()); } } } // for all relative pose + /*Debug result*/ ALICEVISION_LOG_DEBUG("Compute_Relative_Rotations: vec_relatives_R.size(): " << vec_relatives_R.size()); - for(rotationAveraging::RelativeRotation& rotation: vec_relatives_R) { - ALICEVISION_LOG_DEBUG("Relative_Rotation:\n" - << "i: " << 
rotation.i << ", j: " << rotation.j << ", weight: " << rotation.weight << "\n" - << "Rij" << rotation.Rij - ); + ALICEVISION_LOG_DEBUG("Relative_Rotation:\n" << "i: " << rotation.i << ", j: " << rotation.j << ", weight: " << rotation.weight << "\n" << "Rij" << rotation.Rij); } - // Re-weight rotation in [0,1] + /* Re-weight rotation in [0,1] : Does it do anything ??? */ if (vec_relatives_R.size() > 1) { std::vector vec_count; diff --git a/src/software/pipeline/main_panoramaEstimation.cpp b/src/software/pipeline/main_panoramaEstimation.cpp index 900178c39a..25fb8f91f9 100644 --- a/src/software/pipeline/main_panoramaEstimation.cpp +++ b/src/software/pipeline/main_panoramaEstimation.cpp @@ -150,7 +150,7 @@ int main(int argc, char **argv) // load input SfMData scene sfmData::SfMData inputSfmData; - if(!sfmDataIO::Load(inputSfmData, sfmDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) + if(!sfmDataIO::Load(inputSfmData, sfmDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS|sfmDataIO::EXTRINSICS))) { ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmDataFilename << "' cannot be read."); return EXIT_FAILURE; diff --git a/src/software/pipeline/main_panoramaExternalInfo.cpp b/src/software/pipeline/main_panoramaExternalInfo.cpp index d23bf65635..50a68f57fd 100644 --- a/src/software/pipeline/main_panoramaExternalInfo.cpp +++ b/src/software/pipeline/main_panoramaExternalInfo.cpp @@ -37,7 +37,7 @@ int main(int argc, char * argv[]) { requiredParams.add_options() ("config,c", po::value(&externalInfoFilename)->required(), "External info xml file.") ("input,i", po::value(&sfmInputDataFilename)->required(), "SfMData file input.") - ("output,o", po::value(&sfmOutputDataFilename)->required(), "SfMData file output.") + ("outSfMDataFilename,o", po::value(&sfmOutputDataFilename)->required(), "SfMData file output.") ; po::options_description optionalParams("Optional parameters"); @@ -120,24 +120,38 @@ int main(int argc, char * argv[]) { 
return EXIT_FAILURE; } + + + + + Eigen::AngleAxis Fyaw(0, Eigen::Vector3d::UnitY()); + Eigen::AngleAxis Fpitch(M_PI_2, Eigen::Vector3d::UnitX()); + Eigen::AngleAxis Froll(M_PI_2, Eigen::Vector3d::UnitZ()); + Eigen::Matrix3d oRi = Fyaw.toRotationMatrix() * Fpitch.toRotationMatrix() * Froll.toRotationMatrix(); + double yaw_degree = it.second.get("position..yaw"); double pitch_degree = it.second.get("position..pitch"); double roll_degree = it.second.get("position..roll"); - double yaw = degreeToRadian(yaw_degree); double pitch = degreeToRadian(pitch_degree); double roll = degreeToRadian(roll_degree); - Eigen::AngleAxis Myaw(yaw, Eigen::Vector3d::UnitZ()); - Eigen::AngleAxis Mpitch(pitch, Eigen::Vector3d::UnitY()); - Eigen::AngleAxis Mroll(roll, Eigen::Vector3d::UnitX()); + Eigen::AngleAxis Myaw(yaw, Eigen::Vector3d::UnitY()); + Eigen::AngleAxis Mpitch(pitch, Eigen::Vector3d::UnitX()); + Eigen::AngleAxis Mroll(roll, Eigen::Vector3d::UnitZ()); + + Eigen::AngleAxis Mimage(M_PI, Eigen::Vector3d::UnitZ()); + + Eigen::Matrix3d cRo = Mimage.toRotationMatrix() * Mroll.toRotationMatrix() * Mpitch.toRotationMatrix() * Myaw.toRotationMatrix(); + Eigen::Matrix3d iRo = Mimage.toRotationMatrix() * Froll.toRotationMatrix() * Fpitch.toRotationMatrix() * Fyaw.toRotationMatrix(); + Eigen::Matrix3d cRi = cRo * iRo.transpose(); - Eigen::Matrix3d R = Myaw.toRotationMatrix() * Mpitch.toRotationMatrix() * Mroll.toRotationMatrix(); - rotations[id] = R; + + rotations[id] = cRi; } - std::vector> names_with_id; + /** * Update sfm accordingly @@ -159,6 +173,7 @@ int main(int argc, char * argv[]) { * The xml file describe rotations for view ids which are not correlated with Alicevision view id * Let assume that the order of xml views ids is the lexicographic order of the image names. 
*/ + std::vector> names_with_id; for (auto v : sfmData.getViews()) { names_with_id.push_back(std::make_pair(v.second->getImagePath(), v.first)); } @@ -169,9 +184,11 @@ int main(int argc, char * argv[]) { IndexT viewIdx = names_with_id[index].second; - sfmData::CameraPose pose(geometry::Pose3(item_rotation.second, Eigen::Vector3d::Zero())); - sfmData.setAbsolutePose(viewIdx, pose); + if (item_rotation.second.trace() != 0) { + sfmData::CameraPose pose(geometry::Pose3 (item_rotation.second, Eigen::Vector3d::Zero())); + sfmData.setAbsolutePose(viewIdx, pose); + } index++; } diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 60ecbf16c0..461dec0205 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -280,6 +280,7 @@ int main(int argc, char **argv) panoramaSize.first *= scaleFactor; panoramaSize.second *= scaleFactor; + ALICEVISION_LOG_INFO("Output panorama size: " << panoramaSize.first << ", " << panoramaSize.second); // Create panorama buffer @@ -288,7 +289,13 @@ int main(int argc, char **argv) int imageIndex = 0; for(auto& viewIt: sfmData.getViews()) { + IndexT viewId = viewIt.first; + + if (!sfmData.isPoseAndIntrinsicDefined(viewId)) { + continue; + } + const sfmData::View& view = *viewIt.second.get(); if(!sfmData.isPoseAndIntrinsicDefined(&view)) continue; @@ -306,15 +313,20 @@ int main(int argc, char **argv) } } ++imageIndex; + + const sfmData::CameraPose camPose = sfmData.getPose(view); const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); std::string imagePath = view.getImagePath(); + // Image RGB image::Image imageIn; + + ALICEVISION_LOG_INFO("Reading " << imagePath); image::readImage(imagePath, imageIn, image::EImageColorSpace::LINEAR); ALICEVISION_LOG_INFO(" - " << imageIn.Width() << "x" << imageIn.Height()); From 8517dd41fcd44fad78d348d87a37095747ddbf1c Mon Sep 17 00:00:00 2001 From: Fabien 
Servant Date: Tue, 1 Oct 2019 13:57:15 +0200 Subject: [PATCH 060/174] amman working --- .../ReconstructionEngine_panorama.cpp | 162 ++++++++++++------ 1 file changed, 107 insertions(+), 55 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index d9222f8b58..15ee1aa44a 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -184,8 +184,8 @@ bool robustRelativeRotation_fromH(const Mat3 &K1, } relativeRotationInfo._relativeRotation = decomposePureRotationHomography(relativeRotationInfo._homography, K1, K2); - ALICEVISION_LOG_INFO("Found homography H:\n" << relativeRotationInfo._homography); - ALICEVISION_LOG_INFO("Homography H decomposes to rotation R:\n" << relativeRotationInfo._relativeRotation); + //ALICEVISION_LOG_INFO("Found homography H:\n" << relativeRotationInfo._homography); + //ALICEVISION_LOG_INFO("Homography H decomposes to rotation R:\n" << relativeRotationInfo._relativeRotation); return true; } @@ -329,31 +329,62 @@ bool ReconstructionEngine_panorama::process() } /// Compute from relative rotations the global rotations of the camera poses -bool ReconstructionEngine_panorama::Compute_Global_Rotations(const rotationAveraging::RelativeRotations& relatives_R, - HashMap& global_rotations) +bool ReconstructionEngine_panorama::Compute_Global_Rotations(const rotationAveraging::RelativeRotations& relatives_R, HashMap& global_rotations) { - if(relatives_R.empty()) + if(relatives_R.empty()) { return false; - // Log statistics about the relative rotation graph + } + + rotationAveraging::RelativeRotations local_relatives_R = relatives_R; + + + std::set set_pose_ids; + for (const auto & relative_R : local_relatives_R) { - std::set set_pose_ids; - for (const auto & relative_R : relatives_R) - { - set_pose_ids.insert(relative_R.i); - 
set_pose_ids.insert(relative_R.j); - } + set_pose_ids.insert(relative_R.i); + set_pose_ids.insert(relative_R.j); + } - ALICEVISION_LOG_DEBUG("Global rotations computation: " << "\n" - "\t- relative rotations: " << relatives_R.size() << "\n" + ALICEVISION_LOG_INFO("Global rotations computation: " << "\n" + "\t- relative rotations: " << relatives_R.size() << "\n" "\t- global rotations: " << set_pose_ids.size()); + + /* + If a view with a pose prior is not found in the relative rotation, + make sure we add a fake link to adjust globally everything. + */ + size_t count = 0; + sfmData::Poses & poses = _sfmData.getPoses(); + if (poses.size() > 0) { + + IndexT firstViewId = *set_pose_ids.begin(); + IndexT firstPoseId = _sfmData.getView(firstViewId).getPoseId(); + + Eigen::Matrix3d i1Ro = poses[firstPoseId].getTransform().rotation(); + + for (auto & currentPose : _sfmData.getPoses()) { + + IndexT poseId = currentPose.first; + if (set_pose_ids.find(poseId) == set_pose_ids.end()) { + + set_pose_ids.insert(poseId); + + /*Add a fake relative pose between this pose and the first found pose*/ + Eigen::Matrix3d iRo = currentPose.second.getTransform().rotation(); + Eigen::Matrix3d iR1 = iRo * i1Ro.transpose(); + local_relatives_R.emplace_back(firstPoseId, currentPose.first, iR1, 1.0); + } + } } + + // Global Rotation solver: const ERelativeRotationInferenceMethod eRelativeRotationInferenceMethod = TRIPLET_ROTATION_INFERENCE_NONE; // TRIPLET_ROTATION_INFERENCE_COMPOSITION_ERROR; GlobalSfMRotationAveragingSolver rotationAveraging_solver; //-- Rejection triplet that are 'not' identity rotation (error to identity > 50°) - const bool b_rotationAveraging = rotationAveraging_solver.Run(_eRotationAveragingMethod, eRelativeRotationInferenceMethod, relatives_R, 100.0, global_rotations); + const bool b_rotationAveraging = rotationAveraging_solver.Run(_eRotationAveragingMethod, eRelativeRotationInferenceMethod, local_relatives_R, 100.0, global_rotations); ALICEVISION_LOG_DEBUG("Found 
#global_rotations: " << global_rotations.size()); @@ -443,6 +474,7 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging } sfm::Constraints2D & constraints2d = _sfmData.getConstraints2D(); + std::map connection_size; ALICEVISION_LOG_INFO("Relative pose computation:"); /*For each pair of views, compute the relative pose*/ @@ -544,13 +576,14 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging "Unknown relative rotation method: " << ERelativeRotationMethod_enumToString(_eRelativeRotationMethod)); } - ALICEVISION_LOG_INFO("Relative pose computation: i: " << i << ", (" << I << ", " << J <<") => SUCCESS"); - ALICEVISION_LOG_INFO("Nb inliers: " << relativePose_info.vec_inliers.size() << ", initial_residual_tolerance: " << relativePose_info.initial_residual_tolerance << ", found_residual_precision: " << relativePose_info.found_residual_precision); + //ALICEVISION_LOG_INFO("Relative pose computation: i: " << i << ", (" << I << ", " << J <<") => SUCCESS"); + //ALICEVISION_LOG_INFO("Nb inliers: " << relativePose_info.vec_inliers.size() << ", initial_residual_tolerance: " << relativePose_info.initial_residual_tolerance << ", found_residual_precision: " << relativePose_info.found_residual_precision); /* If an existing prior on rotation exists, then make sure the found detected rotation is not stupid */ + double weight = relativePose_info.vec_inliers.size(); if (_sfmData.isPoseAndIntrinsicDefined(view_I) && _sfmData.isPoseAndIntrinsicDefined(view_J)) { CameraPose iTo = _sfmData.getAbsolutePose(view_I->getPoseId()); CameraPose jTo = _sfmData.getAbsolutePose(view_J->getPoseId()); @@ -563,35 +596,49 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging Eigen::AngleAxisd checker; checker.fromRotationMatrix(jRi_est * jRi.transpose()); if (std::abs(radianToDegree(checker.angle())) > 5) { - continue; + relativePose_info.relativePose = geometry::Pose3(jRi, Vec3::Zero()); + 
relativePose_info.vec_inliers.clear(); + weight = 1.0; } } - /*Sort all inliers by increasing ids*/ - std::sort(relativePose_info.vec_inliers.begin(), relativePose_info.vec_inliers.end()); + /*Add connection to find best constraints*/ + if (connection_size.find(I) == connection_size.end()) { + connection_size[I] = 0; + } + connection_size[I] += relativePose_info.vec_inliers.size(); + if (connection_size.find(J) == connection_size.end()) { + connection_size[J] = 0; + } + connection_size[J] += relativePose_info.vec_inliers.size(); - size_t index = 0; - size_t index_inlier = 0; - for(const auto& matchesPerDescIt: matchesPerDesc) - { - const feature::EImageDescriberType descType = matchesPerDescIt.first; - const matching::IndMatches & matches = matchesPerDescIt.second; + /*Sort all inliers by increasing ids*/ + if (relativePose_info.vec_inliers.size() > 0) { + std::sort(relativePose_info.vec_inliers.begin(), relativePose_info.vec_inliers.end()); - for (const auto & match : matches) + size_t index = 0; + size_t index_inlier = 0; + for(const auto& matchesPerDescIt: matchesPerDesc) { - size_t next_inlier = relativePose_info.vec_inliers[index_inlier]; - if (index == next_inlier) { - - Vec2 pt1 = _featuresPerView->getFeatures(I, descType)[match._i].coords().cast(); - Vec2 pt2 = _featuresPerView->getFeatures(J, descType)[match._j].coords().cast(); + const feature::EImageDescriberType descType = matchesPerDescIt.first; + const matching::IndMatches & matches = matchesPerDescIt.second; - sfm::Constraint2D constraint(I, sfm::Observation(pt1, 0), J, sfm::Observation(pt2, 0)); - constraints2d.push_back(constraint); + for (const auto & match : matches) + { + size_t next_inlier = relativePose_info.vec_inliers[index_inlier]; + if (index == next_inlier) { + + Vec2 pt1 = _featuresPerView->getFeatures(I, descType)[match._i].coords().cast(); + Vec2 pt2 = _featuresPerView->getFeatures(J, descType)[match._j].coords().cast(); - index_inlier++; - } + sfm::Constraint2D constraint(I, 
sfm::Observation(pt1, 0), J, sfm::Observation(pt2, 0)); + constraints2d.push_back(constraint); + + index_inlier++; + } - index++; + index++; + } } } @@ -599,11 +646,33 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging { // Add the relative rotation to the relative 'rotation' pose graph using namespace aliceVision::rotationAveraging; - vec_relatives_R.emplace_back(relative_pose_pair.first, relative_pose_pair.second, relativePose_info.relativePose.rotation(), relativePose_info.vec_inliers.size()); + vec_relatives_R.emplace_back(relative_pose_pair.first, relative_pose_pair.second, relativePose_info.relativePose.rotation(), 1.0); } } } // for all relative pose + /*Find best connection with pose prior*/ + size_t max_val = 0; + IndexT max_index = UndefinedIndexT; + for (auto & item : connection_size) { + if (_sfmData.isPoseAndIntrinsicDefined(item.first)) { + if (item.second > max_val) { + max_index = item.first; + max_val = item.second; + } + } + } + + /*If a best view is defined, lock it*/ + sfmData::Poses & poses = _sfmData.getPoses(); + if (max_index != UndefinedIndexT) { + sfmData::View & v = _sfmData.getView(max_index); + IndexT poseid = v.getPoseId(); + if (poseid != UndefinedIndexT) { + poses[v.getPoseId()].lock(); + } + } + /*Debug result*/ ALICEVISION_LOG_DEBUG("Compute_Relative_Rotations: vec_relatives_R.size(): " << vec_relatives_R.size()); for(rotationAveraging::RelativeRotation& rotation: vec_relatives_R) @@ -611,23 +680,6 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging ALICEVISION_LOG_DEBUG("Relative_Rotation:\n" << "i: " << rotation.i << ", j: " << rotation.j << ", weight: " << rotation.weight << "\n" << "Rij" << rotation.Rij); } - /* Re-weight rotation in [0,1] : Does it do anything ??? 
*/ - if (vec_relatives_R.size() > 1) - { - std::vector vec_count; - vec_count.reserve(vec_relatives_R.size()); - for(const auto & relative_rotation_info : vec_relatives_R) - { - vec_count.push_back(relative_rotation_info.weight); - } - std::partial_sort(vec_count.begin(), vec_count.begin() + vec_count.size() / 2, vec_count.end()); - // const float thTrustPair = vec_count[vec_count.size() / 2]; - for(auto & relative_rotation_info : vec_relatives_R) - { - relative_rotation_info.weight = std::min(relative_rotation_info.weight, 1.f); - } - } - // Log input graph to the HTML report if (!_loggingFile.empty() && !_outputFolder.empty()) { From e999483481587d9ca1e53ff922f6e7aabaaadbb6 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Wed, 2 Oct 2019 10:51:50 +0200 Subject: [PATCH 061/174] corrections for xml read --- .../pipeline/main_panoramaExternalInfo.cpp | 25 ++++++------------- 1 file changed, 7 insertions(+), 18 deletions(-) diff --git a/src/software/pipeline/main_panoramaExternalInfo.cpp b/src/software/pipeline/main_panoramaExternalInfo.cpp index 50a68f57fd..e73e512401 100644 --- a/src/software/pipeline/main_panoramaExternalInfo.cpp +++ b/src/software/pipeline/main_panoramaExternalInfo.cpp @@ -108,6 +108,8 @@ int main(int argc, char * argv[]) { return EXIT_FAILURE; } + + std::map rotations; for (auto it : shoot) { @@ -120,18 +122,10 @@ int main(int argc, char * argv[]) { return EXIT_FAILURE; } - - - - - Eigen::AngleAxis Fyaw(0, Eigen::Vector3d::UnitY()); - Eigen::AngleAxis Fpitch(M_PI_2, Eigen::Vector3d::UnitX()); - Eigen::AngleAxis Froll(M_PI_2, Eigen::Vector3d::UnitZ()); - Eigen::Matrix3d oRi = Fyaw.toRotationMatrix() * Fpitch.toRotationMatrix() * Froll.toRotationMatrix(); - double yaw_degree = it.second.get("position..yaw"); double pitch_degree = it.second.get("position..pitch"); double roll_degree = it.second.get("position..roll"); + double yaw = degreeToRadian(yaw_degree); double pitch = degreeToRadian(pitch_degree); double roll = degreeToRadian(roll_degree); 
@@ -139,20 +133,14 @@ int main(int argc, char * argv[]) { Eigen::AngleAxis Myaw(yaw, Eigen::Vector3d::UnitY()); Eigen::AngleAxis Mpitch(pitch, Eigen::Vector3d::UnitX()); Eigen::AngleAxis Mroll(roll, Eigen::Vector3d::UnitZ()); + Eigen::AngleAxis Mimage(-M_PI_2, Eigen::Vector3d::UnitZ()); - Eigen::AngleAxis Mimage(M_PI, Eigen::Vector3d::UnitZ()); + Eigen::Matrix3d cRo = Myaw.toRotationMatrix() * Mpitch.toRotationMatrix() * Mroll.toRotationMatrix() * Mimage.toRotationMatrix() ; - Eigen::Matrix3d cRo = Mimage.toRotationMatrix() * Mroll.toRotationMatrix() * Mpitch.toRotationMatrix() * Myaw.toRotationMatrix(); - Eigen::Matrix3d iRo = Mimage.toRotationMatrix() * Froll.toRotationMatrix() * Fpitch.toRotationMatrix() * Fyaw.toRotationMatrix(); - Eigen::Matrix3d cRi = cRo * iRo.transpose(); - - - - rotations[id] = cRi; + rotations[id] = cRo.transpose(); } - /** * Update sfm accordingly */ @@ -168,6 +156,7 @@ int main(int argc, char * argv[]) { return EXIT_FAILURE; } + /** * HEURISTIC : * The xml file describe rotations for view ids which are not correlated with Alicevision view id From 1ed1753a9cc029bd9efacd0a0c08d6ffa53d2eaf Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Thu, 3 Oct 2019 08:08:56 +0200 Subject: [PATCH 062/174] Sign error in spherical mapping from equirectangular --- src/software/pipeline/main_panoramaStitching.cpp | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 461dec0205..53fe4e340e 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -45,13 +45,10 @@ float sigmoid(float x, float sigwidth, float sigMid) */ namespace SphericalMapping { - Vec3 get3DPoint(const Vec2& pos2d, int width, int height) + Vec3 fromEquirectangular(const Vec2& pos2d, int width, int height) { - const double x = pos2d(0) - width; - const double y = height/2.0 - pos2d(1); - - const 
double longitude = M_PI * 2.0 * x / width; // between -PI and PI - const double latitude = M_PI * y / height; // between -PI/2 and PI/2 + const double latitude = (pos2d(1) / double(height)) * M_PI - M_PI_2; + const double longitude = ((pos2d(0) / double(width)) * 2.0 * M_PI) - M_PI; const double Px = cos(latitude) * sin(longitude); const double Py = sin(latitude); @@ -88,7 +85,6 @@ bool estimate_panorama_size(const sfmData::SfMData & sfmData, float fisheyeMaski const sfmData::CameraPose camPose = sfmData.getPose(view); const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - const float maxRadius = std::min(intrinsic.w(), intrinsic.h()) * 0.5f * (1.0 - fisheyeMaskingMargin); const Vec2i center(intrinsic.w()/2.f, intrinsic.h()/2.f); @@ -99,7 +95,7 @@ bool estimate_panorama_size(const sfmData::SfMData & sfmData, float fisheyeMaski { BufferCoords(y, x).a() = 0.0; // equirectangular to unit vector - Vec3 ray = SphericalMapping::get3DPoint(Vec2(x,y), panoramaSize.first, panoramaSize.second); + Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), panoramaSize.first, panoramaSize.second); if(camPose.getTransform().depth(ray) < 0) { @@ -343,7 +339,7 @@ int main(int argc, char **argv) for(int x = 0; x < imageOut.Width(); ++x) { // equirectangular to unit vector - Vec3 ray = SphericalMapping::get3DPoint(Vec2(x,y), imageOut.Width(), imageOut.Height()); + Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), imageOut.Width(), imageOut.Height()); //Check that this ray should be visible Vec3 transformedRay = camPose.getTransform()(ray); From a1c832ed077599aa050a0a66e383807c0defa443 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Fri, 4 Oct 2019 15:34:21 +0200 Subject: [PATCH 063/174] Currently applying laplacian blending (WIP) --- .../pipeline/main_panoramaStitching.cpp | 356 ++++++++++++++++-- 1 file changed, 327 insertions(+), 29 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp 
b/src/software/pipeline/main_panoramaStitching.cpp index 53fe4e340e..13825aaf91 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -35,20 +35,238 @@ using namespace aliceVision; namespace po = boost::program_options; namespace fs = boost::filesystem; +class LaplacianPyramid { +public: + LaplacianPyramid(const size_t width_base, const size_t height_base) : + _width_base(width_base), + _height_base(height_base) + { + _kernel = oiio::ImageBufAlgo::make_kernel("gaussian", 5.0f, 5.0f); + + size_t min_dim = std::min(_width_base, _height_base); + size_t scales = static_cast(floor(log2(double(min_dim)))); + scales = 4; + + size_t new_width = _width_base; + size_t new_height = _height_base; + for (int i = 0; i < scales; i++) { + + _difference_buffers.push_back(image::Image(new_width, new_height)); + _work_buffers_1.push_back(image::Image(new_width, new_height)); + _work_buffers_2.push_back(image::Image(new_width, new_height)); + _work_buffers_3.push_back(image::Image(new_width, new_height)); + + new_height /= 2; + new_width /= 2; + } + } + + bool process(const image::Image & input) { + + + _work_buffers_1[0] = input; + + + /* + Loop over the levels. 
+ 0 being the base level with full resolution + levels.size() - 1 being the least defined image + */ + for (int lvl = 0; lvl < _difference_buffers.size() - 1; lvl++) { + + oiio::ImageSpec spec_base(_difference_buffers[lvl].Width(), _difference_buffers[lvl].Height(), 4, oiio::TypeDesc::FLOAT); + oiio::ImageSpec spec_base_half(_difference_buffers[lvl + 1].Width(), _difference_buffers[lvl + 1].Height(), 4, oiio::TypeDesc::FLOAT); + + oiio::ImageBuf _difference_buffer_oiio(spec_base, (void*)(_difference_buffers[lvl].data())); + oiio::ImageBuf _work_buffer_1_oiio(spec_base, (void*)(_work_buffers_1[lvl].data())); + oiio::ImageBuf _work_buffer_2_oiio(spec_base, (void*)(_work_buffers_2[lvl].data())); + oiio::ImageBuf _work_buffer_3_oiio(spec_base, (void*)(_work_buffers_3[lvl].data())); + oiio::ImageBuf _difference_buffer_half_oiio(spec_base_half, (void*)(_difference_buffers[lvl + 1].data())); + oiio::ImageBuf _work_buffer_1_half_oiio(spec_base_half, (void*)(_work_buffers_1[lvl + 1].data())); + oiio::ImageBuf _work_buffer_2_half_oiio(spec_base_half, (void*)(_work_buffers_2[lvl + 1].data())); + + + + /*First, low pass the image*/ + oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _work_buffer_1_oiio, _kernel, false); + divideByAlpha(_work_buffers_2[lvl]); + copyMask(_work_buffers_2[lvl], _work_buffers_1[lvl]); + + /*Subsample image*/ + oiio::ImageBufAlgo::resample(_work_buffer_1_half_oiio, _work_buffer_2_oiio, false); + divideByAlpha(_work_buffers_1[lvl + 1]); + + /*Upsample back reduced image*/ + oiio::ImageBufAlgo::resample(_work_buffer_2_oiio, _work_buffer_1_half_oiio, false); + divideByAlpha(_work_buffers_2[lvl]); + + /*Low pass upsampled image*/ + oiio::ImageBufAlgo::convolve(_work_buffer_3_oiio, _work_buffer_2_oiio, _kernel, true); + divideByAlpha(_work_buffers_3[lvl]); + copyMask(_work_buffers_3[lvl], _work_buffers_2[lvl]); + + /*Difference*/ + substract(_difference_buffers[lvl], _work_buffers_1[lvl], _work_buffers_3[lvl]); + } + + 
_difference_buffers[_difference_buffers.size() - 1] = _work_buffers_1[_work_buffers_1.size() - 1]; + + return true; + } + + bool rebuild(image::Image & output) { + + _work_buffers_1[_difference_buffers.size() - 1] = _difference_buffers[_difference_buffers.size() - 1]; + + for (int lvl = _difference_buffers.size() - 2; lvl >= 0; lvl--) { + + oiio::ImageSpec spec_base(_difference_buffers[lvl].Width(), _difference_buffers[lvl].Height(), 4, oiio::TypeDesc::FLOAT); + oiio::ImageSpec spec_base_half(_difference_buffers[lvl + 1].Width(), _difference_buffers[lvl + 1].Height(), 4, oiio::TypeDesc::FLOAT); + + oiio::ImageBuf _work_buffer_1_oiio(spec_base, (void*)(_work_buffers_1[lvl].data())); + oiio::ImageBuf _work_buffer_2_oiio(spec_base, (void*)(_work_buffers_2[lvl].data())); + oiio::ImageBuf _work_buffer_1_half_oiio(spec_base_half, (void*)(_work_buffers_1[lvl + 1].data())); + oiio::ImageBuf _difference_buffer_half_oiio(spec_base_half, (void*)(_difference_buffers[lvl + 1].data())); + + /*Upsample back reduced image*/ + oiio::ImageBufAlgo::resample(_work_buffer_1_oiio, _work_buffer_1_half_oiio, false); + divideByAlpha(_work_buffers_1[lvl]); + + /*Low pass upsampled image*/ + oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _work_buffer_1_oiio, _kernel, true); + divideByAlpha(_work_buffers_2[lvl]); + copyMask(_work_buffers_2[lvl], _work_buffers_1[lvl]); + + /*Add with next*/ + add(_work_buffers_1[lvl], _difference_buffers[lvl], _work_buffers_2[lvl]); + } + + output = _work_buffers_1[0]; + + return true; + } + + void divideByAlpha(image::Image & image) { + + for(int y = 0; y < image.Height(); ++y) { + for(int x = 0; x < image.Width(); ++x) { + image::RGBAfColor& pixel = image(y, x); + if(pixel.a() > 0.01f) { + pixel = pixel / pixel.a(); + } + } + } + } + + void copyMask(image::Image & dst, const image::Image & src) { + + for(int y = 0; y < src.Height(); ++y) { + for(int x = 0; x < src.Width(); ++x) { + const image::RGBAfColor & pixelSrc = src(y, x); + image::RGBAfColor & 
pixelDst = dst(y, x); + + pixelDst = pixelDst * pixelSrc.a(); + } + } + } + + void substract(image::Image & result, const image::Image & A, const image::Image & B) { + + for(int y = 0; y < A.Height(); ++y) { + for(int x = 0; x < A.Width(); ++x) { + const image::RGBAfColor & pixelA = A(y, x); + const image::RGBAfColor & pixelB = B(y, x); + image::RGBAfColor & pixelR = result(y, x); + + if(pixelA.a() > std::numeric_limits::epsilon()) { + pixelR.r() = pixelA.r() - pixelB.r(); + pixelR.g() = pixelA.g() - pixelB.g(); + pixelR.b() = pixelA.b() - pixelB.b(); + pixelR.a() = 1.0f; + } + else { + pixelR.r() = 0.0f; + pixelR.g() = 0.0f; + pixelR.b() = 0.0f; + pixelR.a() = 0.0f; + } + } + } + } + + + void add(image::Image & result, const image::Image & A, const image::Image & B) { + + for(int y = 0; y < A.Height(); ++y) { + for(int x = 0; x < A.Width(); ++x) { + const image::RGBAfColor & pixelA = A(y, x); + const image::RGBAfColor & pixelB = B(y, x); + image::RGBAfColor & pixelR = result(y, x); + + if(pixelA.a() > std::numeric_limits::epsilon()) { + pixelR.r() = pixelA.r() + pixelB.r(); + pixelR.g() = pixelA.g() + pixelB.g(); + pixelR.b() = pixelA.b() + pixelB.b(); + pixelR.a() = 1.0f; + } + else { + pixelR.r() = 0.0f; + pixelR.g() = 0.0f; + pixelR.b() = 0.0f; + pixelR.a() = 0.0f; + } + } + } + } + + + +public: + size_t _width_base; + size_t _height_base; + oiio::ImageBuf _kernel; + + std::vector> _difference_buffers; + std::vector> _work_buffers_1; + std::vector> _work_buffers_2; + std::vector> _work_buffers_3; +}; + float sigmoid(float x, float sigwidth, float sigMid) { return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); } +Eigen::Matrix3d rotationBetweenVectors(const Vec3 & v1, const Vec3 & v2) { + + const Vec3 v1n = v1.normalized(); + const Vec3 v2n = v2.normalized(); + const Vec3 cr = v1n.cross(v2n); + const Vec3 axis = cr.normalized(); + + double cosangle = v1n.dot(v2n); + double sinangle = cr.norm(); + + Eigen::Matrix3d S = Eigen::Matrix3d::Zero(); + S(0, 1) 
= -axis(2); + S(0, 2) = axis(1); + S(1, 0) = axis(2); + S(1, 2) = -axis(0); + S(2, 0) = -axis(1); + S(2, 1) = axis(0); + + return Eigen::Matrix3d::Identity() + sinangle * S + (1 - cosangle) * S * S; +} + /** * @brief Function to map equirectangular coordinates onto a world unit vector according a spherical projection */ namespace SphericalMapping { - Vec3 fromEquirectangular(const Vec2& pos2d, int width, int height) + Vec3 fromEquirectangular(const Vec2 & equirectangular, int width, int height) { - const double latitude = (pos2d(1) / double(height)) * M_PI - M_PI_2; - const double longitude = ((pos2d(0) / double(width)) * 2.0 * M_PI) - M_PI; + const double latitude = (equirectangular(1) / double(height)) * M_PI - M_PI_2; + const double longitude = ((equirectangular(0) / double(width)) * 2.0 * M_PI) - M_PI; const double Px = cos(latitude) * sin(longitude); const double Py = sin(latitude); @@ -56,6 +274,25 @@ namespace SphericalMapping return Vec3(Px, Py, Pz); } + + Vec2 toEquirectangular(const Vec3 & spherical, int width, int height) { + + double vertical_angle = asin(spherical(1)); + double horizontal_angle = atan2(spherical(0), spherical(2)); + + double latitude = ((vertical_angle + M_PI_2) / M_PI) * height; + double longitude = ((horizontal_angle + M_PI) / (2.0 * M_PI)) * width; + + return Vec2(longitude, latitude); + } + + Vec2 toLongitudeLatitude(const Vec3 & spherical) { + + double latitude = asin(spherical(1)); + double longitude = atan2(spherical(0), spherical(2)); + + return Vec2(longitude, latitude); + } } inline std::istream& operator>>(std::istream& in, std::pair& out) @@ -67,8 +304,8 @@ inline std::istream& operator>>(std::istream& in, std::pair& out) bool estimate_panorama_size(const sfmData::SfMData & sfmData, float fisheyeMaskingMargin, std::pair & panoramaSize) { - panoramaSize.first = 256; - panoramaSize.second = 128; + panoramaSize.first = 512; + panoramaSize.second = 256; image::Image BufferCoords(panoramaSize.first, panoramaSize.second, true, 
image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); { @@ -106,8 +343,7 @@ bool estimate_panorama_size(const sfmData::SfMData & sfmData, float fisheyeMaski // unit vector to camera const Vec2 pix_disto = intrinsic.project(camPose.getTransform(), ray, true); - if( pix_disto(0) < 0 || pix_disto(0) >= intrinsic.w() || - pix_disto(1) < 0 || pix_disto(1) >= intrinsic.h()) + if( pix_disto(0) < 0 || pix_disto(0) >= intrinsic.w() || pix_disto(1) < 0 || pix_disto(1) >= intrinsic.h()) { //the pixel is out of the image continue; @@ -125,6 +361,8 @@ bool estimate_panorama_size(const sfmData::SfMData & sfmData, float fisheyeMaski } } + + for(int y = 1; y < panoramaSize.second - 1; ++y) { for(int x = 1; x < panoramaSize.first - 1; ++x) @@ -154,10 +392,12 @@ bool estimate_panorama_size(const sfmData::SfMData & sfmData, float fisheyeMaski determinants.push_back(det); } } + + break; } std::nth_element(determinants.begin(), determinants.begin() + determinants.size() / 5, determinants.end()); - double scale = sqrt(determinants[determinants.size() / 5]); + double scale = sqrt(determinants[determinants.size() / 2]); panoramaSize.first *= scale; panoramaSize.second *= scale; @@ -166,6 +406,7 @@ bool estimate_panorama_size(const sfmData::SfMData & sfmData, float fisheyeMaski return true; } + int main(int argc, char **argv) { // command-line parameters @@ -179,7 +420,6 @@ int main(int argc, char **argv) bool fisheyeMasking = false; float fisheyeMaskingMargin = 0.05f; float transitionSize = 10.0f; - int debugSubsetStart = 0; int debugSubsetSize = 0; @@ -276,6 +516,9 @@ int main(int argc, char **argv) panoramaSize.first *= scaleFactor; panoramaSize.second *= scaleFactor; + /*Make sure panorama size is even */ + panoramaSize.first = 2 * (panoramaSize.first / 2); + panoramaSize.second = 2 * (panoramaSize.second / 2); ALICEVISION_LOG_INFO("Output panorama size: " << panoramaSize.first << ", " << panoramaSize.second); @@ -314,13 +557,52 @@ int main(int argc, char **argv) const sfmData::CameraPose 
camPose = sfmData.getPose(view); const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - std::string imagePath = view.getImagePath(); + /*Vec2 pix_hg(0, 0); + Vec2 pix_hd(intrinsic.w(), 0); + Vec2 pix_bd(intrinsic.w(), intrinsic.h()); + Vec2 pix_bg(0, intrinsic.h()); + + Vec2 pix_h = 0.5 * (pix_hg + pix_bg); + Vec2 pix_b = 0.5 * (pix_bg + pix_bd); + Vec2 pix_g = 0.5 * (pix_hg + pix_bg); + Vec2 pix_d = 0.5 * (pix_hd + pix_bd); + + Vec3 cam_hg = intrinsic.remove_disto(intrinsic.ima2cam(pix_hg)).homogeneous().normalized(); + Vec3 cam_hd = intrinsic.remove_disto(intrinsic.ima2cam(pix_hd)).homogeneous().normalized(); + Vec3 cam_bg = intrinsic.remove_disto(intrinsic.ima2cam(pix_bg)).homogeneous().normalized(); + Vec3 cam_bd = intrinsic.remove_disto(intrinsic.ima2cam(pix_bd)).homogeneous().normalized(); + + Vec3 cam_h = intrinsic.remove_disto(intrinsic.ima2cam(pix_h)).homogeneous().normalized(); + Vec3 cam_b = intrinsic.remove_disto(intrinsic.ima2cam(pix_b)).homogeneous().normalized(); + Vec3 cam_g = intrinsic.remove_disto(intrinsic.ima2cam(pix_g)).homogeneous().normalized(); + Vec3 cam_d = intrinsic.remove_disto(intrinsic.ima2cam(pix_d)).homogeneous().normalized(); + + Vec3 pano_hg = camPose.getTransform().rotation().transpose() * cam_hg; + Vec3 pano_hd = camPose.getTransform().rotation().transpose() * cam_hd; + Vec3 pano_bd = camPose.getTransform().rotation().transpose() * cam_bd; + Vec3 pano_bg = camPose.getTransform().rotation().transpose() * cam_bg; + + Vec2 pix_pano_hg = SphericalMapping::toEquirectangular(pano_hg, panoramaSize.first, panoramaSize.second); + Vec2 pix_pano_hd = SphericalMapping::toEquirectangular(pano_hd, panoramaSize.first, panoramaSize.second); + Vec2 pix_pano_bd = SphericalMapping::toEquirectangular(pano_bd, panoramaSize.first, panoramaSize.second); + Vec2 pix_pano_bg = SphericalMapping::toEquirectangular(pano_bg, panoramaSize.first, panoramaSize.second); + + double minx = 
std::min(std::min(std::min(pix_pano_hg(0), pix_pano_hd(0)), pix_pano_bd(0)), pix_pano_bg(0)); + double miny = std::min(std::min(std::min(pix_pano_hg(1), pix_pano_hd(1)), pix_pano_bd(1)), pix_pano_bg(1)); + double maxx = std::max(std::max(std::max(pix_pano_hg(0), pix_pano_hd(0)), pix_pano_bd(0)), pix_pano_bg(0)); + double maxy = std::max(std::max(std::max(pix_pano_hg(1), pix_pano_hd(1)), pix_pano_bd(1)), pix_pano_bg(1)); + + int iminx = std::max(0, int(floor(minx))); + int iminy = std::max(0, int(floor(miny))); + int imaxx = std::max(panoramaSize.first, int(ceil(maxx))); + int imaxy = std::max(panoramaSize.second, int(ceil(maxy))); */ + + // Image RGB image::Image imageIn; - ALICEVISION_LOG_INFO("Reading " << imagePath); @@ -332,14 +614,16 @@ int main(int argc, char **argv) const float blurMid = maxRadius - transitionSize/2.f; const Vec2i center(imageIn.Width()/2.f, imageIn.Height()/2.f); - const image::Sampler2d sampler; + const image::Sampler2d sampler; - for(int y = 0; y < imageOut.Height(); ++y) + image::Image perCameraOutput(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + + for(int y = 0; y < panoramaSize.second; ++y) { - for(int x = 0; x < imageOut.Width(); ++x) + for(int x = 0; x < panoramaSize.first; ++x) { // equirectangular to unit vector - Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), imageOut.Width(), imageOut.Height()); + Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), imageOut.Width(), imageOut.Height()); //Check that this ray should be visible Vec3 transformedRay = camPose.getTransform()(ray); @@ -350,8 +634,7 @@ int main(int argc, char **argv) // unit vector to camera const Vec2 pix_disto = intrinsic.project(camPose.getTransform(), ray, true); - if( pix_disto(0) < 0 || pix_disto(0) >= imageIn.Width() || - pix_disto(1) < 0 || pix_disto(1) >= imageIn.Height()) + if( pix_disto(0) < 0 || pix_disto(0) >= imageIn.Width() || pix_disto(1) < 0 || pix_disto(1) >= imageIn.Height()) { // the 
pixel is out of the image continue; @@ -370,23 +653,36 @@ int main(int argc, char **argv) { contribution = sigmoid(dist, transitionSize, blurMid); } - } - - + } const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); if(contribution > 0.0f) { - imageOut(y, x).r() += pixel.r() * contribution; - imageOut(y, x).g() += pixel.g() * contribution; - imageOut(y, x).b() += pixel.b() * contribution; - imageOut(y, x).a() += contribution; + perCameraOutput(y, x).r() += pixel.r() * contribution; + perCameraOutput(y, x).g() += pixel.g() * contribution; + perCameraOutput(y, x).b() += pixel.b() * contribution; + perCameraOutput(y, x).a() += contribution; } } - } + } + + + LaplacianPyramid pyramid(panoramaSize.first, panoramaSize.second); + image::Image output; + + pyramid.process(perCameraOutput); + pyramid.rebuild(output); + + + + char filename[512]; + sprintf(filename, "%s%d_old.png", outputPanorama.c_str(), imageIndex); + image::writeImage(filename, perCameraOutput, image::EImageColorSpace::AUTO); + sprintf(filename, "%s%d.png", outputPanorama.c_str(), imageIndex); + image::writeImage(filename, output, image::EImageColorSpace::AUTO); } - for(int y = 0; y < imageOut.Height(); ++y) + /*for(int y = 0; y < imageOut.Height(); ++y) { for(int x = 0; x < imageOut.Width(); ++x) { @@ -399,9 +695,11 @@ int main(int argc, char **argv) pixel.a() = 1.0f; // TMP: comment to keep the alpha with the number of contribution for debugging } } - } + }*/ - image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::AUTO); + + + //image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::AUTO); return EXIT_SUCCESS; -} +} \ No newline at end of file From 2d7c36c4fc5466a46fdd816166bceb1cc5d51338 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Mon, 7 Oct 2019 10:52:06 +0200 Subject: [PATCH 064/174] Trying to make hdr work on stitching --- .../pipeline/main_panoramaStitching.cpp | 120 +++++++++--------- 1 file changed, 57 insertions(+), 63 deletions(-) diff 
--git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 13825aaf91..d589091e51 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -45,7 +45,7 @@ class LaplacianPyramid { size_t min_dim = std::min(_width_base, _height_base); size_t scales = static_cast(floor(log2(double(min_dim)))); - scales = 4; + scales = 6; size_t new_width = _width_base; size_t new_height = _height_base; @@ -146,6 +146,38 @@ class LaplacianPyramid { return true; } + bool blend(const LaplacianPyramid & other) { + + if (_difference_buffers.size() != other._difference_buffers.size()) { + return false; + } + + for (size_t lvl = 0; lvl < _difference_buffers.size(); lvl++) { + + image::Image & img_first = _difference_buffers[lvl]; + const image::Image & img_second = other._difference_buffers[lvl]; + + for (int i = 0; i < img_first.Height(); i++) { + for (int j = 0; j < img_first.Width(); j++) { + + image::RGBAfColor & pix_first = img_first(i, j); + const image::RGBAfColor & pix_second = img_second(i, j); + + float sum = pix_first.a() + pix_second.a(); + if (std::abs(sum) > std::numeric_limits::epsilon()) { + float scale = 1.0f / sum; + pix_first.r() = scale * (pix_first.a() * pix_first.r() + pix_second.a() * pix_second.r()); + pix_first.g() = scale * (pix_first.a() * pix_first.g() + pix_second.a() * pix_second.g()); + pix_first.b() = scale * (pix_first.a() * pix_first.b() + pix_second.a() * pix_second.b()); + pix_first.a() = 1.0f; + } + } + } + } + + return true; + } + void divideByAlpha(image::Image & image) { for(int y = 0; y < image.Height(); ++y) { @@ -523,8 +555,11 @@ int main(int argc, char **argv) ALICEVISION_LOG_INFO("Output panorama size: " << panoramaSize.first << ", " << panoramaSize.second); // Create panorama buffer + LaplacianPyramid blending_pyramid(panoramaSize.first, panoramaSize.second); + image::Image output; image::Image imageOut(panoramaSize.first, 
panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + int imageIndex = 0; for(auto& viewIt: sfmData.getViews()) { @@ -559,52 +594,8 @@ int main(int argc, char **argv) const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); std::string imagePath = view.getImagePath(); - - /*Vec2 pix_hg(0, 0); - Vec2 pix_hd(intrinsic.w(), 0); - Vec2 pix_bd(intrinsic.w(), intrinsic.h()); - Vec2 pix_bg(0, intrinsic.h()); - - Vec2 pix_h = 0.5 * (pix_hg + pix_bg); - Vec2 pix_b = 0.5 * (pix_bg + pix_bd); - Vec2 pix_g = 0.5 * (pix_hg + pix_bg); - Vec2 pix_d = 0.5 * (pix_hd + pix_bd); - - Vec3 cam_hg = intrinsic.remove_disto(intrinsic.ima2cam(pix_hg)).homogeneous().normalized(); - Vec3 cam_hd = intrinsic.remove_disto(intrinsic.ima2cam(pix_hd)).homogeneous().normalized(); - Vec3 cam_bg = intrinsic.remove_disto(intrinsic.ima2cam(pix_bg)).homogeneous().normalized(); - Vec3 cam_bd = intrinsic.remove_disto(intrinsic.ima2cam(pix_bd)).homogeneous().normalized(); - - Vec3 cam_h = intrinsic.remove_disto(intrinsic.ima2cam(pix_h)).homogeneous().normalized(); - Vec3 cam_b = intrinsic.remove_disto(intrinsic.ima2cam(pix_b)).homogeneous().normalized(); - Vec3 cam_g = intrinsic.remove_disto(intrinsic.ima2cam(pix_g)).homogeneous().normalized(); - Vec3 cam_d = intrinsic.remove_disto(intrinsic.ima2cam(pix_d)).homogeneous().normalized(); - - Vec3 pano_hg = camPose.getTransform().rotation().transpose() * cam_hg; - Vec3 pano_hd = camPose.getTransform().rotation().transpose() * cam_hd; - Vec3 pano_bd = camPose.getTransform().rotation().transpose() * cam_bd; - Vec3 pano_bg = camPose.getTransform().rotation().transpose() * cam_bg; - - Vec2 pix_pano_hg = SphericalMapping::toEquirectangular(pano_hg, panoramaSize.first, panoramaSize.second); - Vec2 pix_pano_hd = SphericalMapping::toEquirectangular(pano_hd, panoramaSize.first, panoramaSize.second); - Vec2 pix_pano_bd = SphericalMapping::toEquirectangular(pano_bd, panoramaSize.first, panoramaSize.second); - Vec2 
pix_pano_bg = SphericalMapping::toEquirectangular(pano_bg, panoramaSize.first, panoramaSize.second); - - double minx = std::min(std::min(std::min(pix_pano_hg(0), pix_pano_hd(0)), pix_pano_bd(0)), pix_pano_bg(0)); - double miny = std::min(std::min(std::min(pix_pano_hg(1), pix_pano_hd(1)), pix_pano_bd(1)), pix_pano_bg(1)); - double maxx = std::max(std::max(std::max(pix_pano_hg(0), pix_pano_hd(0)), pix_pano_bd(0)), pix_pano_bg(0)); - double maxy = std::max(std::max(std::max(pix_pano_hg(1), pix_pano_hd(1)), pix_pano_bd(1)), pix_pano_bg(1)); - - int iminx = std::max(0, int(floor(minx))); - int iminy = std::max(0, int(floor(miny))); - int imaxx = std::max(panoramaSize.first, int(ceil(maxx))); - int imaxy = std::max(panoramaSize.second, int(ceil(maxy))); */ - - // Image RGB image::Image imageIn; - - ALICEVISION_LOG_INFO("Reading " << imagePath); image::readImage(imagePath, imageIn, image::EImageColorSpace::LINEAR); ALICEVISION_LOG_INFO(" - " << imageIn.Width() << "x" << imageIn.Height()); @@ -617,13 +608,14 @@ int main(int argc, char **argv) const image::Sampler2d sampler; image::Image perCameraOutput(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + for(int y = 0; y < panoramaSize.second; ++y) { for(int x = 0; x < panoramaSize.first; ++x) { // equirectangular to unit vector - Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), imageOut.Width(), imageOut.Height()); + Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), panoramaSize.first, panoramaSize.second); //Check that this ray should be visible Vec3 transformedRay = camPose.getTransform()(ray); @@ -658,36 +650,40 @@ int main(int argc, char **argv) const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); if(contribution > 0.0f) { - perCameraOutput(y, x).r() += pixel.r() * contribution; - perCameraOutput(y, x).g() += pixel.g() * contribution; - perCameraOutput(y, x).b() += pixel.b() * contribution; - perCameraOutput(y, x).a() += contribution; 
+ contribution = 1.0f; + imageOut(y, x).r() += pixel.r() * contribution; + imageOut(y, x).g() += pixel.g() * contribution; + imageOut(y, x).b() += pixel.b() * contribution; + imageOut(y, x).a() += contribution; } } } - LaplacianPyramid pyramid(panoramaSize.first, panoramaSize.second); - image::Image output; - + /*LaplacianPyramid pyramid(panoramaSize.first, panoramaSize.second); pyramid.process(perCameraOutput); + blending_pyramid.blend(pyramid); + + image::Image output; pyramid.rebuild(output); - char filename[512]; sprintf(filename, "%s%d_old.png", outputPanorama.c_str(), imageIndex); image::writeImage(filename, perCameraOutput, image::EImageColorSpace::AUTO); sprintf(filename, "%s%d.png", outputPanorama.c_str(), imageIndex); - image::writeImage(filename, output, image::EImageColorSpace::AUTO); + image::writeImage(filename, output, image::EImageColorSpace::AUTO);*/ } - /*for(int y = 0; y < imageOut.Height(); ++y) + + //blending_pyramid.rebuild(output); + + for(int y = 0; y < output.Height(); ++y) { - for(int x = 0; x < imageOut.Width(); ++x) + for(int x = 0; x < output.Width(); ++x) { - image::RGBAfColor& pixel = imageOut(y, x); - if(pixel.a() > 0.0001f) + image::RGBAfColor& pixel = output(y, x); + if(pixel.a() > std::numeric_limits::epsilon()) { pixel.r() /= pixel.a(); pixel.g() /= pixel.a(); @@ -695,11 +691,9 @@ int main(int argc, char **argv) pixel.a() = 1.0f; // TMP: comment to keep the alpha with the number of contribution for debugging } } - }*/ - - + } - //image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::AUTO); + image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::AUTO); return EXIT_SUCCESS; } \ No newline at end of file From cdab531b5bd31f624130b203c7519466392d752f Mon Sep 17 00:00:00 2001 From: fabien servant Date: Mon, 7 Oct 2019 14:02:34 +0200 Subject: [PATCH 065/174] At least remove unused images ... 
--- src/software/convert/main_convertLDRToHDR.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 35cf14b4c1..d6ac16717e 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -544,6 +544,8 @@ int main(int argc, char** argv) ldrImageGroups_sorted[g].at(i) = ldrImages.at(std::distance(ldrTimes.begin(), it)); } + ldrImages.clear(); + // find target exposure time corresponding to the image name given by user if(!targets[g].empty()) { From e223770fa7591f573c73676fef720426963bef68 Mon Sep 17 00:00:00 2001 From: fabien servant Date: Mon, 7 Oct 2019 14:09:26 +0200 Subject: [PATCH 066/174] make sure output can be sorted alphabetically correctly --- src/software/convert/main_convertLDRToHDR.cpp | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index d6ac16717e..2040ac4c15 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -20,6 +20,7 @@ #include #include +#include #include // These constants define the current software version. 
@@ -413,8 +414,11 @@ int main(int argc, char** argv) if(nbGroups == 1) outputHDRImagesPath.front() = (fs::path(outputPath) / ("hdr.exr")).string(); else - for(int g = 0; g < nbGroups; ++g) - outputHDRImagesPath[g] = (fs::path(outputPath) / ("hdr_" + std::to_string(g) + ".exr")).string(); + for(int g = 0; g < nbGroups; ++g) { + std::ostringstream ss; + ss << std::setw(4) << std::setfill('0') << g; + outputHDRImagesPath[g] = (fs::path(outputPath) / ("hdr_" + ss.str() + ".exr")).string(); + } } else if(outputHDRImagesPath.size() != nbGroups) { From 9099d786d31f3d4e65b0fc9aabf266d81bf6f4ff Mon Sep 17 00:00:00 2001 From: fabien servant Date: Mon, 7 Oct 2019 14:41:23 +0200 Subject: [PATCH 067/174] backup --- .../pipeline/main_panoramaStitching.cpp | 32 +++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index d589091e51..624da57734 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -650,7 +650,35 @@ int main(int argc, char **argv) const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); if(contribution > 0.0f) { - contribution = 1.0f; + image::RGBfColor linear, linear2; + double a = 0.055; + + if (pixel.r() <= 0.04045) { + linear.r() = pixel.r() / 12.92; + } + else { + linear.r() = pow(double((pixel.r() + a)/(1.0 + a)), 2.4); + } + + if (pixel.g() <= 0.04045) { + linear.g() = pixel.g() / 12.92; + } + else { + linear.g() = pow(double((pixel.g() + a)/(1.0 + a)), 2.4); + } + + if (pixel.b() <= 0.04045) { + linear.b() = pixel.b() / 12.92; + } + else { + linear.b() = pow(double((pixel.b() + a)/(1.0 + a)), 2.4); + } + + linear2.r() = 0.4124 * linear.r() + 0.3576 * linear.g() + 0.1805 * linear.b(); + linear2.g() = 0.2126 * linear.r() + 0.7152 * linear.g() + 0.0722 * linear.b(); + linear2.b() = 0.0193 * linear.r() + 0.1192 * linear.g() + 0.9505 * linear.b(); + + 
imageOut(y, x).r() += pixel.r() * contribution; imageOut(y, x).g() += pixel.g() * contribution; imageOut(y, x).b() += pixel.b() * contribution; @@ -693,7 +721,7 @@ int main(int argc, char **argv) } } - image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::AUTO); + image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::LINEAR); return EXIT_SUCCESS; } \ No newline at end of file From 4bacac0d62b2db09fb35c8450365269d09179482 Mon Sep 17 00:00:00 2001 From: fabien servant Date: Tue, 8 Oct 2019 14:14:43 +0200 Subject: [PATCH 068/174] laplacian results --- .../pipeline/main_panoramaStitching.cpp | 158 ++++++++++-------- 1 file changed, 89 insertions(+), 69 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 624da57734..538c2214f3 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -41,17 +41,18 @@ class LaplacianPyramid { _width_base(width_base), _height_base(height_base) { - _kernel = oiio::ImageBufAlgo::make_kernel("gaussian", 5.0f, 5.0f); + _kernel = oiio::ImageBufAlgo::make_kernel("gaussian", 11.0f, 11.0f); + _kernel_alphas = oiio::ImageBufAlgo::make_kernel("gaussian", 21.0f, 21.0f); size_t min_dim = std::min(_width_base, _height_base); size_t scales = static_cast(floor(log2(double(min_dim)))); - scales = 6; size_t new_width = _width_base; size_t new_height = _height_base; for (int i = 0; i < scales; i++) { - _difference_buffers.push_back(image::Image(new_width, new_height)); + _difference_buffers.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); + _alphas.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); _work_buffers_1.push_back(image::Image(new_width, new_height)); _work_buffers_2.push_back(image::Image(new_width, new_height)); _work_buffers_3.push_back(image::Image(new_width, new_height)); @@ -66,6 
+67,19 @@ class LaplacianPyramid { _work_buffers_1[0] = input; + for (int i = 0; i < input.Height(); i++) { + for (int j = 0; j < input.Width(); j++) { + _alphas[0](i, j).r() = input(i, j).a(); + _alphas[0](i, j).g() = input(i, j).a(); + _alphas[0](i, j).b() = input(i, j).a(); + _alphas[0](i, j).a() = 1.0; + + if (_work_buffers_1[0](i, j).a() > std::numeric_limits::epsilon()) { + _work_buffers_1[0](i, j).a() = 1.0f; + } + } + } + /* Loop over the levels. @@ -81,29 +95,34 @@ class LaplacianPyramid { oiio::ImageBuf _work_buffer_1_oiio(spec_base, (void*)(_work_buffers_1[lvl].data())); oiio::ImageBuf _work_buffer_2_oiio(spec_base, (void*)(_work_buffers_2[lvl].data())); oiio::ImageBuf _work_buffer_3_oiio(spec_base, (void*)(_work_buffers_3[lvl].data())); + oiio::ImageBuf _alphas_buffer_oiio(spec_base, (void*)(_alphas[lvl].data())); oiio::ImageBuf _difference_buffer_half_oiio(spec_base_half, (void*)(_difference_buffers[lvl + 1].data())); oiio::ImageBuf _work_buffer_1_half_oiio(spec_base_half, (void*)(_work_buffers_1[lvl + 1].data())); oiio::ImageBuf _work_buffer_2_half_oiio(spec_base_half, (void*)(_work_buffers_2[lvl + 1].data())); + oiio::ImageBuf _alphas_buffer_half_oiio(spec_base_half, (void*)(_alphas[lvl + 1].data())); - + /*Low pass alpha*/ + oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _alphas_buffer_oiio, _kernel_alphas, false); + + /*Subsample alpha*/ + oiio::ImageBufAlgo::resample(_alphas_buffer_half_oiio, _work_buffer_2_oiio, false); + /*First, low pass the image*/ oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _work_buffer_1_oiio, _kernel, false); - divideByAlpha(_work_buffers_2[lvl]); - copyMask(_work_buffers_2[lvl], _work_buffers_1[lvl]); + trimAlpha(_work_buffers_2[lvl]); /*Subsample image*/ oiio::ImageBufAlgo::resample(_work_buffer_1_half_oiio, _work_buffer_2_oiio, false); - divideByAlpha(_work_buffers_1[lvl + 1]); + trimAlpha(_work_buffers_1[lvl + 1]); /*Upsample back reduced image*/ oiio::ImageBufAlgo::resample(_work_buffer_2_oiio, 
_work_buffer_1_half_oiio, false); - divideByAlpha(_work_buffers_2[lvl]); + trimAlpha(_work_buffers_2[lvl]); /*Low pass upsampled image*/ oiio::ImageBufAlgo::convolve(_work_buffer_3_oiio, _work_buffer_2_oiio, _kernel, true); - divideByAlpha(_work_buffers_3[lvl]); - copyMask(_work_buffers_3[lvl], _work_buffers_2[lvl]); + trimAlpha(_work_buffers_3[lvl]); /*Difference*/ substract(_difference_buffers[lvl], _work_buffers_1[lvl], _work_buffers_3[lvl]); @@ -130,12 +149,11 @@ class LaplacianPyramid { /*Upsample back reduced image*/ oiio::ImageBufAlgo::resample(_work_buffer_1_oiio, _work_buffer_1_half_oiio, false); - divideByAlpha(_work_buffers_1[lvl]); + trimAlpha(_work_buffers_1[lvl]); /*Low pass upsampled image*/ - oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _work_buffer_1_oiio, _kernel, true); - divideByAlpha(_work_buffers_2[lvl]); - copyMask(_work_buffers_2[lvl], _work_buffers_1[lvl]); + oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _work_buffer_1_oiio, _kernel, false); + trimAlpha(_work_buffers_2[lvl]); /*Add with next*/ add(_work_buffers_1[lvl], _difference_buffers[lvl], _work_buffers_2[lvl]); @@ -154,23 +172,39 @@ class LaplacianPyramid { for (size_t lvl = 0; lvl < _difference_buffers.size(); lvl++) { + //_difference_buffers[lvl].fill(image::RGBAfColor(0.0f,0.0f,0.0f,0.0f)); + image::Image & img_first = _difference_buffers[lvl]; const image::Image & img_second = other._difference_buffers[lvl]; + const image::Image & alpha = other._alphas[lvl]; for (int i = 0; i < img_first.Height(); i++) { for (int j = 0; j < img_first.Width(); j++) { image::RGBAfColor & pix_first = img_first(i, j); const image::RGBAfColor & pix_second = img_second(i, j); + const image::RGBAfColor & pix_alpha = alpha(i, j); - float sum = pix_first.a() + pix_second.a(); - if (std::abs(sum) > std::numeric_limits::epsilon()) { - float scale = 1.0f / sum; - pix_first.r() = scale * (pix_first.a() * pix_first.r() + pix_second.a() * pix_second.r()); - pix_first.g() = scale * (pix_first.a() * 
pix_first.g() + pix_second.a() * pix_second.g()); - pix_first.b() = scale * (pix_first.a() * pix_first.b() + pix_second.a() * pix_second.b()); - pix_first.a() = 1.0f; + double weight = 0.5f; + + if (pix_second.a() == 0.0) { + continue; } + + if (pix_first.a() == 0.0) { + pix_first.r() = pix_second.r(); + pix_first.g() = pix_second.g(); + pix_first.b() = pix_second.b(); + pix_first.a() = pix_second.a(); + continue; + } + + double mweight = 1.0 - weight; + + pix_first.r() = mweight * pix_first.r() + weight * pix_second.r(); + pix_first.g() = mweight * pix_first.g() + weight * pix_second.g(); + pix_first.b() = mweight * pix_first.b() + weight * pix_second.b(); + pix_first.a() = 1.0f; } } } @@ -190,6 +224,21 @@ class LaplacianPyramid { } } + void trimAlpha(image::Image & image) { + + for(int y = 0; y < image.Height(); ++y) { + for(int x = 0; x < image.Width(); ++x) { + image::RGBAfColor& pixel = image(y, x); + if(pixel.a() != 1.0f) { + pixel.r() = 0.0; + pixel.g() = 0.0; + pixel.b() = 0.0; + pixel.a() = 0.0; + } + } + } + } + void copyMask(image::Image & dst, const image::Image & src) { for(int y = 0; y < src.Height(); ++y) { @@ -257,8 +306,10 @@ class LaplacianPyramid { size_t _width_base; size_t _height_base; oiio::ImageBuf _kernel; + oiio::ImageBuf _kernel_alphas; std::vector> _difference_buffers; + std::vector> _alphas; std::vector> _work_buffers_1; std::vector> _work_buffers_2; std::vector> _work_buffers_3; @@ -451,7 +502,7 @@ int main(int argc, char **argv) std::pair panoramaSize = {0, 0}; bool fisheyeMasking = false; float fisheyeMaskingMargin = 0.05f; - float transitionSize = 10.0f; + float transitionSize = 1000.0f; int debugSubsetStart = 0; int debugSubsetSize = 0; @@ -607,8 +658,7 @@ int main(int argc, char **argv) const image::Sampler2d sampler; - image::Image perCameraOutput(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); - + image::Image perCameraOutput(panoramaSize.first, panoramaSize.second, true, 
image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); for(int y = 0; y < panoramaSize.second; ++y) { @@ -650,67 +700,37 @@ int main(int argc, char **argv) const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); if(contribution > 0.0f) { - image::RGBfColor linear, linear2; - double a = 0.055; - - if (pixel.r() <= 0.04045) { - linear.r() = pixel.r() / 12.92; - } - else { - linear.r() = pow(double((pixel.r() + a)/(1.0 + a)), 2.4); - } - - if (pixel.g() <= 0.04045) { - linear.g() = pixel.g() / 12.92; - } - else { - linear.g() = pow(double((pixel.g() + a)/(1.0 + a)), 2.4); - } - - if (pixel.b() <= 0.04045) { - linear.b() = pixel.b() / 12.92; - } - else { - linear.b() = pow(double((pixel.b() + a)/(1.0 + a)), 2.4); - } - - linear2.r() = 0.4124 * linear.r() + 0.3576 * linear.g() + 0.1805 * linear.b(); - linear2.g() = 0.2126 * linear.r() + 0.7152 * linear.g() + 0.0722 * linear.b(); - linear2.b() = 0.0193 * linear.r() + 0.1192 * linear.g() + 0.9505 * linear.b(); - - - imageOut(y, x).r() += pixel.r() * contribution; - imageOut(y, x).g() += pixel.g() * contribution; - imageOut(y, x).b() += pixel.b() * contribution; - imageOut(y, x).a() += contribution; + perCameraOutput(y, x).r() += pixel.r() * contribution; + perCameraOutput(y, x).g() += pixel.g() * contribution; + perCameraOutput(y, x).b() += pixel.b() * contribution; + perCameraOutput(y, x).a() += contribution; } } } - /*LaplacianPyramid pyramid(panoramaSize.first, panoramaSize.second); + LaplacianPyramid pyramid(panoramaSize.first, panoramaSize.second); pyramid.process(perCameraOutput); blending_pyramid.blend(pyramid); - image::Image output; - pyramid.rebuild(output); - + /*image::Image output; + pyramid.rebuild(output);*/ + /*blending_pyramid.rebuild(imageOut); + char filename[512]; - sprintf(filename, "%s%d_old.png", outputPanorama.c_str(), imageIndex); - image::writeImage(filename, perCameraOutput, image::EImageColorSpace::AUTO); sprintf(filename, "%s%d.png", outputPanorama.c_str(), imageIndex); - 
image::writeImage(filename, output, image::EImageColorSpace::AUTO);*/ + image::writeImage(filename, imageOut, image::EImageColorSpace::AUTO);*/ } - //blending_pyramid.rebuild(output); + blending_pyramid.rebuild(imageOut); - for(int y = 0; y < output.Height(); ++y) + for(int y = 0; y < imageOut.Height(); ++y) { - for(int x = 0; x < output.Width(); ++x) + for(int x = 0; x < imageOut.Width(); ++x) { - image::RGBAfColor& pixel = output(y, x); + image::RGBAfColor& pixel = imageOut(y, x); if(pixel.a() > std::numeric_limits::epsilon()) { pixel.r() /= pixel.a(); @@ -721,7 +741,7 @@ int main(int argc, char **argv) } } - image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::LINEAR); + image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::SRGB); return EXIT_SUCCESS; } \ No newline at end of file From b8c299c5dd37dab543acac1f87763cd186045346 Mon Sep 17 00:00:00 2001 From: fabien servant Date: Wed, 9 Oct 2019 12:43:01 +0200 Subject: [PATCH 069/174] Still working on laplacian blending (wip) --- .../pipeline/main_panoramaStitching.cpp | 131 +++++++++++++----- 1 file changed, 93 insertions(+), 38 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 538c2214f3..7cb293f139 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -41,21 +41,23 @@ class LaplacianPyramid { _width_base(width_base), _height_base(height_base) { - _kernel = oiio::ImageBufAlgo::make_kernel("gaussian", 11.0f, 11.0f); - _kernel_alphas = oiio::ImageBufAlgo::make_kernel("gaussian", 21.0f, 21.0f); + _kernel = oiio::ImageBufAlgo::make_kernel("gaussian", 5.0f, 5.0f); + _kernel_alphas = oiio::ImageBufAlgo::make_kernel("gaussian", 5.0f, 5.0f); size_t min_dim = std::min(_width_base, _height_base); - size_t scales = static_cast(floor(log2(double(min_dim)))); - + size_t scales = static_cast(floor(log2(double(min_dim) / 32.0))); + scales = 2; + + 
/*Create pyramid*/ size_t new_width = _width_base; size_t new_height = _height_base; for (int i = 0; i < scales; i++) { _difference_buffers.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); + _work_buffers_1.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); + _work_buffers_2.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); + _work_buffers_3.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); _alphas.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - _work_buffers_1.push_back(image::Image(new_width, new_height)); - _work_buffers_2.push_back(image::Image(new_width, new_height)); - _work_buffers_3.push_back(image::Image(new_width, new_height)); new_height /= 2; new_width /= 2; @@ -79,7 +81,6 @@ class LaplacianPyramid { } } } - /* Loop over the levels. @@ -104,25 +105,25 @@ class LaplacianPyramid { /*Low pass alpha*/ oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _alphas_buffer_oiio, _kernel_alphas, false); - + divideByAlpha(_work_buffers_2[lvl]); + /*Subsample alpha*/ oiio::ImageBufAlgo::resample(_alphas_buffer_half_oiio, _work_buffer_2_oiio, false); /*First, low pass the image*/ - oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _work_buffer_1_oiio, _kernel, false); - trimAlpha(_work_buffers_2[lvl]); + oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _work_buffer_1_oiio, _kernel, true); + divideByAlpha(_work_buffers_2[lvl]); //Take into account borders + /*Subsample image*/ oiio::ImageBufAlgo::resample(_work_buffer_1_half_oiio, _work_buffer_2_oiio, false); - trimAlpha(_work_buffers_1[lvl + 1]); /*Upsample back reduced image*/ oiio::ImageBufAlgo::resample(_work_buffer_2_oiio, _work_buffer_1_half_oiio, false); - trimAlpha(_work_buffers_2[lvl]); /*Low pass upsampled image*/ - oiio::ImageBufAlgo::convolve(_work_buffer_3_oiio, 
_work_buffer_2_oiio, _kernel, true); - trimAlpha(_work_buffers_3[lvl]); + oiio::ImageBufAlgo::convolve(_work_buffer_3_oiio, _work_buffer_2_oiio, _kernel, false); + divideByAlpha(_work_buffers_3[lvl]); //Take into account borders /*Difference*/ substract(_difference_buffers[lvl], _work_buffers_1[lvl], _work_buffers_3[lvl]); @@ -130,6 +131,8 @@ class LaplacianPyramid { _difference_buffers[_difference_buffers.size() - 1] = _work_buffers_1[_work_buffers_1.size() - 1]; + + return true; } @@ -144,23 +147,34 @@ class LaplacianPyramid { oiio::ImageBuf _work_buffer_1_oiio(spec_base, (void*)(_work_buffers_1[lvl].data())); oiio::ImageBuf _work_buffer_2_oiio(spec_base, (void*)(_work_buffers_2[lvl].data())); + oiio::ImageBuf _work_buffer_3_oiio(spec_base, (void*)(_work_buffers_3[lvl].data())); oiio::ImageBuf _work_buffer_1_half_oiio(spec_base_half, (void*)(_work_buffers_1[lvl + 1].data())); oiio::ImageBuf _difference_buffer_half_oiio(spec_base_half, (void*)(_difference_buffers[lvl + 1].data())); /*Upsample back reduced image*/ - oiio::ImageBufAlgo::resample(_work_buffer_1_oiio, _work_buffer_1_half_oiio, false); - trimAlpha(_work_buffers_1[lvl]); + oiio::ImageBufAlgo::resample(_work_buffer_2_oiio, _work_buffer_1_half_oiio, false); /*Low pass upsampled image*/ - oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _work_buffer_1_oiio, _kernel, false); - trimAlpha(_work_buffers_2[lvl]); + oiio::ImageBufAlgo::convolve(_work_buffer_3_oiio, _work_buffer_2_oiio, _kernel, false); + divideByAlpha(_work_buffers_3[lvl]); /*Add with next*/ - add(_work_buffers_1[lvl], _difference_buffers[lvl], _work_buffers_2[lvl]); + add(_work_buffers_1[lvl], _difference_buffers[lvl], _work_buffers_3[lvl]); } - + + output = _work_buffers_1[0]; + const image::Image & test = _difference_buffers[_difference_buffers.size() - 1]; + for (int i = 0; i < test.Height(); i++) { + for (int j = 0; j < test.Width(); j++) { + + if (test(i ,j).r() < 0.0 || test(i, j).g() < 0.0 || test(i, j).b() < 0.0) { + std::cout << "???" 
<< std::endl; + } + } + } + return true; } @@ -185,7 +199,7 @@ class LaplacianPyramid { const image::RGBAfColor & pix_second = img_second(i, j); const image::RGBAfColor & pix_alpha = alpha(i, j); - double weight = 0.5f; + double weight = 1.0f;//pix_alpha.r(); if (pix_second.a() == 0.0) { continue; @@ -195,7 +209,7 @@ class LaplacianPyramid { pix_first.r() = pix_second.r(); pix_first.g() = pix_second.g(); pix_first.b() = pix_second.b(); - pix_first.a() = pix_second.a(); + pix_first.a() = 1.0f; continue; } @@ -217,7 +231,7 @@ class LaplacianPyramid { for(int y = 0; y < image.Height(); ++y) { for(int x = 0; x < image.Width(); ++x) { image::RGBAfColor& pixel = image(y, x); - if(pixel.a() > 0.01f) { + if(pixel.a() > std::numeric_limits::epsilon()) { pixel = pixel / pixel.a(); } } @@ -229,7 +243,8 @@ class LaplacianPyramid { for(int y = 0; y < image.Height(); ++y) { for(int x = 0; x < image.Width(); ++x) { image::RGBAfColor& pixel = image(y, x); - if(pixel.a() != 1.0f) { + + if(std::abs(pixel.a() - 1.0f) > 1e-02) { pixel.r() = 0.0; pixel.g() = 0.0; pixel.b() = 0.0; @@ -259,7 +274,7 @@ class LaplacianPyramid { const image::RGBAfColor & pixelB = B(y, x); image::RGBAfColor & pixelR = result(y, x); - if(pixelA.a() > std::numeric_limits::epsilon()) { + if ((pixelA.a() > std::numeric_limits::epsilon()) && (pixelB.a() > std::numeric_limits::epsilon())) { pixelR.r() = pixelA.r() - pixelB.r(); pixelR.g() = pixelA.g() - pixelB.g(); pixelR.b() = pixelA.b() - pixelB.b(); @@ -284,11 +299,11 @@ class LaplacianPyramid { const image::RGBAfColor & pixelB = B(y, x); image::RGBAfColor & pixelR = result(y, x); - if(pixelA.a() > std::numeric_limits::epsilon()) { + if(pixelA.a() > std::numeric_limits::epsilon() && pixelB.a() > std::numeric_limits::epsilon()) { pixelR.r() = pixelA.r() + pixelB.r(); pixelR.g() = pixelA.g() + pixelB.g(); pixelR.b() = pixelA.b() + pixelB.b(); - pixelR.a() = 1.0f; + pixelR.a() = 1.0f; } else { pixelR.r() = 0.0f; @@ -505,6 +520,7 @@ int main(int argc, char **argv) 
float transitionSize = 1000.0f; int debugSubsetStart = 0; int debugSubsetSize = 0; + char filename[512]; po::options_description allParams( "Perform panorama stiching of cameras around a nodal point for 360° panorama creation.\n" @@ -606,7 +622,6 @@ int main(int argc, char **argv) ALICEVISION_LOG_INFO("Output panorama size: " << panoramaSize.first << ", " << panoramaSize.second); // Create panorama buffer - LaplacianPyramid blending_pyramid(panoramaSize.first, panoramaSize.second); image::Image output; image::Image imageOut(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); @@ -648,7 +663,12 @@ int main(int argc, char **argv) // Image RGB image::Image imageIn; ALICEVISION_LOG_INFO("Reading " << imagePath); + //sprintf(filename, "%s.png", imagePath.c_str()); image::readImage(imagePath, imageIn, image::EImageColorSpace::LINEAR); + + + + //image::writeImage(filename, imageIn, image::EImageColorSpace::AUTO); ALICEVISION_LOG_INFO(" - " << imageIn.Width() << "x" << imageIn.Height()); const float maxRadius = std::min(imageIn.Width(), imageIn.Height()) * 0.5f * (1.0 - fisheyeMaskingMargin); @@ -676,7 +696,7 @@ int main(int argc, char **argv) // unit vector to camera const Vec2 pix_disto = intrinsic.project(camPose.getTransform(), ray, true); - if( pix_disto(0) < 0 || pix_disto(0) >= imageIn.Width() || pix_disto(1) < 0 || pix_disto(1) >= imageIn.Height()) + if( pix_disto(0) < 0 || pix_disto(0) >= imageIn.Width() - 1 || pix_disto(1) < 0 || pix_disto(1) >= imageIn.Height() - 1) { // the pixel is out of the image continue; @@ -700,6 +720,7 @@ int main(int argc, char **argv) const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); if(contribution > 0.0f) { + //contribution = 1.0f; perCameraOutput(y, x).r() += pixel.r() * contribution; perCameraOutput(y, x).g() += pixel.g() * contribution; perCameraOutput(y, x).b() += pixel.b() * contribution; @@ -711,20 +732,54 @@ int main(int argc, char **argv) LaplacianPyramid 
pyramid(panoramaSize.first, panoramaSize.second); pyramid.process(perCameraOutput); + + + + LaplacianPyramid blending_pyramid(panoramaSize.first, panoramaSize.second); + blending_pyramid.process(imageOut); blending_pyramid.blend(pyramid); - /*image::Image output; - pyramid.rebuild(output);*/ + /*for (int i = 0; i < pyramid._difference_buffers.size(); i++) { + char filename[512]; + sprintf(filename, "%s_differences_%d_%d.exr", outputPanorama.c_str(), imageIndex, i); + image::writeImage(filename, pyramid._difference_buffers[i], image::EImageColorSpace::LINEAR); + }*/ + + //image::Image output; + //pyramid.rebuild(output); - /*blending_pyramid.rebuild(imageOut); + blending_pyramid.rebuild(imageOut); - char filename[512]; - sprintf(filename, "%s%d.png", outputPanorama.c_str(), imageIndex); - image::writeImage(filename, imageOut, image::EImageColorSpace::AUTO);*/ + for(int y = 0; y < imageOut.Height(); ++y) + { + for(int x = 0; x < imageOut.Width(); ++x) + { + image::RGBAfColor& pixel = imageOut(y, x); + if(pixel.a() > std::numeric_limits::epsilon()) + { + pixel.r() /= pixel.a(); + pixel.g() /= pixel.a(); + pixel.b() /= pixel.a(); + + /*pixel.r() = std::min(1.0f, std::max(0.0f, pixel.r())); + pixel.g() = std::min(1.0f, std::max(0.0f, pixel.g())); + pixel.b() = std::min(1.0f, std::max(0.0f, pixel.b()));*/ + + pixel.a() = 1.0f; // TMP: comment to keep the alpha with the number of contribution for debugging + } + } + } + + + sprintf(filename, "%s_rebuild_%d.exr", outputPanorama.c_str(), imageIndex); + image::writeImage(filename, perCameraOutput, image::EImageColorSpace::SRGB); + + sprintf(filename, "%s_blend_%d.exr", outputPanorama.c_str(), imageIndex); + image::writeImage(filename, imageOut, image::EImageColorSpace::SRGB); } - blending_pyramid.rebuild(imageOut); + /*blending_pyramid.rebuild(imageOut); for(int y = 0; y < imageOut.Height(); ++y) { @@ -741,7 +796,7 @@ int main(int argc, char **argv) } } - image::writeImage(outputPanorama, imageOut, 
image::EImageColorSpace::SRGB); + image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::LINEAR);*/ return EXIT_SUCCESS; } \ No newline at end of file From 6179a4381b48af2ee501dd16e5084ea8b0810320 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Fri, 11 Oct 2019 15:31:28 +0200 Subject: [PATCH 070/174] rescratch laplacian --- .../pipeline/main_panoramaStitching.cpp | 514 +++++++++--------- 1 file changed, 269 insertions(+), 245 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 7cb293f139..162c132a88 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -41,23 +41,20 @@ class LaplacianPyramid { _width_base(width_base), _height_base(height_base) { - _kernel = oiio::ImageBufAlgo::make_kernel("gaussian", 5.0f, 5.0f); - _kernel_alphas = oiio::ImageBufAlgo::make_kernel("gaussian", 5.0f, 5.0f); - size_t min_dim = std::min(_width_base, _height_base); - size_t scales = static_cast(floor(log2(double(min_dim) / 32.0))); - scales = 2; + _scales = static_cast(floor(log2(double(min_dim) / 32.0))); /*Create pyramid*/ size_t new_width = _width_base; size_t new_height = _height_base; - for (int i = 0; i < scales; i++) { + for (int i = 0; i < _scales; i++) { - _difference_buffers.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - _work_buffers_1.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - _work_buffers_2.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - _work_buffers_3.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - _alphas.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); + _pyramid.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 
0.0f))); + _differences.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); + _differences_full.push_back(image::Image(_width_base, _height_base, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); + _buffer_1.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); + _buffer_2.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); + new_height /= 2; new_width /= 2; @@ -66,268 +63,208 @@ class LaplacianPyramid { bool process(const image::Image & input) { - - _work_buffers_1[0] = input; - - for (int i = 0; i < input.Height(); i++) { - for (int j = 0; j < input.Width(); j++) { - _alphas[0](i, j).r() = input(i, j).a(); - _alphas[0](i, j).g() = input(i, j).a(); - _alphas[0](i, j).b() = input(i, j).a(); - _alphas[0](i, j).a() = 1.0; + //Build pyramid + _pyramid[0] = input; + for (int lvl = 1; lvl < _scales; lvl++) { + downscale(_pyramid[lvl], _pyramid[lvl - 1]); + } - if (_work_buffers_1[0](i, j).a() > std::numeric_limits::epsilon()) { - _work_buffers_1[0](i, j).a() = 1.0f; - } - } + //Differences + for (int lvl = 0; lvl < _scales - 1; lvl++) { + upscale(_buffer_1[lvl], _pyramid[lvl + 1]); + substract(_differences[lvl], _pyramid[lvl], _buffer_1[lvl]); } - /* - Loop over the levels. 
- 0 being the base level with full resolution - levels.size() - 1 being the least defined image - */ - for (int lvl = 0; lvl < _difference_buffers.size() - 1; lvl++) { - - oiio::ImageSpec spec_base(_difference_buffers[lvl].Width(), _difference_buffers[lvl].Height(), 4, oiio::TypeDesc::FLOAT); - oiio::ImageSpec spec_base_half(_difference_buffers[lvl + 1].Width(), _difference_buffers[lvl + 1].Height(), 4, oiio::TypeDesc::FLOAT); - - oiio::ImageBuf _difference_buffer_oiio(spec_base, (void*)(_difference_buffers[lvl].data())); - oiio::ImageBuf _work_buffer_1_oiio(spec_base, (void*)(_work_buffers_1[lvl].data())); - oiio::ImageBuf _work_buffer_2_oiio(spec_base, (void*)(_work_buffers_2[lvl].data())); - oiio::ImageBuf _work_buffer_3_oiio(spec_base, (void*)(_work_buffers_3[lvl].data())); - oiio::ImageBuf _alphas_buffer_oiio(spec_base, (void*)(_alphas[lvl].data())); - oiio::ImageBuf _difference_buffer_half_oiio(spec_base_half, (void*)(_difference_buffers[lvl + 1].data())); - oiio::ImageBuf _work_buffer_1_half_oiio(spec_base_half, (void*)(_work_buffers_1[lvl + 1].data())); - oiio::ImageBuf _work_buffer_2_half_oiio(spec_base_half, (void*)(_work_buffers_2[lvl + 1].data())); - oiio::ImageBuf _alphas_buffer_half_oiio(spec_base_half, (void*)(_alphas[lvl + 1].data())); - - - /*Low pass alpha*/ - oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _alphas_buffer_oiio, _kernel_alphas, false); - divideByAlpha(_work_buffers_2[lvl]); - - /*Subsample alpha*/ - oiio::ImageBufAlgo::resample(_alphas_buffer_half_oiio, _work_buffer_2_oiio, false); + _differences[_differences.size() - 1] = _pyramid[_differences.size() - 1]; - /*First, low pass the image*/ - oiio::ImageBufAlgo::convolve(_work_buffer_2_oiio, _work_buffer_1_oiio, _kernel, true); - divideByAlpha(_work_buffers_2[lvl]); //Take into account borders + /*Upscale to the max*/ + for (int max_level = 0; max_level < _scales; max_level++) { + image::Image & refOut = _differences_full[max_level]; - /*Subsample image*/ - 
oiio::ImageBufAlgo::resample(_work_buffer_1_half_oiio, _work_buffer_2_oiio, false); - - /*Upsample back reduced image*/ - oiio::ImageBufAlgo::resample(_work_buffer_2_oiio, _work_buffer_1_half_oiio, false); - - /*Low pass upsampled image*/ - oiio::ImageBufAlgo::convolve(_work_buffer_3_oiio, _work_buffer_2_oiio, _kernel, false); - divideByAlpha(_work_buffers_3[lvl]); //Take into account borders + _buffer_1[max_level] = _differences[max_level]; + for (int lvl = max_level - 1; lvl >= 0; lvl--) { + upscale(_buffer_1[lvl], _buffer_1[lvl + 1]); + } + refOut = _buffer_1[0]; - /*Difference*/ - substract(_difference_buffers[lvl], _work_buffers_1[lvl], _work_buffers_3[lvl]); + + for (int i = 0; i < refOut.Height(); i++) { + for (int j = 0; j < refOut.Width(); j++) { + if (!(input(i, j).a() > std::numeric_limits::epsilon())) { + refOut(i, j).r() = 0.0f; + refOut(i, j).g() = 0.0f; + refOut(i, j).b() = 0.0f; + refOut(i, j).a() = 0.0f; + } + } + } } - _difference_buffers[_difference_buffers.size() - 1] = _work_buffers_1[_work_buffers_1.size() - 1]; - - - return true; } bool rebuild(image::Image & output) { - _work_buffers_1[_difference_buffers.size() - 1] = _difference_buffers[_difference_buffers.size() - 1]; - - for (int lvl = _difference_buffers.size() - 2; lvl >= 0; lvl--) { - - oiio::ImageSpec spec_base(_difference_buffers[lvl].Width(), _difference_buffers[lvl].Height(), 4, oiio::TypeDesc::FLOAT); - oiio::ImageSpec spec_base_half(_difference_buffers[lvl + 1].Width(), _difference_buffers[lvl + 1].Height(), 4, oiio::TypeDesc::FLOAT); + output = _differences_full[_scales - 1]; + for (int lvl = _scales - 2; lvl >= 0; lvl--) { + add(output, output, _differences_full[lvl]); + } - oiio::ImageBuf _work_buffer_1_oiio(spec_base, (void*)(_work_buffers_1[lvl].data())); - oiio::ImageBuf _work_buffer_2_oiio(spec_base, (void*)(_work_buffers_2[lvl].data())); - oiio::ImageBuf _work_buffer_3_oiio(spec_base, (void*)(_work_buffers_3[lvl].data())); - oiio::ImageBuf 
_work_buffer_1_half_oiio(spec_base_half, (void*)(_work_buffers_1[lvl + 1].data())); - oiio::ImageBuf _difference_buffer_half_oiio(spec_base_half, (void*)(_difference_buffers[lvl + 1].data())); + return true; + } - /*Upsample back reduced image*/ - oiio::ImageBufAlgo::resample(_work_buffer_2_oiio, _work_buffer_1_half_oiio, false); + bool blend(const LaplacianPyramid & other, const image::Image & weightMap) { - /*Low pass upsampled image*/ - oiio::ImageBufAlgo::convolve(_work_buffer_3_oiio, _work_buffer_2_oiio, _kernel, false); - divideByAlpha(_work_buffers_3[lvl]); + for (int lvl = 0; lvl < _scales; lvl++) { - /*Add with next*/ - add(_work_buffers_1[lvl], _difference_buffers[lvl], _work_buffers_3[lvl]); + blend(_differences_full[lvl], _differences_full[lvl], other._differences_full[lvl], weightMap); } - - - output = _work_buffers_1[0]; - - const image::Image & test = _difference_buffers[_difference_buffers.size() - 1]; - for (int i = 0; i < test.Height(); i++) { - for (int j = 0; j < test.Width(); j++) { - - if (test(i ,j).r() < 0.0 || test(i, j).g() < 0.0 || test(i, j).b() < 0.0) { - std::cout << "???" 
<< std::endl; - } - } - } return true; } + + bool downscale(image::Image & output, const image::Image & input) { - bool blend(const LaplacianPyramid & other) { + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { - if (_difference_buffers.size() != other._difference_buffers.size()) { - return false; + output(i, j) = input(i * 2, j * 2); + } } - for (size_t lvl = 0; lvl < _difference_buffers.size(); lvl++) { + return true; + } - //_difference_buffers[lvl].fill(image::RGBAfColor(0.0f,0.0f,0.0f,0.0f)); + bool upscale(image::Image & output, const image::Image & input) { - image::Image & img_first = _difference_buffers[lvl]; - const image::Image & img_second = other._difference_buffers[lvl]; - const image::Image & alpha = other._alphas[lvl]; - - for (int i = 0; i < img_first.Height(); i++) { - for (int j = 0; j < img_first.Width(); j++) { - - image::RGBAfColor & pix_first = img_first(i, j); - const image::RGBAfColor & pix_second = img_second(i, j); - const image::RGBAfColor & pix_alpha = alpha(i, j); + int width = output.Width(); + int height = output.Height(); - double weight = 1.0f;//pix_alpha.r(); - - if (pix_second.a() == 0.0) { - continue; - } - - if (pix_first.a() == 0.0) { - pix_first.r() = pix_second.r(); - pix_first.g() = pix_second.g(); - pix_first.b() = pix_second.b(); - pix_first.a() = 1.0f; - continue; - } + width = 2 * (width / 2); + height = 2 * (height / 2); - double mweight = 1.0 - weight; - pix_first.r() = mweight * pix_first.r() + weight * pix_second.r(); - pix_first.g() = mweight * pix_first.g() + weight * pix_second.g(); - pix_first.b() = mweight * pix_first.b() + weight * pix_second.b(); - pix_first.a() = 1.0f; - } + for (int i = 0; i < height; i++) { + for (int j = 0; j < width; j++) { + + output(i, j) = input(i / 2, j / 2); } } return true; } - void divideByAlpha(image::Image & image) { + bool blend(image::Image & output, const image::Image & inputA, const image::Image & inputB, const image::Image & 
weightMap) { - for(int y = 0; y < image.Height(); ++y) { - for(int x = 0; x < image.Width(); ++x) { - image::RGBAfColor& pixel = image(y, x); - if(pixel.a() > std::numeric_limits::epsilon()) { - pixel = pixel / pixel.a(); - } - } - } - } + - void trimAlpha(image::Image & image) { + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { - for(int y = 0; y < image.Height(); ++y) { - for(int x = 0; x < image.Width(); ++x) { - image::RGBAfColor& pixel = image(y, x); - - if(std::abs(pixel.a() - 1.0f) > 1e-02) { - pixel.r() = 0.0; - pixel.g() = 0.0; - pixel.b() = 0.0; - pixel.a() = 0.0; - } - } - } - } + float weight = weightMap(i, j); + float mweight = 1.0f - weight; + + + + const image::RGBAfColor & pixA = inputA(i, j); + const image::RGBAfColor & pixB = inputB(i, j); + + if (pixA.a() > std::numeric_limits::epsilon()) { - void copyMask(image::Image & dst, const image::Image & src) { + if (pixB.a() > std::numeric_limits::epsilon()) { + output(i, j).r() = mweight * pixA.r() + weight * pixB.r(); + output(i, j).g() = mweight * pixA.g() + weight * pixB.g(); + output(i, j).b() = mweight * pixA.b() + weight * pixB.b(); + output(i, j).a() = 1.0f; + } + else { + output(i, j) = pixA; + } + } + else { + if (pixB.a() > std::numeric_limits::epsilon()) { + output(i, j) = pixB; + } + else { + output(i, j).r() = 0.0f; + output(i, j).g() = 0.0f; + output(i, j).b() = 0.0f; + output(i, j).a() = 0.0f; + } + } - for(int y = 0; y < src.Height(); ++y) { - for(int x = 0; x < src.Width(); ++x) { - const image::RGBAfColor & pixelSrc = src(y, x); - image::RGBAfColor & pixelDst = dst(y, x); - pixelDst = pixelDst * pixelSrc.a(); } } + + return true; } - void substract(image::Image & result, const image::Image & A, const image::Image & B) { + bool substract(image::Image & output, const image::Image & inputA, const image::Image & inputB) { - for(int y = 0; y < A.Height(); ++y) { - for(int x = 0; x < A.Width(); ++x) { - const image::RGBAfColor & pixelA = A(y, x); - 
const image::RGBAfColor & pixelB = B(y, x); - image::RGBAfColor & pixelR = result(y, x); - - if ((pixelA.a() > std::numeric_limits::epsilon()) && (pixelB.a() > std::numeric_limits::epsilon())) { - pixelR.r() = pixelA.r() - pixelB.r(); - pixelR.g() = pixelA.g() - pixelB.g(); - pixelR.b() = pixelA.b() - pixelB.b(); - pixelR.a() = 1.0f; - } - else { - pixelR.r() = 0.0f; - pixelR.g() = 0.0f; - pixelR.b() = 0.0f; - pixelR.a() = 0.0f; + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + output(i, j).r() = inputA(i, j).r() - inputB(i, j).r(); + output(i, j).g() = inputA(i, j).g() - inputB(i, j).g(); + output(i, j).b() = inputA(i, j).b() - inputB(i, j).b(); + output(i, j).a() = 1.0f; + + if (!(inputA(i, j).a() > std::numeric_limits::epsilon())) { + output(i, j).r() = 0.0f; + output(i, j).g() = 0.0f; + output(i, j).b() = 0.0f; + output(i, j).a() = 0.0f; } } } + + return true; } + bool add(image::Image & output, const image::Image & inputA, const image::Image & inputB) { - void add(image::Image & result, const image::Image & A, const image::Image & B) { + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { - for(int y = 0; y < A.Height(); ++y) { - for(int x = 0; x < A.Width(); ++x) { - const image::RGBAfColor & pixelA = A(y, x); - const image::RGBAfColor & pixelB = B(y, x); - image::RGBAfColor & pixelR = result(y, x); - - if(pixelA.a() > std::numeric_limits::epsilon() && pixelB.a() > std::numeric_limits::epsilon()) { - pixelR.r() = pixelA.r() + pixelB.r(); - pixelR.g() = pixelA.g() + pixelB.g(); - pixelR.b() = pixelA.b() + pixelB.b(); - pixelR.a() = 1.0f; + + if (inputA(i, j).a() > std::numeric_limits::epsilon()) { + if (inputB(i, j).a() > std::numeric_limits::epsilon()) { + output(i, j).r() = inputA(i, j).r() + inputB(i, j).r(); + output(i, j).g() = inputA(i, j).g() + inputB(i, j).g(); + output(i, j).b() = inputA(i, j).b() + inputB(i, j).b(); + output(i, j).a() = 1.0f; + } + else { + output(i, 
j).r() = inputA(i, j).r(); + output(i, j).g() = inputA(i, j).g(); + output(i, j).b() = inputA(i, j).b(); + output(i, j).a() = 1.0f; + } } else { - pixelR.r() = 0.0f; - pixelR.g() = 0.0f; - pixelR.b() = 0.0f; - pixelR.a() = 0.0f; + output(i, j).r() = 0.0f; + output(i, j).g() = 0.0f; + output(i, j).b() = 0.0f; + output(i, j).a() = 0.0f; } } } - } - + return true; + } public: + std::vector> _pyramid; + std::vector> _differences; + std::vector> _differences_full; + std::vector> _buffer_1; + std::vector> _buffer_2; + size_t _width_base; size_t _height_base; - oiio::ImageBuf _kernel; - oiio::ImageBuf _kernel_alphas; - - std::vector> _difference_buffers; - std::vector> _alphas; - std::vector> _work_buffers_1; - std::vector> _work_buffers_2; - std::vector> _work_buffers_3; + size_t _scales; }; float sigmoid(float x, float sigwidth, float sigMid) @@ -356,6 +293,97 @@ Eigen::Matrix3d rotationBetweenVectors(const Vec3 & v1, const Vec3 & v2) { return Eigen::Matrix3d::Identity() + sinangle * S + (1 - cosangle) * S * S; } +bool computeDistanceMap(image::Image & distance, const image::Image & imageWithMask) { + + int m = imageWithMask.Height(); + int n = imageWithMask.Width(); + + int maxval = m * n; + + distance = image::Image (n, m, false); + for(int x = 0; x < n; ++x) { + + //A corner is when mask becomes 0 + bool b = (imageWithMask(0, x).a() > std::numeric_limits::epsilon()) ? false : true; + if (b) { + distance(0, x) = 0; + } + else { + distance(0, x) = maxval * maxval; + } + + for (int y = 1; y < m; y++) { + bool b = (imageWithMask(y, x).a() > std::numeric_limits::epsilon()) ? 
false : true; + if (b) { + distance(y, x) = 0; + } + else { + distance(y, x) = 1 + distance(y - 1, x); + } + } + + for (int y = m - 2; y >= 0; y--) { + if (distance(y + 1, x) < distance(y, x)) { + distance(y, x) = 1 + distance(y + 1, x); + } + } + } + + for (int y = 0; y < m; y++) { + int q; + std::map s; + std::map t; + + q = 0; + s[0] = 0; + t[0] = 0; + + std::function f = [distance, y](int x, int i) { + int gi = distance(y, i); + return (x - i)*(x - i) + gi * gi; + }; + + std::function sep = [distance, y](int i, int u) { + int gu = distance(y, u); + int gi = distance(y, i); + + int nom = (u * u) - (i * i) + (gu * gu) - (gi * gi); + int denom = 2 * (u - i); + + return nom / denom; + }; + + for (int u = 1; u < n; u++) { + + while (q >= 0 && (f(t[q], s[q]) > f(t[q], u))) { + q = q - 1; + } + + if (q < 0) { + q = 0; + s[0] = u; + } + else { + int w = 1 + sep(s[q], u); + if (w < n) { + q = q + 1; + s[q] = u; + t[q] = w; + } + } + } + + for (int u = n - 1; u >= 0; u--) { + distance(y, u) = f(u, s[q]); + if (u == t[q]) { + q = q - 1; + } + } + } + + return true; +} + /** * @brief Function to map equirectangular coordinates onto a world unit vector according a spherical projection */ @@ -653,7 +681,7 @@ int main(int argc, char **argv) } } ++imageIndex; - + //if (imageIndex != 0 && imageIndex != 4 && imageIndex != 8) continue; const sfmData::CameraPose camPose = sfmData.getPose(view); @@ -684,6 +712,9 @@ int main(int argc, char **argv) { for(int x = 0; x < panoramaSize.first; ++x) { + //if (x < 596 || x > 643) continue; + //if (y < 255 || y > 285) continue; + // equirectangular to unit vector Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), panoramaSize.first, panoramaSize.second); @@ -720,7 +751,7 @@ int main(int argc, char **argv) const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); if(contribution > 0.0f) { - //contribution = 1.0f; + contribution = 1.0f; perCameraOutput(y, x).r() += pixel.r() * contribution; perCameraOutput(y, x).g() 
+= pixel.g() * contribution; perCameraOutput(y, x).b() += pixel.b() * contribution; @@ -729,53 +760,46 @@ int main(int argc, char **argv) } } - - LaplacianPyramid pyramid(panoramaSize.first, panoramaSize.second); - pyramid.process(perCameraOutput); - - LaplacianPyramid blending_pyramid(panoramaSize.first, panoramaSize.second); - blending_pyramid.process(imageOut); - blending_pyramid.blend(pyramid); - - /*for (int i = 0; i < pyramid._difference_buffers.size(); i++) { - char filename[512]; - sprintf(filename, "%s_differences_%d_%d.exr", outputPanorama.c_str(), imageIndex, i); - image::writeImage(filename, pyramid._difference_buffers[i], image::EImageColorSpace::LINEAR); - }*/ - - //image::Image output; - //pyramid.rebuild(output); + image::Image distanceMap; + image::Image weightMap(perCameraOutput.Width(), perCameraOutput.Height()); + computeDistanceMap(distanceMap, perCameraOutput); - blending_pyramid.rebuild(imageOut); - - for(int y = 0; y < imageOut.Height(); ++y) + for(int y = 0; y < perCameraOutput.Height(); ++y) { - for(int x = 0; x < imageOut.Width(); ++x) - { - image::RGBAfColor& pixel = imageOut(y, x); - if(pixel.a() > std::numeric_limits::epsilon()) - { - pixel.r() /= pixel.a(); - pixel.g() /= pixel.a(); - pixel.b() /= pixel.a(); + for(int x = 0; x < perCameraOutput.Width(); ++x) + { + int dist = distanceMap(y, x); - /*pixel.r() = std::min(1.0f, std::max(0.0f, pixel.r())); - pixel.g() = std::min(1.0f, std::max(0.0f, pixel.g())); - pixel.b() = std::min(1.0f, std::max(0.0f, pixel.b()));*/ - - pixel.a() = 1.0f; // TMP: comment to keep the alpha with the number of contribution for debugging + weightMap(y, x) = 0.0f; + if (dist > 0) + { + float fdist = sqrtf(float(dist)); + weightMap(y, x) = 1.0f - sigmoid(fdist, 100, 50); } } } + LaplacianPyramid pyramid(panoramaSize.first, panoramaSize.second); + pyramid.process(perCameraOutput); - sprintf(filename, "%s_rebuild_%d.exr", outputPanorama.c_str(), imageIndex); - image::writeImage(filename, perCameraOutput, 
image::EImageColorSpace::SRGB); + LaplacianPyramid blending_pyramid(panoramaSize.first, panoramaSize.second); + blending_pyramid.process(imageOut); + blending_pyramid.blend(pyramid, weightMap); + blending_pyramid.rebuild(imageOut); + sprintf(filename, "%s_blend_%d.exr", outputPanorama.c_str(), imageIndex); - image::writeImage(filename, imageOut, image::EImageColorSpace::SRGB); + image::writeImage(filename, imageOut, image::EImageColorSpace::NO_CONVERSION); + + /*for (int i = 0; i < pyramid._scales; i++) { + sprintf(filename, "%s_diff_%d_%d.exr", outputPanorama.c_str(), imageIndex, i); + image::writeImage(filename, pyramid._differences_full[i], image::EImageColorSpace::NO_CONVERSION); + } + + sprintf(filename, "%s_cam_%d.exr", outputPanorama.c_str(), imageIndex); + image::writeImage(filename, perCameraOutput, image::EImageColorSpace::NO_CONVERSION);*/ } From 84e86c0fdce8fee40a079b11524df27883d25835 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Mon, 14 Oct 2019 13:11:51 +0200 Subject: [PATCH 071/174] Restart stitching for cleaner code --- src/aliceVision/camera/IntrinsicBase.hpp | 15 + .../pipeline/main_panoramaStitching.cpp | 958 ++++++------------ 2 files changed, 297 insertions(+), 676 deletions(-) diff --git a/src/aliceVision/camera/IntrinsicBase.hpp b/src/aliceVision/camera/IntrinsicBase.hpp index 1663fcf0db..ef17eb47f5 100644 --- a/src/aliceVision/camera/IntrinsicBase.hpp +++ b/src/aliceVision/camera/IntrinsicBase.hpp @@ -351,10 +351,25 @@ struct IntrinsicBase /** * @brief Return true if this ray should be visible in the image + * @param ray input ray to check for visibility * @return true if this ray is visible theorically */ virtual bool isVisibleRay(const Vec3 & ray) const = 0; + /** + * @brief Return true if these pixel coordinates should be visible in the image + * @param pix input pixel coordinates to check for visibility + * @return true if visible + */ + virtual bool isVisible(const Vec2 & pix) const { + + if (pix(0) < 0 || pix(0) >= _w || pix(1) 
< 0 || pix(1) >= _h) { + return false; + } + + return true; + } + /** * @brief Generate an unique Hash from the camera parameters (used for grouping) * @return Unique Hash from the camera parameters diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 162c132a88..67414535ed 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -1,29 +1,20 @@ -// This file is part of the AliceVision project. -// Copyright (c) 2017 AliceVision contributors. -// Copyright (c) 2012 openMVG contributors. -// This Source Code Form is subject to the terms of the Mozilla Public License, -// v. 2.0. If a copy of the MPL was not distributed with this file, -// You can obtain one at https://mozilla.org/MPL/2.0/. - +/** + * Input and geometry +*/ #include #include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include +/** + * Image stuff + */ +#include -#include -#include -#include +/*Logging stuff*/ +#include -#include +/*Reading command line options*/ +#include +#include // These constants define the current software version. // They must be updated when the command line is changed. 
@@ -33,559 +24,297 @@ using namespace aliceVision; namespace po = boost::program_options; -namespace fs = boost::filesystem; -class LaplacianPyramid { -public: - LaplacianPyramid(const size_t width_base, const size_t height_base) : - _width_base(width_base), - _height_base(height_base) +namespace SphericalMapping +{ + /** + * Map from equirectangular to spherical coordinates + * @param equirectangular equirectangular coordinates + * @param width number of pixels used to represent longitude + * @param height number of pixels used to represent latitude + * @return spherical coordinates + */ + Vec3 fromEquirectangular(const Vec2 & equirectangular, int width, int height) { - size_t min_dim = std::min(_width_base, _height_base); - _scales = static_cast(floor(log2(double(min_dim) / 32.0))); - - /*Create pyramid*/ - size_t new_width = _width_base; - size_t new_height = _height_base; - for (int i = 0; i < _scales; i++) { - - _pyramid.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - _differences.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - _differences_full.push_back(image::Image(_width_base, _height_base, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - _buffer_1.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - _buffer_2.push_back(image::Image(new_width, new_height, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f))); - - - new_height /= 2; - new_width /= 2; - } - } - - bool process(const image::Image & input) { - - //Build pyramid - _pyramid[0] = input; - for (int lvl = 1; lvl < _scales; lvl++) { - downscale(_pyramid[lvl], _pyramid[lvl - 1]); - } - - //Differences - for (int lvl = 0; lvl < _scales - 1; lvl++) { - upscale(_buffer_1[lvl], _pyramid[lvl + 1]); - substract(_differences[lvl], _pyramid[lvl], _buffer_1[lvl]); - } - - _differences[_differences.size() - 1] = _pyramid[_differences.size() - 1]; - - /*Upscale to 
the max*/ - for (int max_level = 0; max_level < _scales; max_level++) { - - image::Image & refOut = _differences_full[max_level]; - - _buffer_1[max_level] = _differences[max_level]; - for (int lvl = max_level - 1; lvl >= 0; lvl--) { - upscale(_buffer_1[lvl], _buffer_1[lvl + 1]); - } - refOut = _buffer_1[0]; - - - for (int i = 0; i < refOut.Height(); i++) { - for (int j = 0; j < refOut.Width(); j++) { - if (!(input(i, j).a() > std::numeric_limits::epsilon())) { - refOut(i, j).r() = 0.0f; - refOut(i, j).g() = 0.0f; - refOut(i, j).b() = 0.0f; - refOut(i, j).a() = 0.0f; - } - } - } - } - - return true; - } - - bool rebuild(image::Image & output) { + const double latitude = (equirectangular(1) / double(height)) * M_PI - M_PI_2; + const double longitude = ((equirectangular(0) / double(width)) * 2.0 * M_PI) - M_PI; - output = _differences_full[_scales - 1]; - for (int lvl = _scales - 2; lvl >= 0; lvl--) { - add(output, output, _differences_full[lvl]); - } + const double Px = cos(latitude) * sin(longitude); + const double Py = sin(latitude); + const double Pz = cos(latitude) * cos(longitude); - return true; + return Vec3(Px, Py, Pz); } - bool blend(const LaplacianPyramid & other, const image::Image & weightMap) { + /** + * Map from Spherical to equirectangular coordinates + * @param spherical spherical coordinates + * @param width number of pixels used to represent longitude + * @param height number of pixels used to represent latitude + * @return equirectangular coordinates + */ + Vec2 toEquirectangular(const Vec3 & spherical, int width, int height) { - for (int lvl = 0; lvl < _scales; lvl++) { + double vertical_angle = asin(spherical(1)); + double horizontal_angle = atan2(spherical(0), spherical(2)); - blend(_differences_full[lvl], _differences_full[lvl], other._differences_full[lvl], weightMap); - } + double latitude = ((vertical_angle + M_PI_2) / M_PI) * height; + double longitude = ((horizontal_angle + M_PI) / (2.0 * M_PI)) * width; - return true; + return 
Vec2(longitude, latitude); } - - bool downscale(image::Image & output, const image::Image & input) { - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { - output(i, j) = input(i * 2, j * 2); - } - } + /** + * Map from Spherical to equirectangular coordinates in radians + * @param spherical spherical coordinates + * @return equirectangular coordinates + */ + Vec2 toLongitudeLatitude(const Vec3 & spherical) { + + double latitude = asin(spherical(1)); + double longitude = atan2(spherical(0), spherical(2)); - return true; + return Vec2(longitude, latitude); } +} - bool upscale(image::Image & output, const image::Image & input) { - - int width = output.Width(); - int height = output.Height(); - - width = 2 * (width / 2); - height = 2 * (height / 2); +class CoordinatesMap { +public: + /** + * Build coordinates map given camera properties + * @param panoramaSize desired output panoramaSize + * @param pose the camera pose wrt an arbitrary reference frame + * @param intrinsics the camera intrinsics + */ + bool build(const std::pair & panoramaSize, const geometry::Pose3 & pose, const aliceVision::camera::IntrinsicBase & intrinsics) { + + aliceVision::image::Image buffer_coordinates(panoramaSize.first, panoramaSize.second, false); + aliceVision::image::Image buffer_mask(panoramaSize.first, panoramaSize.second, true, false); + + size_t max_x = 0; + size_t max_y = 0; + size_t min_x = panoramaSize.first; + size_t min_y = panoramaSize.second; + + for (size_t y = 0; y < panoramaSize.second; y++) { + + for (size_t x = 0; x < panoramaSize.first; x++) { + + Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), panoramaSize.first, panoramaSize.second); + + /** + * Check that this ray should be visible. 
+ * This test is camera type dependent + */ + Vec3 transformedRay = pose(ray); + if (!intrinsics.isVisibleRay(transformedRay)) { + continue; + } + /** + * Project this ray to camera pixel coordinates + */ + const Vec2 pix_disto = intrinsics.project(pose, ray, true); - for (int i = 0; i < height; i++) { - for (int j = 0; j < width; j++) { + /** + * Ignore invalid coordinates + */ + if (!intrinsics.isVisible(pix_disto)) { + continue; + } - output(i, j) = input(i / 2, j / 2); + buffer_coordinates(y, x) = pix_disto; + buffer_mask(y, x) = true; + + min_x = std::min(x, min_x); + min_y = std::min(y, min_y); + max_x = std::max(x, max_x); + max_y = std::max(y, max_y); } } - return true; - } + _offset_x = min_x; + _offset_y = min_y; - bool blend(image::Image & output, const image::Image & inputA, const image::Image & inputB, const image::Image & weightMap) { + _real_width = max_x - min_x + 1; + _real_height = max_y - min_y + 1; - - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { + /* Make sure the buffer is a power of 2 for potential pyramids*/ + size_t rectified_width = pow(2.0, ceil(log2(double(_real_width)))); + size_t rectified_height = pow(2.0, ceil(log2(double(_real_height)))); - float weight = weightMap(i, j); - float mweight = 1.0f - weight; - - + /* Resize buffers */ + _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); + _mask = aliceVision::image::Image(rectified_width, rectified_height, true, false); - const image::RGBAfColor & pixA = inputA(i, j); - const image::RGBAfColor & pixB = inputB(i, j); + for (int i = 0; i < _real_height; i++) { + for (int j = 0; j < _real_width; j++) { - if (pixA.a() > std::numeric_limits::epsilon()) { - - if (pixB.a() > std::numeric_limits::epsilon()) { - output(i, j).r() = mweight * pixA.r() + weight * pixB.r(); - output(i, j).g() = mweight * pixA.g() + weight * pixB.g(); - output(i, j).b() = mweight * pixA.b() + weight * pixB.b(); - output(i, j).a() = 1.0f; - 
} - else { - output(i, j) = pixA; - } - } - else { - if (pixB.a() > std::numeric_limits::epsilon()) { - output(i, j) = pixB; - } - else { - output(i, j).r() = 0.0f; - output(i, j).g() = 0.0f; - output(i, j).b() = 0.0f; - output(i, j).a() = 0.0f; - } - } - - + _coordinates(i, j) = buffer_coordinates(_offset_y + i, _offset_x + j); + _mask(i, j) = buffer_mask(_offset_y + i, _offset_x + j); } } return true; } - bool substract(image::Image & output, const image::Image & inputA, const image::Image & inputB) { - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { - - output(i, j).r() = inputA(i, j).r() - inputB(i, j).r(); - output(i, j).g() = inputA(i, j).g() - inputB(i, j).g(); - output(i, j).b() = inputA(i, j).b() - inputB(i, j).b(); - output(i, j).a() = 1.0f; - - if (!(inputA(i, j).a() > std::numeric_limits::epsilon())) { - output(i, j).r() = 0.0f; - output(i, j).g() = 0.0f; - output(i, j).b() = 0.0f; - output(i, j).a() = 0.0f; - } - } - } - - return true; + size_t getOffsetX() const { + return _offset_x; } - bool add(image::Image & output, const image::Image & inputA, const image::Image & inputB) { - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { - - - if (inputA(i, j).a() > std::numeric_limits::epsilon()) { - if (inputB(i, j).a() > std::numeric_limits::epsilon()) { - output(i, j).r() = inputA(i, j).r() + inputB(i, j).r(); - output(i, j).g() = inputA(i, j).g() + inputB(i, j).g(); - output(i, j).b() = inputA(i, j).b() + inputB(i, j).b(); - output(i, j).a() = 1.0f; - } - else { - output(i, j).r() = inputA(i, j).r(); - output(i, j).g() = inputA(i, j).g(); - output(i, j).b() = inputA(i, j).b(); - output(i, j).a() = 1.0f; - } - } - else { - output(i, j).r() = 0.0f; - output(i, j).g() = 0.0f; - output(i, j).b() = 0.0f; - output(i, j).a() = 0.0f; - } - } - } - - return true; + size_t getOffsetY() const { + return _offset_y; } -public: - std::vector> _pyramid; - std::vector> _differences; - 
std::vector> _differences_full; - std::vector> _buffer_1; - std::vector> _buffer_2; - - size_t _width_base; - size_t _height_base; - size_t _scales; -}; - -float sigmoid(float x, float sigwidth, float sigMid) -{ - return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); -} - -Eigen::Matrix3d rotationBetweenVectors(const Vec3 & v1, const Vec3 & v2) { - - const Vec3 v1n = v1.normalized(); - const Vec3 v2n = v2.normalized(); - const Vec3 cr = v1n.cross(v2n); - const Vec3 axis = cr.normalized(); - - double cosangle = v1n.dot(v2n); - double sinangle = cr.norm(); - - Eigen::Matrix3d S = Eigen::Matrix3d::Zero(); - S(0, 1) = -axis(2); - S(0, 2) = axis(1); - S(1, 0) = axis(2); - S(1, 2) = -axis(0); - S(2, 0) = -axis(1); - S(2, 1) = axis(0); - - return Eigen::Matrix3d::Identity() + sinangle * S + (1 - cosangle) * S * S; -} - -bool computeDistanceMap(image::Image & distance, const image::Image & imageWithMask) { - - int m = imageWithMask.Height(); - int n = imageWithMask.Width(); - - int maxval = m * n; - - distance = image::Image (n, m, false); - for(int x = 0; x < n; ++x) { - - //A corner is when mask becomes 0 - bool b = (imageWithMask(0, x).a() > std::numeric_limits::epsilon()) ? false : true; - if (b) { - distance(0, x) = 0; - } - else { - distance(0, x) = maxval * maxval; - } - - for (int y = 1; y < m; y++) { - bool b = (imageWithMask(y, x).a() > std::numeric_limits::epsilon()) ? 
false : true; - if (b) { - distance(y, x) = 0; - } - else { - distance(y, x) = 1 + distance(y - 1, x); - } - } + size_t getWidth() const { + return _real_width; + } - for (int y = m - 2; y >= 0; y--) { - if (distance(y + 1, x) < distance(y, x)) { - distance(y, x) = 1 + distance(y + 1, x); - } - } + size_t getHeight() const { + return _real_height; } - for (int y = 0; y < m; y++) { - int q; - std::map s; - std::map t; + const aliceVision::image::Image & getCoordinates() const { + return _coordinates; + } - q = 0; - s[0] = 0; - t[0] = 0; + const aliceVision::image::Image & getMask() const { + return _mask; + } - std::function f = [distance, y](int x, int i) { - int gi = distance(y, i); - return (x - i)*(x - i) + gi * gi; - }; +private: + size_t _offset_x = 0; + size_t _offset_y = 0; + size_t _real_width = 0; + size_t _real_height = 0; - std::function sep = [distance, y](int i, int u) { - int gu = distance(y, u); - int gi = distance(y, i); + aliceVision::image::Image _coordinates; + aliceVision::image::Image _mask; +}; - int nom = (u * u) - (i * i) + (gu * gu) - (gi * gi); - int denom = 2 * (u - i); +class Warper { +public: + virtual bool warp(const CoordinatesMap & map, const aliceVision::image::Image & source) { - return nom / denom; - }; + const aliceVision::image::Image & coordinates = map.getCoordinates(); + const aliceVision::image::Image & mask = map.getMask(); - for (int u = 1; u < n; u++) { + _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); + + const image::Sampler2d sampler; - while (q >= 0 && (f(t[q], s[q]) > f(t[q], u))) { - q = q - 1; - } + for (size_t i = 0; i < _color.Height(); i++) { + for (size_t j = 0; j < _color.Width(); j++) { - if (q < 0) { - q = 0; - s[0] = u; - } - else { - int w = 1 + sep(s[q], u); - if (w < n) { - q = q + 1; - s[q] = u; - t[q] = w; + bool valid = mask(i, j); + if (!valid) { + continue; } - } - } - for (int u = n - 1; u >= 0; u--) { - distance(y, u) = f(u, s[q]); - if (u == t[q]) { - q = q - 1; + 
const Eigen::Vector2d & coord = coordinates(i, j); + const image::RGBfColor pixel = sampler(source, coord(1), coord(0)); + + _color(i, j) = pixel; } } - } - - return true; -} -/** - * @brief Function to map equirectangular coordinates onto a world unit vector according a spherical projection - */ -namespace SphericalMapping -{ - Vec3 fromEquirectangular(const Vec2 & equirectangular, int width, int height) - { - const double latitude = (equirectangular(1) / double(height)) * M_PI - M_PI_2; - const double longitude = ((equirectangular(0) / double(width)) * 2.0 * M_PI) - M_PI; - - const double Px = cos(latitude) * sin(longitude); - const double Py = sin(latitude); - const double Pz = cos(latitude) * cos(longitude); - - return Vec3(Px, Py, Pz); + return true; } - Vec2 toEquirectangular(const Vec3 & spherical, int width, int height) { - - double vertical_angle = asin(spherical(1)); - double horizontal_angle = atan2(spherical(0), spherical(2)); - - double latitude = ((vertical_angle + M_PI_2) / M_PI) * height; - double longitude = ((horizontal_angle + M_PI) / (2.0 * M_PI)) * width; - - return Vec2(longitude, latitude); + const aliceVision::image::Image & getColor() const { + return _color; } - Vec2 toLongitudeLatitude(const Vec3 & spherical) { - - double latitude = asin(spherical(1)); - double longitude = atan2(spherical(0), spherical(2)); +private: + aliceVision::image::Image _color; +}; - return Vec2(longitude, latitude); +class Compositer { +public: + Compositer(size_t outputWidth, size_t outputHeight) : + _panorama(outputWidth, outputHeight, true, image::RGBfColor(0.0f, 0.0f, 0.0f)) { } -} -inline std::istream& operator>>(std::istream& in, std::pair& out) -{ - in >> out.first; - in >> out.second; - return in; -} - -bool estimate_panorama_size(const sfmData::SfMData & sfmData, float fisheyeMaskingMargin, std::pair & panoramaSize) { + bool append(const CoordinatesMap & map, const Warper & warper) { - panoramaSize.first = 512; - panoramaSize.second = 256; + const 
aliceVision::image::Image & mask = map.getMask(); + const aliceVision::image::Image & color = warper.getColor(); - image::Image BufferCoords(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); - { - int imageIndex = 0; - std::vector determinants; + for (size_t i = 0; i < map.getHeight(); i++) { - for(auto& viewIt: sfmData.getViews()) - { - IndexT viewId = viewIt.first; - const sfmData::View& view = *viewIt.second.get(); - if(!sfmData.isPoseAndIntrinsicDefined(&view)) + size_t pano_i = map.getOffsetY() + i; + if (pano_i >= _panorama.Height()) { continue; - - - const sfmData::CameraPose camPose = sfmData.getPose(view); - const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - - const float maxRadius = std::min(intrinsic.w(), intrinsic.h()) * 0.5f * (1.0 - fisheyeMaskingMargin); - const Vec2i center(intrinsic.w()/2.f, intrinsic.h()/2.f); - - for(int y = 0; y < panoramaSize.second; ++y) - { - for(int x = 0; x < panoramaSize.first; ++x) - { - BufferCoords(y, x).a() = 0.0; - // equirectangular to unit vector - Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), panoramaSize.first, panoramaSize.second); - - if(camPose.getTransform().depth(ray) < 0) - { - // point is not in front of the camera - continue; - } - - // unit vector to camera - const Vec2 pix_disto = intrinsic.project(camPose.getTransform(), ray, true); - - if( pix_disto(0) < 0 || pix_disto(0) >= intrinsic.w() || pix_disto(1) < 0 || pix_disto(1) >= intrinsic.h()) - { - //the pixel is out of the image - continue; - } - - const float dist = std::sqrt((pix_disto(0)-center(0)) * (pix_disto(0)-center(0)) + (pix_disto(1)-center(1)) * (pix_disto(1)-center(1))); - if (dist > maxRadius * 0.8) - { - continue; - } - - BufferCoords(y, x).r() = pix_disto(0); - BufferCoords(y, x).g() = pix_disto(1); - BufferCoords(y, x).a() = 1.0; - } } - - - for(int y = 1; y < panoramaSize.second - 1; ++y) - { - for(int x = 1; x < panoramaSize.first - 1; ++x) 
- { - double x00 = BufferCoords(y, x).r(); - double x10 = BufferCoords(y, x + 1).r(); - double x01 = BufferCoords(y + 1, x).r(); - - double y00 = BufferCoords(y, x).g(); - double y10 = BufferCoords(y, x + 1).g(); - double y01 = BufferCoords(y + 1, x).g(); - - double m00 = BufferCoords(y, x).a(); - double m10 = BufferCoords(y, x + 1).a(); - double m01 = BufferCoords(y + 1, x).a(); - - if (m00 != 1.0 || m10 != 1.0 || m01 != 1.0) { - continue; - } - - double dxx = x10 - x00; - double dxy = x01 - x00; - double dyx = y10 - y00; - double dyy = y01 - y00; - - float det = std::abs(dxx*dyy - dxy*dyx); - determinants.push_back(det); + for (size_t j = 0; j < map.getWidth(); j++) { + + if (!mask(i, j)) { + continue; + } + + size_t pano_j = map.getOffsetX() + j; + if (pano_j >= _panorama.Width()) { + continue; } - } - break; + _panorama(pano_i, pano_j) = color(i, j); + } } - std::nth_element(determinants.begin(), determinants.begin() + determinants.size() / 5, determinants.end()); - double scale = sqrt(determinants[determinants.size() / 2]); - - panoramaSize.first *= scale; - panoramaSize.second *= scale; + return true; } - return true; -} - + const aliceVision::image::Image & getPanorama() const { + return _panorama; + } -int main(int argc, char **argv) -{ - // command-line parameters +private: + aliceVision::image::Image _panorama; +}; - std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); - std::string sfmDataFilename; - std::string outputPanorama; +int main(int argc, char **argv) { - float scaleFactor = 0.2f; - std::pair panoramaSize = {0, 0}; - bool fisheyeMasking = false; - float fisheyeMaskingMargin = 0.05f; - float transitionSize = 1000.0f; - int debugSubsetStart = 0; - int debugSubsetSize = 0; - char filename[512]; - po::options_description allParams( + /** + * Program description + */ + po::options_description allParams ( "Perform panorama stiching of cameras around a nodal point for 360° panorama creation.\n" - 
"AliceVision PanoramaStitching"); + "AliceVision PanoramaStitching" + ); + /** + * Description of mandatory parameters + */ + std::string sfmDataFilename; + std::string outputPanorama; po::options_description requiredParams("Required parameters"); requiredParams.add_options() - ("input,i", po::value(&sfmDataFilename)->required(), - "SfMData file.") - ("output,o", po::value(&outputPanorama)->required(), - "Path of the output folder."); - + ("input,i", po::value(&sfmDataFilename)->required(), "SfMData file.") + ("output,o", po::value(&outputPanorama)->required(), "Path of the output folder."); + allParams.add(requiredParams); + + /** + * Description of optional parameters + */ + std::pair panoramaSize = {1024, 512}; po::options_description optionalParams("Optional parameters"); - optionalParams.add_options() - ("scaleFactor", po::value(&scaleFactor)->default_value(scaleFactor), - "Scale factor to resize the output resolution (e.g. 0.5 for downscaling to half resolution).") - ("fisheyeMasking", po::value(&fisheyeMasking)->default_value(fisheyeMasking), - "For fisheye images, skip the invalid pixels on the borders.") - ("fisheyeMaskingMargin", po::value(&fisheyeMaskingMargin)->default_value(fisheyeMaskingMargin), - "Margin for fisheye images (in percentage of the image).") - ("transitionSize", po::value(&transitionSize)->default_value(transitionSize), - "Size of the transition between images (in pixels).") - ("debugSubsetStart", po::value(&debugSubsetStart)->default_value(debugSubsetStart), - "For debug only: Index of first image of the subset to merge.") - ("debugSubsetSize", po::value(&debugSubsetSize)->default_value(debugSubsetSize), - "For debug only: Number of images in the subset to merge.") - //("panoramaSize", po::value>(&panoramaSize)->default_value(panoramaSize), - // "Image size of the output panorama image file.") - ; + allParams.add(optionalParams); + /** + * Setup log level given command line + */ + std::string verboseLevel = 
system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); po::options_description logParams("Log parameters"); logParams.add_options() - ("verboseLevel,v", po::value(&verboseLevel)->default_value(verboseLevel), - "verbosity level (fatal, error, warning, info, debug, trace)."); + ("verboseLevel,v", po::value(&verboseLevel)->default_value(verboseLevel), "verbosity level (fatal, error, warning, info, debug, trace)."); + allParams.add(logParams); - allParams.add(requiredParams).add(optionalParams).add(logParams); + /** + * Effectively parse command line given parse options + */ po::variables_map vm; try { @@ -614,213 +343,90 @@ int main(int argc, char **argv) ALICEVISION_COUT("Program called with the following parameters:"); ALICEVISION_COUT(vm); - // set verbose level + + /** + * Set verbose level given command line + */ system::Logger::get()->setLogLevel(verboseLevel); - // load input SfMData scene + /** + * Load information about inputs + * Camera images + * Camera intrinsics + * Camera extrinsics + */ sfmData::SfMData sfmData; - if(!sfmDataIO::Load(sfmData, sfmDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS|sfmDataIO::EXTRINSICS))) + if(!sfmDataIO::Load(sfmData, sfmDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS| sfmDataIO::INTRINSICS| sfmDataIO::EXTRINSICS))) { ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmDataFilename << "' cannot be read."); return EXIT_FAILURE; } - auto validViews = sfmData.getValidViews(); - int nbCameras = sfmData.getValidViews().size(); - - ALICEVISION_LOG_INFO(nbCameras << " loaded from " << sfmDataFilename); - if(nbCameras == 0) - { - ALICEVISION_LOG_ERROR("Failed to get valid cameras from input images."); - return -1; - } - - if(panoramaSize.first == 0 || panoramaSize.second == 0) - { - estimate_panorama_size(sfmData, fisheyeMaskingMargin, panoramaSize); - } - - panoramaSize.first *= scaleFactor; - panoramaSize.second *= scaleFactor; - - /*Make sure panorama size is even */ - 
panoramaSize.first = 2 * (panoramaSize.first / 2); - panoramaSize.second = 2 * (panoramaSize.second / 2); - - ALICEVISION_LOG_INFO("Output panorama size: " << panoramaSize.first << ", " << panoramaSize.second); - - // Create panorama buffer - image::Image output; - image::Image imageOut(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); - - - int imageIndex = 0; - for(auto& viewIt: sfmData.getViews()) - { - - IndexT viewId = viewIt.first; - - if (!sfmData.isPoseAndIntrinsicDefined(viewId)) { - continue; - } + /** + * Create compositer + */ + Compositer compositer(panoramaSize.first, panoramaSize.second); + + /** + * Preprocessing per view + */ + size_t pos = 0; + for (auto & viewIt: sfmData.getViews()) { + + /** + * Retrieve view + */ const sfmData::View& view = *viewIt.second.get(); - if(!sfmData.isPoseAndIntrinsicDefined(&view)) + if (!sfmData.isPoseAndIntrinsicDefined(&view)) { continue; - - if(debugSubsetSize != 0) - { - if(imageIndex < debugSubsetStart) - { - ++imageIndex; - continue; - } - else if(imageIndex >= debugSubsetStart + debugSubsetSize) - { - break; - } } - ++imageIndex; - //if (imageIndex != 0 && imageIndex != 4 && imageIndex != 8) continue; - - - const sfmData::CameraPose camPose = sfmData.getPose(view); - const camera::IntrinsicBase& intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - std::string imagePath = view.getImagePath(); - - // Image RGB - image::Image imageIn; - ALICEVISION_LOG_INFO("Reading " << imagePath); - //sprintf(filename, "%s.png", imagePath.c_str()); - image::readImage(imagePath, imageIn, image::EImageColorSpace::LINEAR); - - - - //image::writeImage(filename, imageIn, image::EImageColorSpace::AUTO); - ALICEVISION_LOG_INFO(" - " << imageIn.Width() << "x" << imageIn.Height()); - const float maxRadius = std::min(imageIn.Width(), imageIn.Height()) * 0.5f * (1.0 - fisheyeMaskingMargin); - const float blurWidth = maxRadius * transitionSize; - const float blurMid = maxRadius - 
transitionSize/2.f; - const Vec2i center(imageIn.Width()/2.f, imageIn.Height()/2.f); + ALICEVISION_LOG_INFO("Processing view " << view.getViewId()); - const image::Sampler2d sampler; + /** + * Get intrinsics and extrinsics + */ + const geometry::Pose3 & camPose = sfmData.getPose(view).getTransform(); + const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - image::Image perCameraOutput(panoramaSize.first, panoramaSize.second, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + /** + * Prepare coordinates map + */ + CoordinatesMap map; + map.build(panoramaSize, camPose, intrinsic); - for(int y = 0; y < panoramaSize.second; ++y) - { - for(int x = 0; x < panoramaSize.first; ++x) - { - //if (x < 596 || x > 643) continue; - //if (y < 255 || y > 285) continue; - - // equirectangular to unit vector - Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), panoramaSize.first, panoramaSize.second); - - //Check that this ray should be visible - Vec3 transformedRay = camPose.getTransform()(ray); - if (!intrinsic.isVisibleRay(transformedRay)) { - continue; - } + /** + * Load image and convert it to linear colorspace + */ + std::string imagePath = view.getImagePath(); + image::Image source; + image::readImage(imagePath, source, image::EImageColorSpace::LINEAR); - // unit vector to camera - const Vec2 pix_disto = intrinsic.project(camPose.getTransform(), ray, true); + /** + * Warp image + */ + Warper warper; + warper.warp(map, source); - if( pix_disto(0) < 0 || pix_disto(0) >= imageIn.Width() - 1 || pix_disto(1) < 0 || pix_disto(1) >= imageIn.Height() - 1) - { - // the pixel is out of the image - continue; - } - - float contribution = 1.0f; - if(fisheyeMasking) - { - const float dist = std::sqrt((pix_disto(0)-center(0)) * (pix_disto(0)-center(0)) + (pix_disto(1)-center(1)) * (pix_disto(1)-center(1))); - if(dist > maxRadius) - { - // contribution = 0.0f; - continue; - } - else - { - contribution = sigmoid(dist, transitionSize, blurMid); - 
} - } - - const image::RGBfColor pixel = sampler(imageIn, pix_disto(1), pix_disto(0)); - if(contribution > 0.0f) - { - contribution = 1.0f; - perCameraOutput(y, x).r() += pixel.r() * contribution; - perCameraOutput(y, x).g() += pixel.g() * contribution; - perCameraOutput(y, x).b() += pixel.b() * contribution; - perCameraOutput(y, x).a() += contribution; - } - } - } + /** + *Composite image into output + */ + compositer.append(map, warper); - - image::Image distanceMap; - image::Image weightMap(perCameraOutput.Width(), perCameraOutput.Height()); - computeDistanceMap(distanceMap, perCameraOutput); - - for(int y = 0; y < perCameraOutput.Height(); ++y) - { - for(int x = 0; x < perCameraOutput.Width(); ++x) - { - int dist = distanceMap(y, x); - - weightMap(y, x) = 0.0f; - if (dist > 0) - { - float fdist = sqrtf(float(dist)); - weightMap(y, x) = 1.0f - sigmoid(fdist, 100, 50); - } - } - } - - LaplacianPyramid pyramid(panoramaSize.first, panoramaSize.second); - pyramid.process(perCameraOutput); - - LaplacianPyramid blending_pyramid(panoramaSize.first, panoramaSize.second); - blending_pyramid.process(imageOut); - blending_pyramid.blend(pyramid, weightMap); - blending_pyramid.rebuild(imageOut); - - - sprintf(filename, "%s_blend_%d.exr", outputPanorama.c_str(), imageIndex); - image::writeImage(filename, imageOut, image::EImageColorSpace::NO_CONVERSION); - - /*for (int i = 0; i < pyramid._scales; i++) { - sprintf(filename, "%s_diff_%d_%d.exr", outputPanorama.c_str(), imageIndex, i); - image::writeImage(filename, pyramid._differences_full[i], image::EImageColorSpace::NO_CONVERSION); - } - - sprintf(filename, "%s_cam_%d.exr", outputPanorama.c_str(), imageIndex); - image::writeImage(filename, perCameraOutput, image::EImageColorSpace::NO_CONVERSION);*/ - } - - - /*blending_pyramid.rebuild(imageOut); - - for(int y = 0; y < imageOut.Height(); ++y) - { - for(int x = 0; x < imageOut.Width(); ++x) - { - image::RGBAfColor& pixel = imageOut(y, x); - if(pixel.a() > 
std::numeric_limits::epsilon()) - { - pixel.r() /= pixel.a(); - pixel.g() /= pixel.a(); - pixel.b() /= pixel.a(); - pixel.a() = 1.0f; // TMP: comment to keep the alpha with the number of contribution for debugging - } - } + /** + const aliceVision::image::Image & color = warper.getColor(); + char filename[512]; + sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), view.getViewId()); + image::writeImage(filename, color, image::EImageColorSpace::NO_CONVERSION); + */ + const aliceVision::image::Image & panorama = compositer.getPanorama(); + char filename[512]; + sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); + image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); + pos++; } - image::writeImage(outputPanorama, imageOut, image::EImageColorSpace::LINEAR);*/ - return EXIT_SUCCESS; } \ No newline at end of file From a74a294a147c0ce582bfa1bb76402f6787d788c1 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Mon, 14 Oct 2019 16:25:14 +0200 Subject: [PATCH 072/174] laplacian --- .../pipeline/main_panoramaStitching.cpp | 517 +++++++++++++++++- 1 file changed, 495 insertions(+), 22 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 67414535ed..a0a1b1efa7 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -78,6 +78,339 @@ namespace SphericalMapping } } +class Pyramid { +public: + Pyramid(const size_t width_base, const size_t height_base, const size_t limit_scales = 64) : + _width_base(width_base), + _height_base(height_base) + { + /** + * Compute optimal scale + * The smallest level will be at least of size min_size + */ + size_t min_dim = std::min(_width_base, _height_base); + size_t min_size = 32; + _scales = std::min(limit_scales, static_cast(floor(log2(double(min_dim) / float(min_size))))); + + + /** + * Create pyramid + **/ + size_t new_width = _width_base; + size_t 
new_height = _height_base; + for (int i = 0; i < _scales; i++) { + + _pyramid_color.push_back(image::Image(new_width, new_height, true, image::RGBfColor(0.0f, 0.0f, 0.0f))); + _pyramid_mask.push_back(image::Image(new_width, new_height, true, false)); + new_height /= 2; + new_width /= 2; + } + } + + bool process(const image::Image & input, const image::Image & mask) { + + if (input.Height() != _pyramid_color[0].Height()) return false; + if (input.Width() != _pyramid_color[0].Width()) return false; + if (input.Height() != mask.Height()) return false; + if (input.Width() != mask.Width()) return false; + + + /** + * Build pyramid + */ + _pyramid_color[0] = input; + _pyramid_mask[0] = mask; + for (int lvl = 1; lvl < _scales; lvl++) { + downscale(_pyramid_color[lvl], _pyramid_color[lvl - 1]); + downscale(_pyramid_mask[lvl], _pyramid_mask[lvl - 1]); + } + + return true; + } + + static bool downscale(image::Image & output, const image::Image & input) { + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + output(i, j) = input(i * 2, j * 2); + } + } + + return true; + } + + static bool downscale(image::Image & output, const image::Image & input) { + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + output(i, j) = input(i * 2, j * 2); + } + } + + return true; + } + + const size_t getScalesCount() const { + return _scales; + } + + const std::vector> & getPyramidColor() const { + return _pyramid_color; + } + + const std::vector> & getPyramidMask() const { + return _pyramid_mask; + } + + std::vector> & getPyramidColor() { + return _pyramid_color; + } + + std::vector> & getPyramidMask() { + return _pyramid_mask; + } + +protected: + std::vector> _pyramid_color; + std::vector> _pyramid_mask; + size_t _width_base; + size_t _height_base; + size_t _scales; +}; + +class LaplacianPyramid { +public: + LaplacianPyramid(const size_t width_base, const size_t height_base, const size_t limit_scales = 64) 
+ : + _pyramid(width_base, height_base, limit_scales), + _pyramid_differences(width_base, height_base, limit_scales), + _pyramid_buffer(width_base, height_base, limit_scales) + { + for (int i = 0; i < _pyramid.getScalesCount(); i++) { + _differences_full.push_back(image::Image(width_base, height_base, true, image::RGBfColor(0.0f, 0.0f, 0.0f))); + _differences_masks_full.push_back(image::Image(width_base, height_base, true, false)); + } + } + + bool process(const image::Image & input, const image::Image & mask) { + + /** + * Compute pyramid of images + */ + if (!_pyramid.process(input, mask)) { + return false; + } + + /** + * For each level, upscale once and differentiate with upper level + */ + const std::vector> & pyramid_color = _pyramid.getPyramidColor(); + const std::vector> & pyramid_mask = _pyramid.getPyramidMask(); + std::vector> & difference_color = _pyramid_differences.getPyramidColor(); + std::vector> & difference_mask = _pyramid_differences.getPyramidMask(); + std::vector> & buffers_color = _pyramid_buffer.getPyramidColor(); + std::vector> & buffers_mask = _pyramid_buffer.getPyramidMask(); + + for (int lvl = 0; lvl < _pyramid.getScalesCount() - 1; lvl++) { + + upscale(buffers_color[lvl], pyramid_color[lvl + 1]); + upscale(buffers_mask[lvl], pyramid_mask[lvl + 1]); + + substract(difference_color[lvl], difference_mask[lvl], pyramid_color[lvl], pyramid_mask[lvl], buffers_color[lvl], buffers_mask[lvl]); + } + + /** + * Last level is simply a copy of the original pyramid last level (nothing to substract with) + */ + difference_color[_pyramid.getScalesCount() - 1] = pyramid_color[_pyramid.getScalesCount() - 1]; + difference_mask[_pyramid.getScalesCount() - 1] = pyramid_mask[_pyramid.getScalesCount() - 1]; + + /*Upscale to the max each level*/ + for (int max_level = 0; max_level < _pyramid.getScalesCount(); max_level++) { + + image::Image & refOut = _differences_full[max_level]; + image::Image & maskOut = _differences_masks_full[max_level]; + + 
buffers_color[max_level] = difference_color[max_level]; + buffers_mask[max_level] = difference_mask[max_level]; + for (int lvl = max_level - 1; lvl >= 0; lvl--) { + upscale(buffers_color[lvl], buffers_color[lvl + 1]); + upscale(buffers_mask[lvl], buffers_mask[lvl + 1]); + } + + refOut = buffers_color[0]; + maskOut = buffers_mask[0]; + } + + return true; + } + + bool blend(const LaplacianPyramid & other) { + + for (int lvl = 0; lvl < _pyramid.getScalesCount(); lvl++) { + + image::Image & output = _differences_full[lvl]; + image::Image & mask = _differences_masks_full[lvl]; + + const image::Image & inputA = _differences_full[lvl]; + const image::Image & maskA = _differences_masks_full[lvl]; + const image::Image & inputB = other._differences_full[lvl]; + const image::Image & maskB = other._differences_masks_full[lvl]; + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + float weight = 0.5f; + float mweight = 1.0f - weight; + + const image::RGBfColor & pixA = inputA(i, j); + const image::RGBfColor & pixB = inputB(i, j); + + if (maskA(i, j)) { + if (maskB(i, j)) { + output(i, j).r() = mweight * pixA.r() + weight * pixB.r(); + output(i, j).g() = mweight * pixA.g() + weight * pixB.g(); + output(i, j).b() = mweight * pixA.b() + weight * pixB.b(); + mask(i, j) = true; + } + else { + output(i, j) = pixA; + mask(i, j) = true; + } + } + else { + if (maskB(i, j)) { + output(i, j) = pixB; + mask(i, j) = true; + } + else { + output(i, j).r() = 0.0f; + output(i, j).g() = 0.0f; + output(i, j).b() = 0.0f; + mask(i, j) = false; + } + } + } + } + } + + return true; + } + + bool rebuild(image::Image & output, image::Image & mask) { + + size_t scales = _pyramid.getScalesCount(); + output = _differences_full[scales - 1]; + mask = _differences_masks_full[scales - 1]; + + for (int lvl = scales - 2; lvl >= 0; lvl--) { + add(output, mask, output, mask, _differences_full[lvl], _differences_masks_full[lvl]); + } + + return true; + } + +private: + 
bool upscale(image::Image & output, const image::Image & input) { + + int width = output.Width(); + int height = output.Height(); + + width = 2 * (width / 2); + height = 2 * (height / 2); + + for (int i = 0; i < height; i++) { + for (int j = 0; j < width; j++) { + + output(i, j) = input(i / 2, j / 2); + } + } + + return true; + } + + bool upscale(image::Image & output, const image::Image & input) { + + int width = output.Width(); + int height = output.Height(); + + width = 2 * (width / 2); + height = 2 * (height / 2); + + for (int i = 0; i < height; i++) { + for (int j = 0; j < width; j++) { + + output(i, j) = input(i / 2, j / 2); + } + } + + return true; + } + + bool substract(image::Image & output, image::Image & output_mask, const image::Image & inputA, const image::Image & maskA, const image::Image & inputB, const image::Image & maskB) { + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + if ((!maskA(i, j)) || (!maskB(i, j))) { + output_mask(i, j) = false; + } + else { + output(i, j).r() = inputA(i, j).r() - inputB(i, j).r(); + output(i, j).g() = inputA(i, j).g() - inputB(i, j).g(); + output(i, j).b() = inputA(i, j).b() - inputB(i, j).b(); + output_mask(i, j) = true; + } + } + } + + return true; + } + + bool add(image::Image & output, image::Image & output_mask, const image::Image & inputA, const image::Image & maskA, const image::Image & inputB, const image::Image & maskB) { + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + + if (maskA(i, j)) { + if (maskB(i, j)) { + output(i, j).r() = inputA(i, j).r() + inputB(i, j).r(); + output(i, j).g() = inputA(i, j).g() + inputB(i, j).g(); + output(i, j).b() = inputA(i, j).b() + inputB(i, j).b(); + output_mask(i, j) = true; + } + else { + output(i, j).r() = inputA(i, j).r(); + output(i, j).g() = inputA(i, j).g(); + output(i, j).b() = inputA(i, j).b(); + output_mask(i, j) = true; + } + } + else { + output(i, j).r() = 0.0f; + 
output(i, j).g() = 0.0f; + output(i, j).b() = 0.0f; + output_mask(i, j) = false; + } + } + } + + return true; + } + + +private: + Pyramid _pyramid; + Pyramid _pyramid_differences; + Pyramid _pyramid_buffer; + + std::vector> _differences_full; + std::vector> _differences_masks_full; +}; + class CoordinatesMap { public: /** @@ -147,13 +480,8 @@ class CoordinatesMap { _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); _mask = aliceVision::image::Image(rectified_width, rectified_height, true, false); - for (int i = 0; i < _real_height; i++) { - for (int j = 0; j < _real_width; j++) { - - _coordinates(i, j) = buffer_coordinates(_offset_y + i, _offset_x + j); - _mask(i, j) = buffer_mask(_offset_y + i, _offset_x + j); - } - } + _coordinates.block(0, 0, _real_height, _real_width) = buffer_coordinates.block(_offset_y, _offset_x, _real_height, _real_width); + _mask.block(0, 0, _real_height, _real_width) = buffer_mask.block(_offset_y, _offset_x, _real_height, _real_width); return true; } @@ -196,17 +524,31 @@ class Warper { public: virtual bool warp(const CoordinatesMap & map, const aliceVision::image::Image & source) { + /** + * Copy additional info from map + */ + _offset_x = map.getOffsetX(); + _offset_y = map.getOffsetY(); + _real_width = map.getWidth(); + _real_height = map.getHeight(); + _mask = map.getMask(); + + const image::Sampler2d sampler; const aliceVision::image::Image & coordinates = map.getCoordinates(); - const aliceVision::image::Image & mask = map.getMask(); + /** + * Create buffer + */ _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); - const image::Sampler2d sampler; + /** + * Simple warp + */ for (size_t i = 0; i < _color.Height(); i++) { for (size_t j = 0; j < _color.Width(); j++) { - bool valid = mask(i, j); + bool valid = _mask(i, j); if (!valid) { continue; } @@ -225,40 +567,70 @@ class Warper { return _color; } + const aliceVision::image::Image & getMask() const { + return _mask; + } + + 
+ size_t getOffsetX() const { + return _offset_x; + } + + size_t getOffsetY() const { + return _offset_y; + } + + size_t getWidth() const { + return _real_width; + } + + size_t getHeight() const { + return _real_height; + } + private: + size_t _offset_x = 0; + size_t _offset_y = 0; + size_t _real_width = 0; + size_t _real_height = 0; + aliceVision::image::Image _color; + aliceVision::image::Image _mask; }; class Compositer { public: Compositer(size_t outputWidth, size_t outputHeight) : - _panorama(outputWidth, outputHeight, true, image::RGBfColor(0.0f, 0.0f, 0.0f)) { + _panorama(outputWidth, outputHeight, true, image::RGBfColor(0.0f, 0.0f, 0.0f)), + _mask(outputWidth, outputHeight, true, false) + { } - bool append(const CoordinatesMap & map, const Warper & warper) { + bool append(const Warper & warper) { - const aliceVision::image::Image & mask = map.getMask(); + const aliceVision::image::Image & inputMask = warper.getMask(); const aliceVision::image::Image & color = warper.getColor(); - for (size_t i = 0; i < map.getHeight(); i++) { + for (size_t i = 0; i < warper.getHeight(); i++) { - size_t pano_i = map.getOffsetY() + i; + size_t pano_i = warper.getOffsetY() + i; if (pano_i >= _panorama.Height()) { continue; } - for (size_t j = 0; j < map.getWidth(); j++) { + for (size_t j = 0; j < warper.getWidth(); j++) { - if (!mask(i, j)) { + if (!inputMask(i, j)) { continue; } - size_t pano_j = map.getOffsetX() + j; + size_t pano_j = warper.getOffsetX() + j; if (pano_j >= _panorama.Width()) { continue; } _panorama(pano_i, pano_j) = color(i, j); + _mask(pano_i, pano_j) = true; } } @@ -269,10 +641,109 @@ class Compositer { return _panorama; } -private: + const aliceVision::image::Image & getPanoramaMask() const { + return _mask; + } + +protected: aliceVision::image::Image _panorama; + aliceVision::image::Image _mask; +}; + + +class AverageCompositer : public Compositer { + +public: + + AverageCompositer(size_t outputWidth, size_t outputHeight) : Compositer(outputWidth, 
outputHeight) { + + } + + bool append(const Warper & warper) { + + const aliceVision::image::Image & inputMask = warper.getMask(); + const aliceVision::image::Image & color = warper.getColor(); + + for (size_t i = 0; i < warper.getHeight(); i++) { + + size_t pano_i = warper.getOffsetY() + i; + if (pano_i >= _panorama.Height()) { + continue; + } + + for (size_t j = 0; j < warper.getWidth(); j++) { + + if (!inputMask(i, j)) { + continue; + } + + size_t pano_j = warper.getOffsetX() + j; + if (pano_j >= _panorama.Width()) { + continue; + } + + if (!_mask(pano_i, pano_j)) { + _panorama(pano_i, pano_j) = color(i, j); + } + else { + _panorama(pano_i, pano_j).r() = 0.5 * (_panorama(pano_i, pano_j).r() + color(i, j).r()); + _panorama(pano_i, pano_j).g() = 0.5 * (_panorama(pano_i, pano_j).g() + color(i, j).g()); + _panorama(pano_i, pano_j).b() = 0.5 * (_panorama(pano_i, pano_j).b() + color(i, j).b()); + + } + + _mask(pano_i, pano_j) = true; + } + } + + return true; + } }; +class LaplacianCompositer : public Compositer { +public: + + LaplacianCompositer(size_t outputWidth, size_t outputHeight) : + Compositer(outputWidth, outputHeight) + { + + } + + bool append(const Warper & warper) { + + const aliceVision::image::Image & inputMask = warper.getMask(); + const aliceVision::image::Image & color = warper.getColor(); + + aliceVision::image::Image enlargedMask(_panorama.Width(), _panorama.Height(), true, false); + aliceVision::image::Image enlargedInput(_panorama.Width(), _panorama.Height(), false); + + enlargedMask.block(warper.getOffsetY(), warper.getOffsetX(), warper.getHeight(), warper.getWidth()) = inputMask.block(0, 0, warper.getHeight(), warper.getWidth()); + enlargedInput.block(warper.getOffsetY(), warper.getOffsetX(), warper.getHeight(), warper.getWidth()) = color.block(0, 0, warper.getHeight(), warper.getWidth()); + + LaplacianPyramid pyramid_camera(_panorama.Width(), _panorama.Height()); + pyramid_camera.process(enlargedInput, enlargedMask); + + + LaplacianPyramid 
pyramid_panorama(_panorama.Width(), _panorama.Height()); + pyramid_panorama.process(_panorama, _mask); + pyramid_panorama.blend(pyramid_camera); + pyramid_panorama.rebuild(_panorama, _mask); + + for (int i = 0; i < _panorama.Height(); i++) { + for (int j = 0; j < _panorama.Width(); j++) { + if (!_mask(i, j)) { + _panorama(i, j).r() = 0.0f; + _panorama(i, j).g() = 0.0f; + _panorama(i, j).b() = 0.0f; + } + } + } + + return true; + } +}; + + int main(int argc, char **argv) { @@ -298,7 +769,7 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ - std::pair panoramaSize = {1024, 512}; + std::pair panoramaSize = {2048, 1024}; po::options_description optionalParams("Optional parameters"); allParams.add(optionalParams); @@ -365,7 +836,7 @@ int main(int argc, char **argv) { /** * Create compositer */ - Compositer compositer(panoramaSize.first, panoramaSize.second); + LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** @@ -413,7 +884,7 @@ int main(int argc, char **argv) { /** *Composite image into output */ - compositer.append(map, warper); + compositer.append(warper); /** const aliceVision::image::Image & color = warper.getColor(); @@ -426,6 +897,8 @@ int main(int argc, char **argv) { sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); pos++; + + } return EXIT_SUCCESS; From 11c2c1fe0b551c3c3cc38d58fd4dc90428456c6f Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Tue, 15 Oct 2019 08:05:45 +0200 Subject: [PATCH 073/174] some laplacian changes --- .../pipeline/main_panoramaStitching.cpp | 47 ++++++++++++------- 1 file changed, 30 insertions(+), 17 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index a0a1b1efa7..a68955f597 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ 
b/src/software/pipeline/main_panoramaStitching.cpp @@ -117,7 +117,7 @@ class Pyramid { /** * Build pyramid - */ + */ _pyramid_color[0] = input; _pyramid_mask[0] = mask; for (int lvl = 1; lvl < _scales; lvl++) { @@ -133,7 +133,9 @@ class Pyramid { for (int i = 0; i < output.Height(); i++) { for (int j = 0; j < output.Width(); j++) { - output(i, j) = input(i * 2, j * 2); + output(i, j).r() = 0.25 * (input(i * 2, j * 2).r() + input(i * 2 + 1, j * 2).r() + input(i * 2, j * 2 + 1).r() + input(i * 2 + 1, j * 2 + 1).r()); + output(i, j).g() = 0.25 * (input(i * 2, j * 2).g() + input(i * 2 + 1, j * 2).g() + input(i * 2, j * 2 + 1).g() + input(i * 2 + 1, j * 2 + 1).g()); + output(i, j).b() = 0.25 * (input(i * 2, j * 2).b() + input(i * 2 + 1, j * 2).b() + input(i * 2, j * 2 + 1).b() + input(i * 2 + 1, j * 2 + 1).b()); } } @@ -145,7 +147,7 @@ class Pyramid { for (int i = 0; i < output.Height(); i++) { for (int j = 0; j < output.Width(); j++) { - output(i, j) = input(i * 2, j * 2); + output(i, j) = input(i * 2, j * 2) & input(i * 2 + 1, j * 2) & input(i * 2, j * 2 + 1) & input(i * 2 + 1, j * 2 + 1) ; } } @@ -354,14 +356,19 @@ class LaplacianPyramid { for (int i = 0; i < output.Height(); i++) { for (int j = 0; j < output.Width(); j++) { - if ((!maskA(i, j)) || (!maskB(i, j))) { - output_mask(i, j) = false; + if (maskA(i, j)) { + if (maskB(i, j)) { + output(i, j).r() = inputA(i, j).r() - inputB(i, j).r(); + output(i, j).g() = inputA(i, j).g() - inputB(i, j).g(); + output(i, j).b() = inputA(i, j).b() - inputB(i, j).b(); + } + else { + output(i, j) = inputA(i, j); + } + output_mask(i, j) = true; } else { - output(i, j).r() = inputA(i, j).r() - inputB(i, j).r(); - output(i, j).g() = inputA(i, j).g() - inputB(i, j).g(); - output(i, j).b() = inputA(i, j).b() - inputB(i, j).b(); - output_mask(i, j) = true; + output_mask(i, j) = false; } } } @@ -383,17 +390,21 @@ class LaplacianPyramid { output_mask(i, j) = true; } else { - output(i, j).r() = inputA(i, j).r(); - output(i, j).g() = 
inputA(i, j).g(); - output(i, j).b() = inputA(i, j).b(); + output(i, j) = inputA(i, j); output_mask(i, j) = true; } } else { - output(i, j).r() = 0.0f; - output(i, j).g() = 0.0f; - output(i, j).b() = 0.0f; - output_mask(i, j) = false; + if (maskB(i, j)) { + output(i, j) = inputB(i, j); + output_mask(i, j) = true; + } + else { + output(i, j).r() = 0.0f; + output(i, j).g() = 0.0f; + output(i, j).b() = 0.0f; + output_mask(i, j) = false; + } } } @@ -769,7 +780,7 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ - std::pair panoramaSize = {2048, 1024}; + std::pair panoramaSize = {1024, 512}; po::options_description optionalParams("Optional parameters"); allParams.add(optionalParams); @@ -864,6 +875,7 @@ int main(int argc, char **argv) { /** * Prepare coordinates map */ + if (pos == 3 || pos == 7 || pos == 15) { CoordinatesMap map; map.build(panoramaSize, camPose, intrinsic); @@ -896,6 +908,7 @@ int main(int argc, char **argv) { char filename[512]; sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); + } pos++; From fc86b28fa7e513dd458a0a82d59a44dc225cddbe Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Tue, 15 Oct 2019 14:52:14 +0200 Subject: [PATCH 074/174] fully working laplacian --- .../pipeline/main_panoramaStitching.cpp | 341 ++++++++++++------ 1 file changed, 239 insertions(+), 102 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index a68955f597..f70e90e739 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -25,6 +25,102 @@ using namespace aliceVision; namespace po = boost::program_options; +float sigmoid(float x, float sigwidth, float sigMid) +{ + return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); +} + +bool computeDistanceMap(image::Image & distance, const image::Image & mask)
{ + + int m = mask.Height(); + int n = mask.Width(); + + int maxval = m * n; + + distance = image::Image (n, m, false); + for(int x = 0; x < n; ++x) { + + //A corner is when mask becomes 0 + bool b = !mask(0, x); + if (b) { + distance(0, x) = 0; + } + else { + distance(0, x) = maxval * maxval; + } + + for (int y = 1; y < m; y++) { + bool b = !mask(y, x); + if (b) { + distance(y, x) = 0; + } + else { + distance(y, x) = 1 + distance(y - 1, x); + } + } + + for (int y = m - 2; y >= 0; y--) { + if (distance(y + 1, x) < distance(y, x)) { + distance(y, x) = 1 + distance(y + 1, x); + } + } + } + + for (int y = 0; y < m; y++) { + int q; + std::map s; + std::map t; + + q = 0; + s[0] = 0; + t[0] = 0; + + std::function f = [distance, y](int x, int i) { + int gi = distance(y, i); + return (x - i)*(x - i) + gi * gi; + }; + + std::function sep = [distance, y](int i, int u) { + int gu = distance(y, u); + int gi = distance(y, i); + + int nom = (u * u) - (i * i) + (gu * gu) - (gi * gi); + int denom = 2 * (u - i); + + return nom / denom; + }; + + for (int u = 1; u < n; u++) { + + while (q >= 0 && (f(t[q], s[q]) > f(t[q], u))) { + q = q - 1; + } + + if (q < 0) { + q = 0; + s[0] = u; + } + else { + int w = 1 + sep(s[q], u); + if (w < n) { + q = q + 1; + s[q] = u; + t[q] = w; + } + } + } + + for (int u = n - 1; u >= 0; u--) { + distance(y, u) = f(u, s[q]); + if (u == t[q]) { + q = q - 1; + } + } + } + + return true; +} + namespace SphericalMapping { /** @@ -78,6 +174,7 @@ namespace SphericalMapping } } +template class Pyramid { public: Pyramid(const size_t width_base, const size_t height_base, const size_t limit_scales = 64) : @@ -100,14 +197,14 @@ class Pyramid { size_t new_height = _height_base; for (int i = 0; i < _scales; i++) { - _pyramid_color.push_back(image::Image(new_width, new_height, true, image::RGBfColor(0.0f, 0.0f, 0.0f))); + _pyramid_color.push_back(image::Image(new_width, new_height, true, T(0))); _pyramid_mask.push_back(image::Image(new_width, new_height, true, false)); 
new_height /= 2; new_width /= 2; } } - bool process(const image::Image & input, const image::Image & mask) { + bool process(const image::Image & input, const image::Image & mask) { if (input.Height() != _pyramid_color[0].Height()) return false; if (input.Width() != _pyramid_color[0].Width()) return false; @@ -121,33 +218,56 @@ class Pyramid { _pyramid_color[0] = input; _pyramid_mask[0] = mask; for (int lvl = 1; lvl < _scales; lvl++) { - downscale(_pyramid_color[lvl], _pyramid_color[lvl - 1]); - downscale(_pyramid_mask[lvl], _pyramid_mask[lvl - 1]); + downscale(_pyramid_color[lvl], _pyramid_mask[lvl], _pyramid_color[lvl - 1], _pyramid_mask[lvl - 1]); } return true; } - - static bool downscale(image::Image & output, const image::Image & input) { + + + static bool downscale(image::Image & output, image::Image & output_mask, const image::Image & input, const image::Image & input_mask) { for (int i = 0; i < output.Height(); i++) { + int ui = i * 2; + for (int j = 0; j < output.Width(); j++) { + int uj = j * 2; + + bool one = input_mask(ui, uj) || input_mask(ui, uj + 1) || input_mask(ui + 1, uj) || input_mask(ui + 1, uj + 1); - output(i, j).r() = 0.25 * (input(i * 2, j * 2).r() + input(i * 2 + 1, j * 2).r() + input(i * 2, j * 2 + 1).r() + input(i * 2 + 1, j * 2 + 1).r()); - output(i, j).g() = 0.25 * (input(i * 2, j * 2).g() + input(i * 2 + 1, j * 2).g() + input(i * 2, j * 2 + 1).g() + input(i * 2 + 1, j * 2 + 1).g()); - output(i, j).b() = 0.25 * (input(i * 2, j * 2).b() + input(i * 2 + 1, j * 2).b() + input(i * 2, j * 2 + 1).b() + input(i * 2 + 1, j * 2 + 1).b()); - } - } + if (!one) { + output(i, j) = T(0.0f); + output_mask(i, j) = false; + } + else { + size_t count = 0; + T color = T(0.0f); - return true; - } + if (input_mask(ui, uj)) { + color = input(ui, uj); + count++; + } + + if (input_mask(ui, uj + 1)) { + color += input(ui, uj + 1); + count++; + } - static bool downscale(image::Image & output, const image::Image & input) { + if (input_mask(ui + 1, uj)) { + color 
+= input(ui + 1, uj); + count++; + } - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { + if (input_mask(ui + 1, uj + 1)) { + color += input(ui + 1, uj + 1); + count++; + } + + color /= float(count); - output(i, j) = input(i * 2, j * 2) & input(i * 2 + 1, j * 2) & input(i * 2, j * 2 + 1) & input(i * 2 + 1, j * 2 + 1) ; + output(i, j) = color; + output_mask(i, j) = true; + } } } @@ -158,7 +278,7 @@ class Pyramid { return _scales; } - const std::vector> & getPyramidColor() const { + const std::vector> & getPyramidColor() const { return _pyramid_color; } @@ -166,7 +286,7 @@ class Pyramid { return _pyramid_mask; } - std::vector> & getPyramidColor() { + std::vector> & getPyramidColor() { return _pyramid_color; } @@ -175,7 +295,7 @@ class Pyramid { } protected: - std::vector> _pyramid_color; + std::vector> _pyramid_color; std::vector> _pyramid_mask; size_t _width_base; size_t _height_base; @@ -190,10 +310,6 @@ class LaplacianPyramid { _pyramid_differences(width_base, height_base, limit_scales), _pyramid_buffer(width_base, height_base, limit_scales) { - for (int i = 0; i < _pyramid.getScalesCount(); i++) { - _differences_full.push_back(image::Image(width_base, height_base, true, image::RGBfColor(0.0f, 0.0f, 0.0f))); - _differences_masks_full.push_back(image::Image(width_base, height_base, true, false)); - } } bool process(const image::Image & input, const image::Image & mask) { @@ -229,91 +345,103 @@ class LaplacianPyramid { difference_color[_pyramid.getScalesCount() - 1] = pyramid_color[_pyramid.getScalesCount() - 1]; difference_mask[_pyramid.getScalesCount() - 1] = pyramid_mask[_pyramid.getScalesCount() - 1]; - /*Upscale to the max each level*/ - for (int max_level = 0; max_level < _pyramid.getScalesCount(); max_level++) { + return true; + } - image::Image & refOut = _differences_full[max_level]; - image::Image & maskOut = _differences_masks_full[max_level]; + bool blend(image::Image & output, image::Image & output_mask, const 
LaplacianPyramid & other, const image::Image & weights) { - buffers_color[max_level] = difference_color[max_level]; - buffers_mask[max_level] = difference_mask[max_level]; - for (int lvl = max_level - 1; lvl >= 0; lvl--) { - upscale(buffers_color[lvl], buffers_color[lvl + 1]); - upscale(buffers_mask[lvl], buffers_mask[lvl + 1]); - } + const auto & differences = _pyramid_differences.getPyramidColor(); + const auto & differences_masks = _pyramid_differences.getPyramidMask(); + const auto & other_differences = other._pyramid_differences.getPyramidColor(); + const auto & other_differences_masks = other._pyramid_differences.getPyramidMask(); + auto & rescaleds = _pyramid_buffer.getPyramidColor(); + auto & rescaleds_masks = _pyramid_buffer.getPyramidMask(); + const auto & original_camera_mask = other._pyramid_differences.getPyramidMask()[0]; - refOut = buffers_color[0]; - maskOut = buffers_mask[0]; - } + output.fill(image::RGBfColor(0.0f)); + output_mask.fill(false); - return true; - } + for (int lvl = _pyramid.getScalesCount() - 1; lvl >= 0; lvl--) { - bool blend(const LaplacianPyramid & other) { + /*Rescale this level to the 0 level size for the added camera*/ + rescaleds[lvl] = other_differences[lvl]; + rescaleds_masks[lvl] = other_differences_masks[lvl]; + for (int slvl = lvl - 1; slvl >= 0; slvl--) { + upscale(rescaleds[slvl], rescaleds[slvl + 1]); + upscale(rescaleds_masks[slvl], rescaleds_masks[slvl + 1]); + } - for (int lvl = 0; lvl < _pyramid.getScalesCount(); lvl++) { + auto rescaled_other = rescaleds[0]; + auto rescaled_masks_other = rescaleds_masks[0]; - image::Image & output = _differences_full[lvl]; - image::Image & mask = _differences_masks_full[lvl]; + for (int i = 0; i < rescaled_other.Height(); i++) { + for (int j = 0; j < rescaled_other.Width(); j++) { + if (!original_camera_mask(i, j)) { + rescaled_other(i, j) = image::RGBfColor(0.0f); + rescaled_masks_other(i, j) = false; + } + } + } - const image::Image & inputA = _differences_full[lvl]; - const 
image::Image & maskA = _differences_masks_full[lvl]; - const image::Image & inputB = other._differences_full[lvl]; - const image::Image & maskB = other._differences_masks_full[lvl]; + /*Rescale this level to the 0 level size*/ + rescaleds[lvl] = differences[lvl]; + rescaleds_masks[lvl] = differences_masks[lvl]; + for (int slvl = lvl - 1; slvl >= 0; slvl--) { + upscale(rescaleds[slvl], rescaleds[slvl + 1]); + upscale(rescaleds_masks[slvl], rescaleds_masks[slvl + 1]); + } - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { + auto rescaled_origin = rescaleds[0]; + auto rescaled_masks_origin = rescaleds_masks[0]; - float weight = 0.5f; - float mweight = 1.0f - weight; + for (int i = 0; i < rescaled_origin.Height(); i++) { + for (int j = 0; j < rescaled_origin.Width(); j++) { + if (rescaled_masks_origin(i, j)) { + if (rescaled_masks_other(i, j)) { - const image::RGBfColor & pixA = inputA(i, j); - const image::RGBfColor & pixB = inputB(i, j); + float weight = weights(i, j); + float mweight = 1.0f - weight; - if (maskA(i, j)) { - if (maskB(i, j)) { - output(i, j).r() = mweight * pixA.r() + weight * pixB.r(); - output(i, j).g() = mweight * pixA.g() + weight * pixB.g(); - output(i, j).b() = mweight * pixA.b() + weight * pixB.b(); - mask(i, j) = true; + rescaled_origin(i, j).r() = mweight * rescaled_origin(i, j).r() + weight * rescaled_other(i, j).r(); + rescaled_origin(i, j).g() = mweight * rescaled_origin(i, j).g() + weight * rescaled_other(i, j).g(); + rescaled_origin(i, j).b() = mweight * rescaled_origin(i, j).b() + weight * rescaled_other(i, j).b(); + rescaled_masks_origin(i, j) = true; } else { - output(i, j) = pixA; - mask(i, j) = true; + //Nothing to do + rescaled_masks_origin(i, j) = true; } } else { - if (maskB(i, j)) { - output(i, j) = pixB; - mask(i, j) = true; + if (rescaled_masks_other(i, j)) { + rescaled_origin(i, j) = rescaled_other(i, j); + rescaled_masks_origin(i, j) = rescaled_masks_other(i, j); + 
rescaled_masks_origin(i, j) = true; } else { - output(i, j).r() = 0.0f; - output(i, j).g() = 0.0f; - output(i, j).b() = 0.0f; - mask(i, j) = false; + rescaled_origin(i, j) = image::RGBfColor(0.0f); + rescaled_masks_origin(i, j) = false; } } } } - } - - return true; - } - - bool rebuild(image::Image & output, image::Image & mask) { - - size_t scales = _pyramid.getScalesCount(); - output = _differences_full[scales - 1]; - mask = _differences_masks_full[scales - 1]; - for (int lvl = scales - 2; lvl >= 0; lvl--) { - add(output, mask, output, mask, _differences_full[lvl], _differences_masks_full[lvl]); + for (int i = 0; i < rescaled_origin.Height(); i++) { + for (int j = 0; j < rescaled_origin.Width(); j++) { + if (rescaled_masks_origin(i, j)) { + output(i, j).r() = output(i, j).r() + rescaled_origin(i, j).r(); + output(i, j).g() = output(i, j).g() + rescaled_origin(i, j).g(); + output(i, j).b() = output(i, j).b() + rescaled_origin(i, j).b(); + output_mask(i, j) = true; + } + } + } } return true; } + private: bool upscale(image::Image & output, const image::Image & input) { @@ -392,6 +520,11 @@ class LaplacianPyramid { else { output(i, j) = inputA(i, j); output_mask(i, j) = true; + + output(i, j).r() = 0.0f; + output(i, j).g() = 0.0f; + output(i, j).b() = 0.0f; + output_mask(i, j) = false; } } else { @@ -414,12 +547,9 @@ class LaplacianPyramid { private: - Pyramid _pyramid; - Pyramid _pyramid_differences; - Pyramid _pyramid_buffer; - - std::vector> _differences_full; - std::vector> _differences_masks_full; + Pyramid _pyramid; + Pyramid _pyramid_differences; + Pyramid _pyramid_buffer; }; class CoordinatesMap { @@ -734,11 +864,28 @@ class LaplacianCompositer : public Compositer { LaplacianPyramid pyramid_camera(_panorama.Width(), _panorama.Height()); pyramid_camera.process(enlargedInput, enlargedMask); - + image::Image distanceMap; + computeDistanceMap(distanceMap, enlargedMask); + + image::Image weightMap(_panorama.Width(), _panorama.Height()); + for(int y = 0; y < 
_panorama.Height(); ++y) + { + for(int x = 0; x < _panorama.Width(); ++x) + { + int dist = distanceMap(y, x); + + weightMap(y, x) = 0.0f; + if (dist > 0) + { + float fdist = sqrtf(float(dist)); + weightMap(y, x) = std::min(1.0f, std::max(0.0f, 1.0f - sigmoid(fdist, 100, 50))); + } + } + } + LaplacianPyramid pyramid_panorama(_panorama.Width(), _panorama.Height()); pyramid_panorama.process(_panorama, _mask); - pyramid_panorama.blend(pyramid_camera); - pyramid_panorama.rebuild(_panorama, _mask); + pyramid_panorama.blend(_panorama, _mask, pyramid_camera, weightMap); for (int i = 0; i < _panorama.Height(); i++) { for (int j = 0; j < _panorama.Width(); j++) { @@ -780,7 +927,7 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ - std::pair panoramaSize = {1024, 512}; + std::pair panoramaSize = {4096, 2048}; po::options_description optionalParams("Optional parameters"); allParams.add(optionalParams); @@ -848,7 +995,7 @@ int main(int argc, char **argv) { * Create compositer */ LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); - + /** * Preprocessing per view @@ -875,7 +1022,6 @@ int main(int argc, char **argv) { /** * Prepare coordinates map */ - if (pos == 3 || pos == 7 || pos == 15) { CoordinatesMap map; map.build(panoramaSize, camPose, intrinsic); @@ -897,21 +1043,12 @@ int main(int argc, char **argv) { *Composite image into output */ compositer.append(warper); - - /** - const aliceVision::image::Image & color = warper.getColor(); - char filename[512]; - sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), view.getViewId()); - image::writeImage(filename, color, image::EImageColorSpace::NO_CONVERSION); - */ + const aliceVision::image::Image & panorama = compositer.getPanorama(); char filename[512]; sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); - } pos++; - - } return EXIT_SUCCESS; From 
7745ae33637b10a8b3d3d685e65ddb02cbc274d5 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Wed, 16 Oct 2019 11:15:59 +0200 Subject: [PATCH 075/174] multi level warping --- .../pipeline/main_panoramaEstimation.cpp | 30 +++ .../pipeline/main_panoramaStitching.cpp | 194 +++++++++++++++++- 2 files changed, 219 insertions(+), 5 deletions(-) diff --git a/src/software/pipeline/main_panoramaEstimation.cpp b/src/software/pipeline/main_panoramaEstimation.cpp index 25fb8f91f9..dfb95e6918 100644 --- a/src/software/pipeline/main_panoramaEstimation.cpp +++ b/src/software/pipeline/main_panoramaEstimation.cpp @@ -168,6 +168,13 @@ int main(int argc, char **argv) return EXIT_FAILURE; } + sfmData::Poses & initial_poses = inputSfmData.getPoses(); + Eigen::Matrix3d ref_R_base = Eigen::Matrix3d::Identity(); + if (!initial_poses.empty()) { + + ref_R_base = initial_poses.begin()->second.getTransform().rotation(); + } + // get describerTypes const std::vector describerTypes = feature::EImageDescriberType_stringToEnums(describerTypesName); @@ -217,6 +224,7 @@ int main(int argc, char **argv) // configure relative rotation method (from essential or from homography matrix) sfmEngine.SetRelativeRotationMethod(sfm::ERelativeRotationMethod(relativeRotationMethod)); + if(!sfmEngine.process()) return EXIT_FAILURE; @@ -235,8 +243,28 @@ int main(int argc, char **argv) sfmDataIO::Save(sfmEngine.getSfMData(), (fs::path(outDirectory) / "BA_after.abc").string(), sfmDataIO::ESfMData::ALL); } + sfmData::SfMData& outSfmData = sfmEngine.getSfMData(); + + + /** + * If an initial set of poses was available, make sure at least one pose is aligned with it + */ + sfmData::Poses & final_poses = outSfmData.getPoses(); + if (!final_poses.empty()) { + + Eigen::Matrix3d ref_R_current = final_poses.begin()->second.getTransform().rotation(); + Eigen::Matrix3d R_restore = ref_R_current.transpose() * ref_R_base; + + for (auto & pose : outSfmData.getPoses()) { + geometry::Pose3 p = pose.second.getTransform(); + 
Eigen::Matrix3d newR = p.rotation() * R_restore ; + p.rotation() = newR; + pose.second.setTransform(p); + } + } + ALICEVISION_LOG_INFO("Panorama solve took (s): " << timer.elapsed()); ALICEVISION_LOG_INFO("Generating HTML report..."); @@ -255,6 +283,7 @@ int main(int argc, char **argv) return -1; } + if (initial_poses.empty()) { std::string firstShot_datetime; IndexT firstShot_viewId = 0; @@ -318,6 +347,7 @@ int main(int argc, char **argv) // We only need to correct the rotation S = 1.0; t = Vec3::Zero(); + sfm::applyTransform(outSfmData, S, R, t); } diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index f70e90e739..1ba602e674 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -8,6 +8,7 @@ * Image stuff */ #include +#include /*Logging stuff*/ #include @@ -174,6 +175,103 @@ namespace SphericalMapping } } +class GaussianPyramidNoMask { +public: + GaussianPyramidNoMask(const size_t width_base, const size_t height_base, const size_t limit_scales = 64) : + _width_base(width_base), + _height_base(height_base) + { + /** + * Compute optimal scale + * The smallest level will be at least of size min_size + */ + size_t min_dim = std::min(_width_base, _height_base); + size_t min_size = 32; + _scales = std::min(limit_scales, static_cast(floor(log2(double(min_dim) / float(min_size))))); + + + /** + * Create pyramid + **/ + size_t new_width = _width_base; + size_t new_height = _height_base; + for (int i = 0; i < _scales; i++) { + + _pyramid_color.push_back(image::Image(new_width, new_height, true, image::RGBfColor(0))); + _filter_buffer.push_back(image::Image(new_width, new_height, true, image::RGBfColor(0))); + new_height /= 2; + new_width /= 2; + } + } + + bool process(const image::Image & input) { + + if (input.Height() != _pyramid_color[0].Height()) return false; + if (input.Width() != _pyramid_color[0].Width()) return false; + + + /** + * 
Kernel + */ + oiio::ImageBuf K = oiio::ImageBufAlgo::make_kernel("gaussian", 5, 5); + + /** + * Build pyramid + */ + _pyramid_color[0] = input; + for (int lvl = 0; lvl < _scales - 1; lvl++) { + + const image::Image & source = _pyramid_color[lvl]; + image::Image & dst = _filter_buffer[lvl]; + + oiio::ImageSpec spec(source.Width(), source.Height(), 3, oiio::TypeDesc::FLOAT); + + const oiio::ImageBuf inBuf(spec, const_cast(source.data())); + oiio::ImageBuf outBuf(spec, dst.data()); + oiio::ImageBufAlgo::convolve(outBuf, inBuf, K); + + downscale(_pyramid_color[lvl + 1], _filter_buffer[lvl]); + } + + return true; + } + + + bool downscale(image::Image & output, const image::Image & input) { + + for (int i = 0; i < output.Height(); i++) { + int ui = i * 2; + + for (int j = 0; j < output.Width(); j++) { + int uj = j * 2; + + output(i, j) = input(ui, uj); + } + } + + return true; + } + + const size_t getScalesCount() const { + return _scales; + } + + const std::vector> & getPyramidColor() const { + return _pyramid_color; + } + + std::vector> & getPyramidColor() { + return _pyramid_color; + } + +protected: + std::vector> _pyramid_color; + std::vector> _filter_buffer; + size_t _width_base; + size_t _height_base; + size_t _scales; +}; + template class Pyramid { public: @@ -729,7 +827,7 @@ class Warper { return _real_height; } -private: +protected: size_t _offset_x = 0; size_t _offset_y = 0; size_t _real_width = 0; @@ -739,10 +837,91 @@ class Warper { aliceVision::image::Image _mask; }; +class GaussianWarper : public Warper { +public: + virtual bool warp(const CoordinatesMap & map, const aliceVision::image::Image & source) { + + /** + * Copy additional info from map + */ + _offset_x = map.getOffsetX(); + _offset_y = map.getOffsetY(); + _real_width = map.getWidth(); + _real_height = map.getHeight(); + _mask = map.getMask(); + + const image::Sampler2d sampler; + const aliceVision::image::Image & coordinates = map.getCoordinates(); + + /** + * Create a pyramid for input + */ + 
GaussianPyramidNoMask pyramid(source.Width(), source.Height()); + pyramid.process(source); + const std::vector> & mlsource = pyramid.getPyramidColor(); + size_t max_level = pyramid.getScalesCount() - 1; + + /** + * Create buffer + */ + _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); + + + /** + * Multi level warp + */ + for (size_t i = 0; i < _color.Height(); i++) { + for (size_t j = 0; j < _color.Width(); j++) { + + bool valid = _mask(i, j); + if (!valid) { + continue; + } + + if (i == _color.Height() - 1 || j == _color.Width() - 1 || !_mask(i + 1, j) || !_mask(i, j + 1)) { + const Eigen::Vector2d & coord = coordinates(i, j); + const image::RGBfColor pixel = sampler(source, coord(1), coord(0)); + _color(i, j) = pixel; + continue; + } + + const Eigen::Vector2d & coord_mm = coordinates(i, j); + const Eigen::Vector2d & coord_mp = coordinates(i, j + 1); + const Eigen::Vector2d & coord_pm = coordinates(i + 1, j); + + double dxx = coord_pm(0) - coord_mm(0); + double dxy = coord_mp(0) - coord_mm(0); + double dyx = coord_pm(1) - coord_mm(1); + double dyy = coord_mp(1) - coord_mm(1); + double det = std::abs(dxx*dyy - dxy*dyx); + double scale = sqrt(det); + + + double flevel = log2(scale); + size_t blevel = std::min(max_level, size_t(floor(flevel))); + + double dscale, x, y; + dscale = 1.0 / pow(2.0, blevel); + x = coord_mm(0) * dscale; + y = coord_mm(1) * dscale; + /*Fallback to first level if outside*/ + if (x >= mlsource[blevel].Width() - 1 || y >= mlsource[blevel].Height() - 1) { + _color(i, j) = sampler(mlsource[0], coord_mm(1), coord_mm(0)); + continue; + } + + _color(i, j) = sampler(mlsource[blevel], y, x); + } + } + + return true; + } +}; + class Compositer { public: Compositer(size_t outputWidth, size_t outputHeight) : - _panorama(outputWidth, outputHeight, true, image::RGBfColor(0.0f, 0.0f, 0.0f)), + _panorama(outputWidth, outputHeight, true, image::RGBfColor(1.0f, 0.0f, 0.0f)), _mask(outputWidth, outputHeight, true, false) { } @@ 
-927,7 +1106,7 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ - std::pair panoramaSize = {4096, 2048}; + std::pair panoramaSize = {2048, 1024}; po::options_description optionalParams("Optional parameters"); allParams.add(optionalParams); @@ -1019,6 +1198,9 @@ int main(int argc, char **argv) { const geometry::Pose3 & camPose = sfmData.getPose(view).getTransform(); const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); + std::cout << camPose.rotation() << std::endl; + + /*if (pos == 8) */{ /** * Prepare coordinates map */ @@ -1035,7 +1217,7 @@ int main(int argc, char **argv) { /** * Warp image */ - Warper warper; + GaussianWarper warper; warper.warp(map, source); @@ -1043,11 +1225,13 @@ int main(int argc, char **argv) { *Composite image into output */ compositer.append(warper); - + { const aliceVision::image::Image & panorama = compositer.getPanorama(); char filename[512]; sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); + } + } pos++; } From a5f3d0e6addaf61951ee29ce44da8b52c24a9bb3 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Thu, 17 Oct 2019 13:17:58 +0200 Subject: [PATCH 076/174] adding alphacompositer --- src/aliceVision/image/io.cpp | 5 + src/aliceVision/image/io.hpp | 1 + .../pipeline/main_panoramaStitching.cpp | 736 +++++++----------- 3 files changed, 303 insertions(+), 439 deletions(-) diff --git a/src/aliceVision/image/io.cpp b/src/aliceVision/image/io.cpp index 00a1d8b700..dc30c448e0 100644 --- a/src/aliceVision/image/io.cpp +++ b/src/aliceVision/image/io.cpp @@ -414,6 +414,11 @@ void writeImage(const std::string& path, const Image& image, EImageCo writeImage(path, oiio::TypeDesc::FLOAT, 3, image, imageColorSpace, metadata); } +void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata) +{ + writeImage(path, 
oiio::TypeDesc::FLOAT, 1, image, imageColorSpace, metadata); +} + void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata) { writeImage(path, oiio::TypeDesc::UINT8, 3, image, imageColorSpace, metadata); diff --git a/src/aliceVision/image/io.hpp b/src/aliceVision/image/io.hpp index 2ff4b370d8..b00d56ec72 100644 --- a/src/aliceVision/image/io.hpp +++ b/src/aliceVision/image/io.hpp @@ -148,6 +148,7 @@ void readImage(const std::string& path, Image& image, EImageColorSpace * @param[in] path The given path to the image * @param[in] image The output image buffer */ +void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata = oiio::ParamValueList()); void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata = oiio::ParamValueList()); void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata = oiio::ParamValueList()); void writeImage(const std::string& path, const Image& image, EImageColorSpace imageColorSpace, const oiio::ParamValueList& metadata = oiio::ParamValueList()); diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 1ba602e674..397465c14d 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -31,7 +31,156 @@ float sigmoid(float x, float sigwidth, float sigMid) return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); } -bool computeDistanceMap(image::Image & distance, const image::Image & mask) { +Eigen::MatrixXf gaussian_kernel(size_t kernel_length, float sigma) { + + Eigen::VectorXd x; + x.setLinSpaced(kernel_length + 1, -sigma, +sigma); + + Eigen::VectorXd cdf(kernel_length + 1); + for (int i = 0; i < kernel_length + 1; i++) { + 
cdf(i) = 0.5 * (1 + std::erf(x(i)/sqrt(2.0))); + } + + Eigen::VectorXd k1d(kernel_length); + for (int i = 0; i < kernel_length; i++) { + k1d(i) = cdf(i + 1) - cdf(i); + } + + Eigen::MatrixXd K = k1d * k1d.transpose(); + + + double sum = K.sum(); + K = K / sum; + + return K.cast(); +} + +bool convolve(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::MatrixXf & kernel) { + + if (output.size() != input.size()) { + return false; + } + + if (output.size() != mask.size()) { + return false; + } + + if (kernel.size() % 2 == 0) { + return false; + } + + if (kernel.rows() != kernel.cols()) { + return false; + } + + int radius = kernel.rows() / 2; + + Eigen::MatrixXf kernel_scaled = kernel; + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + float sum = 0.0f; + float sum_mask = 0.0f; + + if (!mask(i, j)) { + output(i, j) = 0.0f; + continue; + } + + for (int k = 0; k < kernel.rows(); k++) { + float ni = i + k - radius; + if (ni < 0 || ni >= output.Height()) { + continue; + } + + for (int l = 0; l < kernel.cols(); l++) { + float nj = j + l - radius; + if (nj < 0 || nj >= output.Width()) { + continue; + } + + if (!mask(ni, nj)) { + continue; + } + + float val = kernel(k, l) * input(ni, nj); + sum += val; + sum_mask += kernel(k, l); + } + } + + output(i, j) = sum / sum_mask; + } + } + + return true; +} + +bool convolve(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::MatrixXf & kernel) { + + if (output.size() != input.size()) { + return false; + } + + if (output.size() != mask.size()) { + return false; + } + + if (kernel.size() % 2 == 0) { + return false; + } + + if (kernel.rows() != kernel.cols()) { + return false; + } + + int radius = kernel.rows() / 2; + + Eigen::MatrixXf kernel_scaled = kernel; + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + image::RGBfColor sum(0.0f); + float sum_mask = 0.0f; + + if 
(!mask(i, j)) { + output(i, j) = sum; + continue; + } + + for (int k = 0; k < kernel.rows(); k++) { + float ni = i + k - radius; + if (ni < 0 || ni >= output.Height()) { + continue; + } + + for (int l = 0; l < kernel.cols(); l++) { + float nj = j + l - radius; + if (nj < 0 || nj >= output.Width()) { + continue; + } + + if (!mask(ni, nj)) { + continue; + } + + sum.r() += kernel(k, l) * input(ni, nj).r(); + sum.g() += kernel(k, l) * input(ni, nj).g(); + sum.b() += kernel(k, l) * input(ni, nj).b(); + sum_mask += kernel(k, l); + } + } + + output(i, j) = sum / sum_mask; + } + } + + return true; +} + +bool computeDistanceMap(image::Image & distance, const image::Image & mask) { int m = mask.Height(); int n = mask.Width(); @@ -122,6 +271,7 @@ bool computeDistanceMap(image::Image & distance, const image::Image & return true; } + namespace SphericalMapping { /** @@ -272,384 +422,6 @@ class GaussianPyramidNoMask { size_t _scales; }; -template -class Pyramid { -public: - Pyramid(const size_t width_base, const size_t height_base, const size_t limit_scales = 64) : - _width_base(width_base), - _height_base(height_base) - { - /** - * Compute optimal scale - * The smallest level will be at least of size min_size - */ - size_t min_dim = std::min(_width_base, _height_base); - size_t min_size = 32; - _scales = std::min(limit_scales, static_cast(floor(log2(double(min_dim) / float(min_size))))); - - - /** - * Create pyramid - **/ - size_t new_width = _width_base; - size_t new_height = _height_base; - for (int i = 0; i < _scales; i++) { - - _pyramid_color.push_back(image::Image(new_width, new_height, true, T(0))); - _pyramid_mask.push_back(image::Image(new_width, new_height, true, false)); - new_height /= 2; - new_width /= 2; - } - } - - bool process(const image::Image & input, const image::Image & mask) { - - if (input.Height() != _pyramid_color[0].Height()) return false; - if (input.Width() != _pyramid_color[0].Width()) return false; - if (input.Height() != mask.Height()) return 
false; - if (input.Width() != mask.Width()) return false; - - - /** - * Build pyramid - */ - _pyramid_color[0] = input; - _pyramid_mask[0] = mask; - for (int lvl = 1; lvl < _scales; lvl++) { - downscale(_pyramid_color[lvl], _pyramid_mask[lvl], _pyramid_color[lvl - 1], _pyramid_mask[lvl - 1]); - } - - return true; - } - - - static bool downscale(image::Image & output, image::Image & output_mask, const image::Image & input, const image::Image & input_mask) { - - for (int i = 0; i < output.Height(); i++) { - int ui = i * 2; - - for (int j = 0; j < output.Width(); j++) { - int uj = j * 2; - - bool one = input_mask(ui, uj) || input_mask(ui, uj + 1) || input_mask(ui + 1, uj) || input_mask(ui + 1, uj + 1); - - if (!one) { - output(i, j) = T(0.0f); - output_mask(i, j) = false; - } - else { - size_t count = 0; - T color = T(0.0f); - - if (input_mask(ui, uj)) { - color = input(ui, uj); - count++; - } - - if (input_mask(ui, uj + 1)) { - color += input(ui, uj + 1); - count++; - } - - if (input_mask(ui + 1, uj)) { - color += input(ui + 1, uj); - count++; - } - - if (input_mask(ui + 1, uj + 1)) { - color += input(ui + 1, uj + 1); - count++; - } - - color /= float(count); - - output(i, j) = color; - output_mask(i, j) = true; - } - } - } - - return true; - } - - const size_t getScalesCount() const { - return _scales; - } - - const std::vector> & getPyramidColor() const { - return _pyramid_color; - } - - const std::vector> & getPyramidMask() const { - return _pyramid_mask; - } - - std::vector> & getPyramidColor() { - return _pyramid_color; - } - - std::vector> & getPyramidMask() { - return _pyramid_mask; - } - -protected: - std::vector> _pyramid_color; - std::vector> _pyramid_mask; - size_t _width_base; - size_t _height_base; - size_t _scales; -}; - -class LaplacianPyramid { -public: - LaplacianPyramid(const size_t width_base, const size_t height_base, const size_t limit_scales = 64) - : - _pyramid(width_base, height_base, limit_scales), - _pyramid_differences(width_base, 
height_base, limit_scales), - _pyramid_buffer(width_base, height_base, limit_scales) - { - } - - bool process(const image::Image & input, const image::Image & mask) { - - /** - * Compute pyramid of images - */ - if (!_pyramid.process(input, mask)) { - return false; - } - - /** - * For each level, upscale once and differentiate with upper level - */ - const std::vector> & pyramid_color = _pyramid.getPyramidColor(); - const std::vector> & pyramid_mask = _pyramid.getPyramidMask(); - std::vector> & difference_color = _pyramid_differences.getPyramidColor(); - std::vector> & difference_mask = _pyramid_differences.getPyramidMask(); - std::vector> & buffers_color = _pyramid_buffer.getPyramidColor(); - std::vector> & buffers_mask = _pyramid_buffer.getPyramidMask(); - - for (int lvl = 0; lvl < _pyramid.getScalesCount() - 1; lvl++) { - - upscale(buffers_color[lvl], pyramid_color[lvl + 1]); - upscale(buffers_mask[lvl], pyramid_mask[lvl + 1]); - - substract(difference_color[lvl], difference_mask[lvl], pyramid_color[lvl], pyramid_mask[lvl], buffers_color[lvl], buffers_mask[lvl]); - } - - /** - * Last level is simply a copy of the original pyramid last level (nothing to substract with) - */ - difference_color[_pyramid.getScalesCount() - 1] = pyramid_color[_pyramid.getScalesCount() - 1]; - difference_mask[_pyramid.getScalesCount() - 1] = pyramid_mask[_pyramid.getScalesCount() - 1]; - - return true; - } - - bool blend(image::Image & output, image::Image & output_mask, const LaplacianPyramid & other, const image::Image & weights) { - - const auto & differences = _pyramid_differences.getPyramidColor(); - const auto & differences_masks = _pyramid_differences.getPyramidMask(); - const auto & other_differences = other._pyramid_differences.getPyramidColor(); - const auto & other_differences_masks = other._pyramid_differences.getPyramidMask(); - auto & rescaleds = _pyramid_buffer.getPyramidColor(); - auto & rescaleds_masks = _pyramid_buffer.getPyramidMask(); - const auto & 
original_camera_mask = other._pyramid_differences.getPyramidMask()[0]; - - output.fill(image::RGBfColor(0.0f)); - output_mask.fill(false); - - for (int lvl = _pyramid.getScalesCount() - 1; lvl >= 0; lvl--) { - - /*Rescale this level to the 0 level size for the added camera*/ - rescaleds[lvl] = other_differences[lvl]; - rescaleds_masks[lvl] = other_differences_masks[lvl]; - for (int slvl = lvl - 1; slvl >= 0; slvl--) { - upscale(rescaleds[slvl], rescaleds[slvl + 1]); - upscale(rescaleds_masks[slvl], rescaleds_masks[slvl + 1]); - } - - auto rescaled_other = rescaleds[0]; - auto rescaled_masks_other = rescaleds_masks[0]; - - for (int i = 0; i < rescaled_other.Height(); i++) { - for (int j = 0; j < rescaled_other.Width(); j++) { - if (!original_camera_mask(i, j)) { - rescaled_other(i, j) = image::RGBfColor(0.0f); - rescaled_masks_other(i, j) = false; - } - } - } - - /*Rescale this level to the 0 level size*/ - rescaleds[lvl] = differences[lvl]; - rescaleds_masks[lvl] = differences_masks[lvl]; - for (int slvl = lvl - 1; slvl >= 0; slvl--) { - upscale(rescaleds[slvl], rescaleds[slvl + 1]); - upscale(rescaleds_masks[slvl], rescaleds_masks[slvl + 1]); - } - - auto rescaled_origin = rescaleds[0]; - auto rescaled_masks_origin = rescaleds_masks[0]; - - for (int i = 0; i < rescaled_origin.Height(); i++) { - for (int j = 0; j < rescaled_origin.Width(); j++) { - if (rescaled_masks_origin(i, j)) { - if (rescaled_masks_other(i, j)) { - - float weight = weights(i, j); - float mweight = 1.0f - weight; - - rescaled_origin(i, j).r() = mweight * rescaled_origin(i, j).r() + weight * rescaled_other(i, j).r(); - rescaled_origin(i, j).g() = mweight * rescaled_origin(i, j).g() + weight * rescaled_other(i, j).g(); - rescaled_origin(i, j).b() = mweight * rescaled_origin(i, j).b() + weight * rescaled_other(i, j).b(); - rescaled_masks_origin(i, j) = true; - } - else { - //Nothing to do - rescaled_masks_origin(i, j) = true; - } - } - else { - if (rescaled_masks_other(i, j)) { - 
rescaled_origin(i, j) = rescaled_other(i, j); - rescaled_masks_origin(i, j) = rescaled_masks_other(i, j); - rescaled_masks_origin(i, j) = true; - } - else { - rescaled_origin(i, j) = image::RGBfColor(0.0f); - rescaled_masks_origin(i, j) = false; - } - } - } - } - - for (int i = 0; i < rescaled_origin.Height(); i++) { - for (int j = 0; j < rescaled_origin.Width(); j++) { - if (rescaled_masks_origin(i, j)) { - output(i, j).r() = output(i, j).r() + rescaled_origin(i, j).r(); - output(i, j).g() = output(i, j).g() + rescaled_origin(i, j).g(); - output(i, j).b() = output(i, j).b() + rescaled_origin(i, j).b(); - output_mask(i, j) = true; - } - } - } - } - - return true; - } - - -private: - bool upscale(image::Image & output, const image::Image & input) { - - int width = output.Width(); - int height = output.Height(); - - width = 2 * (width / 2); - height = 2 * (height / 2); - - for (int i = 0; i < height; i++) { - for (int j = 0; j < width; j++) { - - output(i, j) = input(i / 2, j / 2); - } - } - - return true; - } - - bool upscale(image::Image & output, const image::Image & input) { - - int width = output.Width(); - int height = output.Height(); - - width = 2 * (width / 2); - height = 2 * (height / 2); - - for (int i = 0; i < height; i++) { - for (int j = 0; j < width; j++) { - - output(i, j) = input(i / 2, j / 2); - } - } - - return true; - } - - bool substract(image::Image & output, image::Image & output_mask, const image::Image & inputA, const image::Image & maskA, const image::Image & inputB, const image::Image & maskB) { - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { - - if (maskA(i, j)) { - if (maskB(i, j)) { - output(i, j).r() = inputA(i, j).r() - inputB(i, j).r(); - output(i, j).g() = inputA(i, j).g() - inputB(i, j).g(); - output(i, j).b() = inputA(i, j).b() - inputB(i, j).b(); - } - else { - output(i, j) = inputA(i, j); - } - output_mask(i, j) = true; - } - else { - output_mask(i, j) = false; - } - } - } - - return 
true; - } - - bool add(image::Image & output, image::Image & output_mask, const image::Image & inputA, const image::Image & maskA, const image::Image & inputB, const image::Image & maskB) { - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { - - - if (maskA(i, j)) { - if (maskB(i, j)) { - output(i, j).r() = inputA(i, j).r() + inputB(i, j).r(); - output(i, j).g() = inputA(i, j).g() + inputB(i, j).g(); - output(i, j).b() = inputA(i, j).b() + inputB(i, j).b(); - output_mask(i, j) = true; - } - else { - output(i, j) = inputA(i, j); - output_mask(i, j) = true; - - output(i, j).r() = 0.0f; - output(i, j).g() = 0.0f; - output(i, j).b() = 0.0f; - output_mask(i, j) = false; - } - } - else { - if (maskB(i, j)) { - output(i, j) = inputB(i, j); - output_mask(i, j) = true; - } - else { - output(i, j).r() = 0.0f; - output(i, j).g() = 0.0f; - output(i, j).b() = 0.0f; - output_mask(i, j) = false; - } - } - } - } - - return true; - } - - -private: - Pyramid _pyramid; - Pyramid _pyramid_differences; - Pyramid _pyramid_buffer; -}; - class CoordinatesMap { public: /** @@ -661,7 +433,7 @@ class CoordinatesMap { bool build(const std::pair & panoramaSize, const geometry::Pose3 & pose, const aliceVision::camera::IntrinsicBase & intrinsics) { aliceVision::image::Image buffer_coordinates(panoramaSize.first, panoramaSize.second, false); - aliceVision::image::Image buffer_mask(panoramaSize.first, panoramaSize.second, true, false); + aliceVision::image::Image buffer_mask(panoramaSize.first, panoramaSize.second, true, false); size_t max_x = 0; size_t max_y = 0; @@ -696,7 +468,7 @@ class CoordinatesMap { } buffer_coordinates(y, x) = pix_disto; - buffer_mask(y, x) = true; + buffer_mask(y, x) = 1; min_x = std::min(x, min_x); min_y = std::min(y, min_y); @@ -717,7 +489,7 @@ class CoordinatesMap { /* Resize buffers */ _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); - _mask = aliceVision::image::Image(rectified_width, 
rectified_height, true, false); + _mask = aliceVision::image::Image(rectified_width, rectified_height, true, 0); _coordinates.block(0, 0, _real_height, _real_width) = buffer_coordinates.block(_offset_y, _offset_x, _real_height, _real_width); _mask.block(0, 0, _real_height, _real_width) = buffer_mask.block(_offset_y, _offset_x, _real_height, _real_width); @@ -745,7 +517,7 @@ class CoordinatesMap { return _coordinates; } - const aliceVision::image::Image & getMask() const { + const aliceVision::image::Image & getMask() const { return _mask; } @@ -756,7 +528,55 @@ class CoordinatesMap { size_t _real_height = 0; aliceVision::image::Image _coordinates; - aliceVision::image::Image _mask; + aliceVision::image::Image _mask; +}; + +class AlphaBuilder { +public: + virtual bool build(const CoordinatesMap & map, const aliceVision::camera::IntrinsicBase & intrinsics) { + + float w = static_cast(intrinsics.w()); + float h = static_cast(intrinsics.h()); + float cx = w / 2.0f; + float cy = h / 2.0f; + + + const aliceVision::image::Image & coordinates = map.getCoordinates(); + const aliceVision::image::Image & mask = map.getMask(); + + _weights = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); + + for (int i = 0; i < _weights.Height(); i++) { + for (int j = 0; j < _weights.Width(); j++) { + + _weights(i, j) = 0.0f; + + bool valid = mask(i, j); + if (!valid) { + continue; + } + + const Vec2 & coords = coordinates(i, j); + + float x = coords(0); + float y = coords(1); + + float wx = 1.0f - std::abs((x - cx) / cx); + float wy = 1.0f - std::abs((y - cy) / cy); + + _weights(i, j) = wx * wy; + } + } + + return true; + } + + const aliceVision::image::Image & getWeights() const { + return _weights; + } + +private: + aliceVision::image::Image _weights; }; class Warper { @@ -806,7 +626,7 @@ class Warper { return _color; } - const aliceVision::image::Image & getMask() const { + const aliceVision::image::Image & getMask() const { return _mask; } @@ -834,7 +654,7 @@ class 
Warper { size_t _real_height = 0; aliceVision::image::Image _color; - aliceVision::image::Image _mask; + aliceVision::image::Image _mask; }; class GaussianWarper : public Warper { @@ -926,9 +746,9 @@ class Compositer { { } - bool append(const Warper & warper) { + virtual bool append(const Warper & warper) { - const aliceVision::image::Image & inputMask = warper.getMask(); + const aliceVision::image::Image & inputMask = warper.getMask(); const aliceVision::image::Image & color = warper.getColor(); for (size_t i = 0; i < warper.getHeight(); i++) { @@ -950,7 +770,7 @@ class Compositer { } _panorama(pano_i, pano_j) = color(i, j); - _mask(pano_i, pano_j) = true; + _mask(pano_i, pano_j) = 1; } } @@ -961,27 +781,26 @@ class Compositer { return _panorama; } - const aliceVision::image::Image & getPanoramaMask() const { + const aliceVision::image::Image & getPanoramaMask() const { return _mask; } protected: aliceVision::image::Image _panorama; - aliceVision::image::Image _mask; + aliceVision::image::Image _mask; }; class AverageCompositer : public Compositer { - public: AverageCompositer(size_t outputWidth, size_t outputHeight) : Compositer(outputWidth, outputHeight) { } - bool append(const Warper & warper) { + virtual bool append(const Warper & warper) { - const aliceVision::image::Image & inputMask = warper.getMask(); + const aliceVision::image::Image & inputMask = warper.getMask(); const aliceVision::image::Image & color = warper.getColor(); for (size_t i = 0; i < warper.getHeight(); i++) { @@ -1020,70 +839,104 @@ class AverageCompositer : public Compositer { } }; -class LaplacianCompositer : public Compositer { +class AlphaCompositer : public Compositer { public: - LaplacianCompositer(size_t outputWidth, size_t outputHeight) : - Compositer(outputWidth, outputHeight) - { + AlphaCompositer(size_t outputWidth, size_t outputHeight) : + Compositer(outputWidth, outputHeight), + _weightmap(outputWidth, outputHeight, true, 0.0f) { } - bool append(const Warper & warper) { + 
virtual bool append(const Warper & warper, const AlphaBuilder & alpha) { - const aliceVision::image::Image & inputMask = warper.getMask(); + const aliceVision::image::Image & inputMask = warper.getMask(); const aliceVision::image::Image & color = warper.getColor(); + const aliceVision::image::Image & inputWeights = alpha.getWeights(); - aliceVision::image::Image enlargedMask(_panorama.Width(), _panorama.Height(), true, false); - aliceVision::image::Image enlargedInput(_panorama.Width(), _panorama.Height(), false); + for (size_t i = 0; i < warper.getHeight(); i++) { - enlargedMask.block(warper.getOffsetY(), warper.getOffsetX(), warper.getHeight(), warper.getWidth()) = inputMask.block(0, 0, warper.getHeight(), warper.getWidth()); - enlargedInput.block(warper.getOffsetY(), warper.getOffsetX(), warper.getHeight(), warper.getWidth()) = color.block(0, 0, warper.getHeight(), warper.getWidth()); + size_t pano_i = warper.getOffsetY() + i; + if (pano_i >= _panorama.Height()) { + continue; + } - LaplacianPyramid pyramid_camera(_panorama.Width(), _panorama.Height()); - pyramid_camera.process(enlargedInput, enlargedMask); - - image::Image distanceMap; - computeDistanceMap(distanceMap, enlargedMask); + for (size_t j = 0; j < warper.getWidth(); j++) { + + if (!inputMask(i, j)) { + continue; + } + + size_t pano_j = warper.getOffsetX() + j; + if (pano_j >= _panorama.Width()) { + continue; + } - image::Image weightMap(_panorama.Width(), _panorama.Height()); - for(int y = 0; y < _panorama.Height(); ++y) - { - for(int x = 0; x < _panorama.Width(); ++x) - { - int dist = distanceMap(y, x); - - weightMap(y, x) = 0.0f; - if (dist > 0) - { - float fdist = sqrtf(float(dist)); - weightMap(y, x) = std::min(1.0f, std::max(0.0f, 1.0f - sigmoid(fdist, 100, 50))); + if (!_mask(pano_i, pano_j)) { + _panorama(pano_i, pano_j) = color(i, j); + _weightmap(pano_i, pano_j) = inputWeights(i, j); + } + else { + float wp = _weightmap(pano_i, pano_j); + float wc = inputWeights(i, j); + float wtotal = wp + 
wc; + wp /= wtotal; + wc /= wtotal; + + _panorama(pano_i, pano_j).r() = wp * _panorama(pano_i, pano_j).r() + wc * color(i, j).r(); + _panorama(pano_i, pano_j).g() = wp * _panorama(pano_i, pano_j).g() + wc * color(i, j).g(); + _panorama(pano_i, pano_j).b() = wp * _panorama(pano_i, pano_j).b() + wc * color(i, j).b(); + + _weightmap(pano_i, pano_j) = wtotal; } + + _mask(pano_i, pano_j) = true; } } - LaplacianPyramid pyramid_panorama(_panorama.Width(), _panorama.Height()); - pyramid_panorama.process(_panorama, _mask); - pyramid_panorama.blend(_panorama, _mask, pyramid_camera, weightMap); + return true; + } - for (int i = 0; i < _panorama.Height(); i++) { - for (int j = 0; j < _panorama.Width(); j++) { - if (!_mask(i, j)) { - _panorama(i, j).r() = 0.0f; - _panorama(i, j).g() = 0.0f; - _panorama(i, j).b() = 0.0f; - } +protected: + aliceVision::image::Image _weightmap; +}; + +class LaplacianCompositer : public AlphaCompositer { +public: + + LaplacianCompositer(size_t outputWidth, size_t outputHeight) : + AlphaCompositer(outputWidth, outputHeight), + _maxweightmap(outputWidth, outputHeight, true, 0.0f) { + + } + + virtual bool append(const Warper & warper, const AlphaBuilder & alpha) { + + const auto & weights = alpha.getWeights(); + + size_t ox = warper.getOffsetX(); + size_t oy = warper.getOffsetY(); + + for (int i = 0; i < weights.Height(); i++) { + for (int j = 0; j < weights.Width(); j++) { + float cw = weights(i, j); + float pw = _weightmap(oy + i, ox + j); + + //if (cw > ) } } + //Eigen::MatrixXf kernel = gaussian_kernel(5, 1.0f); + return true; } -}; +private: + aliceVision::image::Image _maxweightmap; +}; int main(int argc, char **argv) { - /** * Program description */ @@ -1106,7 +959,7 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ - std::pair panoramaSize = {2048, 1024}; + std::pair panoramaSize = {1024, 512}; po::options_description optionalParams("Optional parameters"); allParams.add(optionalParams); @@ -1173,7 +1026,7 @@ int 
main(int argc, char **argv) { /** * Create compositer */ - LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** @@ -1198,9 +1051,8 @@ int main(int argc, char **argv) { const geometry::Pose3 & camPose = sfmData.getPose(view).getTransform(); const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - std::cout << camPose.rotation() << std::endl; - /*if (pos == 8) */{ + /*if (pos == 3 || pos == 7 || pos == 15) */{ /** * Prepare coordinates map */ @@ -1221,14 +1073,20 @@ int main(int argc, char **argv) { warper.warp(map, source); + + AlphaBuilder alphabuilder; + alphabuilder.build(map, intrinsic); + + /** *Composite image into output */ - compositer.append(warper); + compositer.append(warper, alphabuilder); + { const aliceVision::image::Image & panorama = compositer.getPanorama(); char filename[512]; - sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); + sprintf(filename, "%s_intermediate_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); } } From 640731fa40cfc76c08a89095970db3e826736cf1 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Fri, 18 Oct 2019 11:11:44 +0200 Subject: [PATCH 077/174] wip borders --- src/aliceVision/image/pixelTypes.hpp | 23 ++ .../pipeline/main_panoramaStitching.cpp | 246 ++++++++++++++++-- 2 files changed, 249 insertions(+), 20 deletions(-) diff --git a/src/aliceVision/image/pixelTypes.hpp b/src/aliceVision/image/pixelTypes.hpp index 12f9767328..b9692cf28f 100644 --- a/src/aliceVision/image/pixelTypes.hpp +++ b/src/aliceVision/image/pixelTypes.hpp @@ -147,6 +147,29 @@ namespace aliceVision return os; } + /** + * @brief Elementwise substraction + * @param other the other element to substract + * @return Rgb color after substraction + * @note This does not modify the Rgb value (ie: only return a 
modified copy) + */ + inline Rgb operator -( const Rgb& other ) const + { + return Rgb( ((*this)(0) - other(0)), ((*this)(1) - other(1)), ((*this)(2) - other(2))); + } + + /** + * @brief Elementwise addition + * @param other the other element to substract + * @return Rgb color after addition + * @note This does not modify the Rgb value (ie: only return a modified copy) + */ + inline Rgb operator +( const Rgb& other ) const + { + return Rgb( (*this(0) + other(0)), (*this(1) + other(1)), (*this(2) + other(2))); + } + + /** * @brief scalar division * @param val Scalar divisor factor diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 397465c14d..6daeae6f94 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -440,6 +440,11 @@ class CoordinatesMap { size_t min_x = panoramaSize.first; size_t min_y = panoramaSize.second; + std::vector counts_per_column; + for (size_t x = 0; x < panoramaSize.first; x++) { + counts_per_column.push_back(0); + } + for (size_t y = 0; y < panoramaSize.second; y++) { for (size_t x = 0; x < panoramaSize.first; x++) { @@ -469,6 +474,7 @@ class CoordinatesMap { buffer_coordinates(y, x) = pix_disto; buffer_mask(y, x) = 1; + counts_per_column[x]++; min_x = std::min(x, min_x); min_y = std::min(y, min_y); @@ -477,22 +483,78 @@ class CoordinatesMap { } } - _offset_x = min_x; - _offset_y = min_y; + bool had_data = false; + std::vector> blocks; + std::pair block; + for (int x = 0; x < panoramaSize.first; x++) { + + if (counts_per_column[x] > 0) { + if (!had_data) { + block.first = x; + } + block.second = x; + had_data = true; + } + else { + if (had_data) { + blocks.push_back(block); + } + had_data = false; + } + } + if (had_data) { + blocks.push_back(block); + } + + bool divided = false; + if (blocks.size() == 2) { + if (blocks[0].first == 0 && blocks[1].second == panoramaSize.first - 1) { + divided = true; + } + } + + if 
(!divided) { + _offset_x = min_x; + _offset_y = min_y; + + _real_width = max_x - min_x + 1; + _real_height = max_y - min_y + 1; + + /* Make sure the buffer is a power of 2 for potential pyramids*/ + size_t rectified_width = pow(2.0, ceil(log2(double(_real_width)))); + size_t rectified_height = pow(2.0, ceil(log2(double(_real_height)))); + + /* Resize buffers */ + _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); + _mask = aliceVision::image::Image(rectified_width, rectified_height, true, 0); + + _coordinates.block(0, 0, _real_height, _real_width) = buffer_coordinates.block(_offset_y, _offset_x, _real_height, _real_width); + _mask.block(0, 0, _real_height, _real_width) = buffer_mask.block(_offset_y, _offset_x, _real_height, _real_width); + } + else { + _offset_x = blocks[1].first; + _offset_y = min_y; + + size_t width_block_1 = (blocks[0].second - blocks[0].first + 1); + size_t width_block_2 = (blocks[1].second - blocks[1].first + 1); - _real_width = max_x - min_x + 1; - _real_height = max_y - min_y + 1; + _real_width = width_block_1 + width_block_2; + _real_height = max_y - min_y + 1; - /* Make sure the buffer is a power of 2 for potential pyramids*/ - size_t rectified_width = pow(2.0, ceil(log2(double(_real_width)))); - size_t rectified_height = pow(2.0, ceil(log2(double(_real_height)))); + /* Make sure the buffer is a power of 2 for potential pyramids*/ + size_t rectified_width = pow(2.0, ceil(log2(double(_real_width)))); + size_t rectified_height = pow(2.0, ceil(log2(double(_real_height)))); - /* Resize buffers */ - _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); - _mask = aliceVision::image::Image(rectified_width, rectified_height, true, 0); + /* Resize buffers */ + _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); + _mask = aliceVision::image::Image(rectified_width, rectified_height, true, 0); - _coordinates.block(0, 0, _real_height, _real_width) = 
buffer_coordinates.block(_offset_y, _offset_x, _real_height, _real_width); - _mask.block(0, 0, _real_height, _real_width) = buffer_mask.block(_offset_y, _offset_x, _real_height, _real_width); + + _coordinates.block(0, 0, _real_height, width_block_2) = buffer_coordinates.block(_offset_y, _offset_x, _real_height, width_block_2); + _coordinates.block(0, width_block_2, _real_height, width_block_1) = buffer_coordinates.block(_offset_y, 0, _real_height, width_block_1); + _mask.block(0, 0, _real_height, width_block_2) = buffer_mask.block(_offset_y, _offset_x, _real_height, width_block_2); + _mask.block(0, width_block_2, _real_height, width_block_1) = buffer_mask.block(_offset_y, 0, _real_height, width_block_1); + } return true; } @@ -908,25 +970,162 @@ class LaplacianCompositer : public AlphaCompositer { AlphaCompositer(outputWidth, outputHeight), _maxweightmap(outputWidth, outputHeight, true, 0.0f) { + } virtual bool append(const Warper & warper, const AlphaBuilder & alpha) { - const auto & weights = alpha.getWeights(); + const aliceVision::image::Image & camera_weights = alpha.getWeights(); + const aliceVision::image::Image & camera_mask = warper.getMask(); + const aliceVision::image::Image & camera_color = warper.getColor(); size_t ox = warper.getOffsetX(); size_t oy = warper.getOffsetY(); - for (int i = 0; i < weights.Height(); i++) { - for (int j = 0; j < weights.Width(); j++) { - float cw = weights(i, j); + /** + * Create a copy of panorama related pixels + */ + aliceVision::image::Image panorama_subcolor(camera_weights.Width(), camera_weights.Height(), true, image::RGBfColor(0.0f, 0.0f, 0.0f)); + aliceVision::image::Image panorama_submask(camera_weights.Width(), camera_weights.Height(), true, false); + panorama_subcolor.block(0, 0, warper.getHeight(), warper.getWidth()) = _panorama.block(oy, ox, warper.getHeight(), warper.getWidth()); + panorama_submask.block(0, 0, warper.getHeight(), warper.getWidth()) = _mask.block(oy, ox, warper.getHeight(), warper.getWidth()); 
+ + /** + * Compute optimal scale + * The smallest level will be at least of size min_size + */ + size_t min_dim = std::min(camera_weights.Width(), camera_weights.Height()); + size_t min_size = 4; + size_t limit_scales = 10; + size_t scales = std::min(limit_scales, static_cast(floor(log2(double(min_dim) / float(min_size))))); + + /** + * Create buffers + */ + std::vector> camera_weightmaps(scales); + std::vector> camera_colors(scales); + std::vector> camera_differences(scales); + std::vector> panorama_colors(scales); + std::vector> panorama_differences(scales); + std::vector> blended_differences(scales); + std::vector> blended_mask(scales); + + for (int level = 0; level < scales; level++) { + camera_weightmaps[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); + camera_colors[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); + camera_differences[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); + panorama_colors[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); + panorama_differences[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); + blended_differences[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); + blended_mask[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); + } + + /* Build max weight map for first level */ + auto & maxweightmap = camera_weightmaps[0]; + + for (int i = 0; i < camera_weights.Height(); i++) { + for (int j = 0; j < camera_weights.Width(); j++) { + + maxweightmap(i, j) = 0.0f; + + if (!camera_mask(i, j)) { + continue; + } + + float cw = camera_weights(i, j); float pw = _weightmap(oy + i, ox + j); - //if (cw > ) + if (cw > pw) { + maxweightmap(i, j) = 1.0f; + _weightmap(oy + i, ox + j) = cw; + } } } - //Eigen::MatrixXf kernel = gaussian_kernel(5, 1.0f); + Eigen::MatrixXf kernel = gaussian_kernel(5, 
1.0f); + + /*Create scales*/ + camera_colors[0] = camera_color; + panorama_colors[0] = panorama_subcolor; + for (int level = 1; level < scales; level++) { + convolve(camera_weightmaps[level], camera_weightmaps[level - 1], camera_mask, kernel); + convolve(camera_colors[level], camera_colors[level - 1], camera_mask, kernel); + convolve(panorama_colors[level], panorama_colors[level - 1], panorama_submask, kernel); + } + + + /*Compute differences*/ + for (int level = 0; level < scales - 1; level++) { + camera_differences[level] = camera_colors[level] - camera_colors[level + 1]; + panorama_differences[level] = panorama_colors[level] - panorama_colors[level + 1]; + } + camera_differences[scales - 1] = camera_colors[scales - 1]; + panorama_differences[scales - 1] = panorama_colors[scales - 1]; + + /*Blend*/ + for (int level = 0; level < scales; level++) { + const aliceVision::image::Image & imgpano = panorama_differences[level]; + const aliceVision::image::Image & imgcam = camera_differences[level]; + const aliceVision::image::Image & weights = camera_weightmaps[level]; + aliceVision::image::Image & imgblend = blended_differences[level]; + aliceVision::image::Image & maskblend = blended_mask[level]; + + for (int i = 0; i < camera_weights.Height(); i++) { + for (int j = 0; j < camera_weights.Width(); j++) { + if (camera_mask(i, j)) { + if (panorama_submask(i, j)) { + float w = weights(i, j); + float mw = 1.0f - w; + imgblend(i, j).r() = mw * imgpano(i, j).r() + w * imgcam(i, j).r(); + imgblend(i, j).g() = mw * imgpano(i, j).g() + w * imgcam(i, j).g(); + imgblend(i, j).b() = mw * imgpano(i, j).b() + w * imgcam(i, j).b(); + maskblend(i, j) = 1; + } + else { + imgblend(i, j) = imgcam(i, j); + maskblend(i, j) = 1; + } + } + else { + if (panorama_submask(i, j)) { + imgblend(i, j) = imgpano(i, j); + maskblend(i, j) = 1; + } + else { + maskblend(i, j) = 0; + } + } + } + } + } + + /*Rebuild*/ + _panorama.block(oy, ox, warper.getHeight(), 
warper.getWidth()).fill(image::RGBfColor(0.0f, 0.0f, 0.0f)); + _mask.block(oy, ox, warper.getHeight(), warper.getWidth()).fill(0); + + for (int level = scales - 1; level >= 0; level--) { + + aliceVision::image::Image & imgblend = blended_differences[level]; + aliceVision::image::Image & maskblend = blended_mask[level]; + + for (int i = 0; i < warper.getHeight(); i++) { + for (int j = 0; j < warper.getWidth(); j++) { + if (maskblend(i, j)) { + _panorama(oy + i, ox + j) += imgblend(i, j); + _mask(oy + i, ox + j) = 1; + } + } + } + } + + /*auto & tosave = camera_weightmaps[0]; + for (int i = 0; i < camera_weights.Height(); i++) { + for (int j = 0; j < camera_weights.Width(); j++) { + tosave(i, j) *= 100.0f; + } + }*/ + /*image::writeImage("/home/servantf/test.png", camera_colors[0], image::EImageColorSpace::SRGB); + image::writeImage("/home/servantf/test2.png", camera_colors[scales - 1], image::EImageColorSpace::SRGB);*/ return true; } @@ -1026,7 +1225,7 @@ int main(int argc, char **argv) { /** * Create compositer */ - AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** @@ -1052,7 +1251,7 @@ int main(int argc, char **argv) { const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - /*if (pos == 3 || pos == 7 || pos == 15) */{ + if (pos == 3 || pos == 7 || pos == 15) { /** * Prepare coordinates map */ @@ -1089,6 +1288,13 @@ int main(int argc, char **argv) { sprintf(filename, "%s_intermediate_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); } + + { + const aliceVision::image::Image & panorama = warper.getColor(); + char filename[512]; + sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); + image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); + } } pos++; } From e5173ff87d1356ee5ffb9baeb27682cbd2f5dbb2 
Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Mon, 21 Oct 2019 07:27:09 +0200 Subject: [PATCH 078/174] panorama --- .../pipeline/main_panoramaStitching.cpp | 65 +++++++++++++++---- 1 file changed, 53 insertions(+), 12 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 6daeae6f94..2a68d9e6ec 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -981,14 +981,29 @@ class LaplacianCompositer : public AlphaCompositer { size_t ox = warper.getOffsetX(); size_t oy = warper.getOffsetY(); + size_t pwidth = _panorama.Width(); + size_t pheight = _panorama.Height(); /** * Create a copy of panorama related pixels */ aliceVision::image::Image panorama_subcolor(camera_weights.Width(), camera_weights.Height(), true, image::RGBfColor(0.0f, 0.0f, 0.0f)); aliceVision::image::Image panorama_submask(camera_weights.Width(), camera_weights.Height(), true, false); - panorama_subcolor.block(0, 0, warper.getHeight(), warper.getWidth()) = _panorama.block(oy, ox, warper.getHeight(), warper.getWidth()); - panorama_submask.block(0, 0, warper.getHeight(), warper.getWidth()) = _mask.block(oy, ox, warper.getHeight(), warper.getWidth()); + + if (ox + warper.getWidth() > pwidth) { + + int left_width = ox + warper.getWidth() - pwidth; + int right_width = warper.getWidth() - left_width; + panorama_subcolor.block(0, 0, warper.getHeight(), right_width) = _panorama.block(oy, ox, warper.getHeight(), right_width); + panorama_subcolor.block(0, right_width, warper.getHeight(), left_width) = _panorama.block(oy, 0, warper.getHeight(), left_width); + panorama_submask.block(0, 0, warper.getHeight(), right_width) = _mask.block(oy, ox, warper.getHeight(), right_width); + panorama_submask.block(0, right_width, warper.getHeight(), left_width) = _mask.block(oy, 0, warper.getHeight(), left_width); + } + else { + panorama_subcolor.block(0, 0, warper.getHeight(), 
warper.getWidth()) = _panorama.block(oy, ox, warper.getHeight(), warper.getWidth()); + panorama_submask.block(0, 0, warper.getHeight(), warper.getWidth()) = _mask.block(oy, ox, warper.getHeight(), warper.getWidth()); + } + /** * Compute optimal scale @@ -1033,16 +1048,23 @@ class LaplacianCompositer : public AlphaCompositer { } float cw = camera_weights(i, j); - float pw = _weightmap(oy + i, ox + j); + + int pano_y = oy + i; + int pano_x = ox + j; + if (pano_x >= pwidth) { + pano_x = pano_x - pwidth; + } + + float pw = _weightmap(pano_y, pano_x); if (cw > pw) { maxweightmap(i, j) = 1.0f; - _weightmap(oy + i, ox + j) = cw; + _weightmap(pano_y, pano_x) = cw; } } } - Eigen::MatrixXf kernel = gaussian_kernel(5, 1.0f); + Eigen::MatrixXf kernel = gaussian_kernel(5, 2.0f); /*Create scales*/ camera_colors[0] = camera_color; @@ -1100,9 +1122,6 @@ class LaplacianCompositer : public AlphaCompositer { } /*Rebuild*/ - _panorama.block(oy, ox, warper.getHeight(), warper.getWidth()).fill(image::RGBfColor(0.0f, 0.0f, 0.0f)); - _mask.block(oy, ox, warper.getHeight(), warper.getWidth()).fill(0); - for (int level = scales - 1; level >= 0; level--) { aliceVision::image::Image & imgblend = blended_differences[level]; @@ -1110,9 +1129,21 @@ class LaplacianCompositer : public AlphaCompositer { for (int i = 0; i < warper.getHeight(); i++) { for (int j = 0; j < warper.getWidth(); j++) { + + int pano_y = oy + i; + int pano_x = ox + j; + if (pano_x >= pwidth) { + pano_x = pano_x - pwidth; + } + if (maskblend(i, j)) { - _panorama(oy + i, ox + j) += imgblend(i, j); - _mask(oy + i, ox + j) = 1; + if (level == scales - 1) { + _panorama(pano_y, pano_x) = image::RGBfColor(0.0f); + _mask(pano_y, pano_x) = 0; + } + + _panorama(pano_y, pano_x) += imgblend(i, j); + _mask(pano_y, pano_x) = 1; } } } @@ -1251,7 +1282,7 @@ int main(int argc, char **argv) { const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - if (pos == 3 || pos == 7 || pos == 15) { + /*if (pos == 3 
|| pos == 7 || pos == 15)*/ { /** * Prepare coordinates map */ @@ -1265,13 +1296,23 @@ int main(int argc, char **argv) { image::Image source; image::readImage(imagePath, source, image::EImageColorSpace::LINEAR); + float min = 10000.0; + float max = -10000.0; + for (int i = 0; i < source.Height(); i++) { + for (int j = 0; j < source.Width(); j++) { + min = std::min(min, source(i, j).r()); + max = std::max(max, source(i, j).r()); + } + } + + + /** * Warp image */ GaussianWarper warper; warper.warp(map, source); - AlphaBuilder alphabuilder; alphabuilder.build(map, intrinsic); From 0629071566da2da64e15dd10e05aba28df7465ea Mon Sep 17 00:00:00 2001 From: fabienservant Date: Mon, 21 Oct 2019 07:33:30 +0200 Subject: [PATCH 079/174] temporary hack for hdr rotation problem --- src/software/convert/main_convertLDRToHDR.cpp | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 2040ac4c15..92fdd17178 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -493,9 +493,19 @@ int main(int argc, char** argv) ALICEVISION_LOG_INFO("Reading " << imagePath); image::readImage(imagePath, ldrImages[i], loadColorSpace); - metadatas[i] = image::readImageMetadata(imagePath); + if (ldrImages[i].Height() < ldrImages[i].Width()) { + image::Image rotate(ldrImages[i].Height(), ldrImages[i].Width()); + for (int k = 0; k < rotate.Height(); k++) { + for (int l = 0; l < rotate.Width(); l++) { + rotate(k, l) = ldrImages[i](l, rotate.Height() - 1 - k); + } + } + ldrImages[i] = rotate; + } + + // Debevec and Robertson algorithms use shutter speed as ev value // TODO: in the future, we should use EVs instead of just shutter speed. 
try From 1b3150edea8b2768b300e1ef40f27a5f91792548 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Tue, 22 Oct 2019 10:33:18 +0200 Subject: [PATCH 080/174] Heuristics to lower memory usage for coordinates evaluation --- src/aliceVision/camera/IntrinsicBase.hpp | 13 + src/aliceVision/camera/PinholeRadial.hpp | 16 + .../pipeline/main_panoramaStitching.cpp | 314 ++++++++++++------ 3 files changed, 249 insertions(+), 94 deletions(-) diff --git a/src/aliceVision/camera/IntrinsicBase.hpp b/src/aliceVision/camera/IntrinsicBase.hpp index ef17eb47f5..9766569fe3 100644 --- a/src/aliceVision/camera/IntrinsicBase.hpp +++ b/src/aliceVision/camera/IntrinsicBase.hpp @@ -370,6 +370,19 @@ struct IntrinsicBase return true; } + /** + * @brief Assuming the distortion is a function of radius, estimate the + * maximal undistorted radius for a range of distorted radius. + * @param min_radius the minimal radius to consider + * @param max_radius the maximal radius to consider + * @return the maximal undistorted radius + */ + virtual float getMaximalDistortion(double min_radius, double max_radius) const { + + /*Without distortion, obvious*/ + return max_radius; + } + /** * @brief Generate an unique Hash from the camera parameters (used for grouping) * @return Unique Hash from the camera parameters diff --git a/src/aliceVision/camera/PinholeRadial.hpp b/src/aliceVision/camera/PinholeRadial.hpp index 2ef440dfa2..6e5fc945e6 100644 --- a/src/aliceVision/camera/PinholeRadial.hpp +++ b/src/aliceVision/camera/PinholeRadial.hpp @@ -90,6 +90,22 @@ class PinholeRadialK1 : public Pinhole return radius * p; } + /** + * @brief Assuming the distortion is a function of radius, estimate the + * maximal undistorted radius for a range of distorted radius. 
+ * @param min_radius the minimal radius to consider + * @param max_radius the maximal radius to consider + * @return the maximal undistorted radius + */ + virtual float getMaximalDistortion(double min_radius, double max_radius) const override { + + float ud = std::sqrt(radial_distortion::bisection_Radius_Solve(_distortionParams, max_radius * max_radius, distoFunctor)); + + + /*Without distortion, obvious*/ + return ud; + } + /// Return the un-distorted pixel (with removed distortion) virtual Vec2 get_ud_pixel(const Vec2& p) const override { diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 2a68d9e6ec..9bc7885379 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -432,24 +432,168 @@ class CoordinatesMap { */ bool build(const std::pair & panoramaSize, const geometry::Pose3 & pose, const aliceVision::camera::IntrinsicBase & intrinsics) { - aliceVision::image::Image buffer_coordinates(panoramaSize.first, panoramaSize.second, false); - aliceVision::image::Image buffer_mask(panoramaSize.first, panoramaSize.second, true, false); + /** + * First of all : + * Compute a bouding box for this image in the panorama. + * This bounding box is not optimal and may be too large (not too small). 
+ */ + + /*Estimate distorted maximal distance from optical center*/ + Vec2 pts[] = {{0.0f, 0.0f}, {intrinsics.w(), 0.0f}, {intrinsics.w(), intrinsics.h()}, {0.0f, intrinsics.h()}}; + float max_radius = 0.0; + for (int i = 0; i < 4; i++) { + + Vec2 ptmeter = intrinsics.ima2cam(pts[i]); + float radius = ptmeter.norm(); + max_radius = std::max(max_radius, radius); + } + + /* Estimate undistorted maximal distance from optical center */ + float max_radius_distorted = intrinsics.getMaximalDistortion(0.0, max_radius); + Vec2 pts_radius[] = { + {-max_radius_distorted, -max_radius_distorted}, + {max_radius_distorted, -max_radius_distorted}, + {max_radius_distorted, max_radius_distorted}, + {-max_radius_distorted, max_radius_distorted} + }; + + + /* + Transform bounding box into the panorama frame. + Point are on a unit sphere. + */ + Vec3 rotated_pts[4]; + for (int i = 0; i < 4; i++) { + Vec3 pt3d = pts_radius[i].homogeneous().normalized(); + rotated_pts[i] = pose.rotation().transpose() * pt3d; + } + + + /* Check if the image is across the two side of panorama (angle is modulo 2pi)*/ + bool crossH; + double sum = 0.0; + for (int i = 0; i < 4; i++) { + int i2 = i + 1; + if (i2 > 3) i2 = 0; + + bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); + if (i == 0) crossH = cross; + else crossH |= cross; + } + + + /* Compute optimal bounds for vertical */ + int bound_top = panoramaSize.second; + int bound_bottom = 0; + for (int i = 0; i < 4; i++) { + int i2 = i + 1; + if (i2 > 3) i2 = 0; + + Vec3 extremaY = getExtremaY(rotated_pts[i], rotated_pts[i2]); + + Vec2 res; + res = SphericalMapping::toEquirectangular(extremaY, panoramaSize.first, panoramaSize.second); + bound_top = std::min(int(floor(res(1))), bound_top); + bound_bottom = std::max(int(ceil(res(1))), bound_bottom); + + res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + bound_top = std::min(int(floor(res(1))), bound_top); + bound_bottom = 
std::max(int(ceil(res(1))), bound_bottom); + } + + /* + Check if our region circumscribe a pole of the sphere : + Check that the region projected on the Y=0 plane contains the point (0, 0) + */ + bool pole = isPoleInTriangle(rotated_pts[0], rotated_pts[1], rotated_pts[3]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[2], rotated_pts[3]); + if (pole) { + + Vec3 normal = (rotated_pts[1] - rotated_pts[0]).cross(rotated_pts[3] - rotated_pts[0]); + if (normal(1) > 0) { + //Lower pole + bound_bottom = panoramaSize.second - 1; + } + else { + //upper pole + bound_top = 0; + } + } + + int height = bound_bottom - bound_top + 1; + + + int bound_left, bound_right; + int width; + + if (pole) { + bound_left = 0; + bound_right = panoramaSize.first - 1; + width = bound_right - bound_left + 1; + } + else if (crossH) { + + bound_left = panoramaSize.first; + bound_right = 0; + for (int i = 0; i < 4; i++) { + int i2 = i + 1; + if (i2 > 3) i2 = 0; + + if (crossHorizontalLoop(rotated_pts[i], rotated_pts[i2])) { + Vec2 res_left = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + Vec2 res_right = SphericalMapping::toEquirectangular(rotated_pts[i2], panoramaSize.first, panoramaSize.second); + + if (res_left(0) > res_right(0)) { + //border --> right ; Left --> border + bound_left = std::min(bound_left, int(floor(res_left(0)))); + bound_right = std::max(bound_right, int(ceil(res_right(0)))); + } + else { + //border --> left ; right --> border + bound_left = std::min(bound_left, int(floor(res_right(0)))); + bound_right = std::max(bound_right, int(ceil(res_left(0)))); + } + } + } + + width = bound_right + 1 + (panoramaSize.first - bound_left + 1); + + } else { + bound_left = panoramaSize.first; + bound_right = 0; + + for (int i = 0; i < 4; i++) { + Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + bound_left = std::min(int(floor(res(0))), bound_left); + bound_right = 
std::max(int(ceil(res(0))), bound_right); + } + + bound_left = 0; + bound_right = panoramaSize.first; + + width = bound_right - bound_left + 1; + } + + + aliceVision::image::Image buffer_coordinates(width, height, false); + aliceVision::image::Image buffer_mask(width, height, true, false); + + size_t max_x = 0; size_t max_y = 0; size_t min_x = panoramaSize.first; size_t min_y = panoramaSize.second; - std::vector counts_per_column; - for (size_t x = 0; x < panoramaSize.first; x++) { - counts_per_column.push_back(0); - } + for (size_t y = 0; y < height; y++) { - for (size_t y = 0; y < panoramaSize.second; y++) { + size_t cy = y + bound_top; - for (size_t x = 0; x < panoramaSize.first; x++) { + for (size_t x = 0; x < width; x++) { - Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(x,y), panoramaSize.first, panoramaSize.second); + size_t cx = x + bound_left; + + Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(cx, cy), panoramaSize.first, panoramaSize.second); /** * Check that this ray should be visible. 
@@ -472,9 +616,9 @@ class CoordinatesMap { continue; } + buffer_coordinates(y, x) = pix_disto; buffer_mask(y, x) = 1; - counts_per_column[x]++; min_x = std::min(x, min_x); min_y = std::min(y, min_y); @@ -482,79 +626,22 @@ class CoordinatesMap { max_y = std::max(y, max_y); } } + + _offset_x = bound_left + min_x; + _offset_y = bound_top + min_y; + _real_width = max_x - min_x + 1; + _real_height = max_y - min_y + 1; - bool had_data = false; - std::vector> blocks; - std::pair block; - for (int x = 0; x < panoramaSize.first; x++) { - - if (counts_per_column[x] > 0) { - if (!had_data) { - block.first = x; - } - block.second = x; - had_data = true; - } - else { - if (had_data) { - blocks.push_back(block); - } - had_data = false; - } - } - if (had_data) { - blocks.push_back(block); - } - - bool divided = false; - if (blocks.size() == 2) { - if (blocks[0].first == 0 && blocks[1].second == panoramaSize.first - 1) { - divided = true; - } - } - - if (!divided) { - _offset_x = min_x; - _offset_y = min_y; - - _real_width = max_x - min_x + 1; - _real_height = max_y - min_y + 1; - - /* Make sure the buffer is a power of 2 for potential pyramids*/ - size_t rectified_width = pow(2.0, ceil(log2(double(_real_width)))); - size_t rectified_height = pow(2.0, ceil(log2(double(_real_height)))); - - /* Resize buffers */ - _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); - _mask = aliceVision::image::Image(rectified_width, rectified_height, true, 0); - - _coordinates.block(0, 0, _real_height, _real_width) = buffer_coordinates.block(_offset_y, _offset_x, _real_height, _real_width); - _mask.block(0, 0, _real_height, _real_width) = buffer_mask.block(_offset_y, _offset_x, _real_height, _real_width); - } - else { - _offset_x = blocks[1].first; - _offset_y = min_y; - - size_t width_block_1 = (blocks[0].second - blocks[0].first + 1); - size_t width_block_2 = (blocks[1].second - blocks[1].first + 1); - - _real_width = width_block_1 + width_block_2; - _real_height = 
max_y - min_y + 1; - - /* Make sure the buffer is a power of 2 for potential pyramids*/ - size_t rectified_width = pow(2.0, ceil(log2(double(_real_width)))); - size_t rectified_height = pow(2.0, ceil(log2(double(_real_height)))); + /* Make sure the buffer is a power of 2 for potential pyramids*/ + size_t rectified_width = pow(2.0, ceil(log2(double(_real_width)))); + size_t rectified_height = pow(2.0, ceil(log2(double(_real_height)))); /* Resize buffers */ - _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); - _mask = aliceVision::image::Image(rectified_width, rectified_height, true, 0); - + _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); + _mask = aliceVision::image::Image(rectified_width, rectified_height, true, 0); - _coordinates.block(0, 0, _real_height, width_block_2) = buffer_coordinates.block(_offset_y, _offset_x, _real_height, width_block_2); - _coordinates.block(0, width_block_2, _real_height, width_block_1) = buffer_coordinates.block(_offset_y, 0, _real_height, width_block_1); - _mask.block(0, 0, _real_height, width_block_2) = buffer_mask.block(_offset_y, _offset_x, _real_height, width_block_2); - _mask.block(0, width_block_2, _real_height, width_block_1) = buffer_mask.block(_offset_y, 0, _real_height, width_block_1); - } + _coordinates.block(0, 0, _real_height, _real_width) = buffer_coordinates.block(min_y, min_x, _real_height, _real_width); + _mask.block(0, 0, _real_height, _real_width) = buffer_mask.block(min_y, min_x, _real_height, _real_width); return true; } @@ -583,6 +670,56 @@ class CoordinatesMap { return _mask; } +private: + Vec3 getExtremaY(const Vec3 & pt1, const Vec3 & pt2) { + Vec3 delta = pt2 - pt1; + double dx = delta(0); + double dy = delta(1); + double dz = delta(2); + double sx = pt1(0); + double sy = pt1(1); + double sz = pt1(2); + + double ot_y = -(dx*sx*sy - (dy*sx)*(dy*sx) - (dy*sz)*(dy*sz) + dz*sy*sz)/(dx*dx*sy - dx*dy*sx - dy*dz*sz + dz*dz*sy); + + Vec3 pt_extrema 
= pt1 + ot_y * delta; + + return pt_extrema.normalized(); + } + + bool crossHorizontalLoop(const Vec3 & pt1, const Vec3 & pt2) { + Vec3 direction = pt2 - pt1; + + /*Vertical line*/ + if (std::abs(direction(0)) < 1e-12) { + return false; + } + + double t = - pt1(0) / direction(0); + Vec3 cross = pt1 + direction * t; + + if (t >= 0.0 && t < 1.0) { + if (cross(2) < 0.0) { + return true; + } + } + + return false; + } + + bool isPoleInTriangle(const Vec3 & pt1, const Vec3 & pt2, const Vec3 & pt3) { + + double a = (pt2.x()*pt3.z() - pt3.x()*pt2.z())/(pt1.x()*pt2.z() - pt1.x()*pt3.z() - pt2.x()*pt1.z() + pt2.x()*pt3.z() + pt3.x()*pt1.z() - pt3.x()*pt2.z()); + double b = (-pt1.x()*pt3.z() + pt3.x()*pt1.z())/(pt1.x()*pt2.z() - pt1.x()*pt3.z() - pt2.x()*pt1.z() + pt2.x()*pt3.z() + pt3.x()*pt1.z() - pt3.x()*pt2.z()); + double c = 1.0 - a - b; + + if (a < 0.0 || a > 1.0) return false; + if (b < 0.0 || b > 1.0) return false; + if (c < 0.0 || c > 1.0) return false; + + return true; + } + private: size_t _offset_x = 0; size_t _offset_y = 0; @@ -1256,7 +1393,7 @@ int main(int argc, char **argv) { /** * Create compositer */ - LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** @@ -1282,29 +1419,20 @@ int main(int argc, char **argv) { const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - /*if (pos == 3 || pos == 7 || pos == 15)*/ { + { /** * Prepare coordinates map */ CoordinatesMap map; map.build(panoramaSize, camPose, intrinsic); + /** * Load image and convert it to linear colorspace */ std::string imagePath = view.getImagePath(); image::Image source; image::readImage(imagePath, source, image::EImageColorSpace::LINEAR); - - float min = 10000.0; - float max = -10000.0; - for (int i = 0; i < source.Height(); i++) { - for (int j = 0; j < source.Width(); j++) { - min = std::min(min, source(i, j).r()); - max = 
std::max(max, source(i, j).r()); - } - } - /** @@ -1312,12 +1440,10 @@ int main(int argc, char **argv) { */ GaussianWarper warper; warper.warp(map, source); - AlphaBuilder alphabuilder; alphabuilder.build(map, intrinsic); - /** *Composite image into output */ @@ -1330,7 +1456,7 @@ int main(int argc, char **argv) { image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); } - { + { const aliceVision::image::Image & panorama = warper.getColor(); char filename[512]; sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); From 75c1e9d02571e190384490c5b6ae89d7713e190e Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 23 Oct 2019 09:46:40 +0200 Subject: [PATCH 081/174] add heuristics for camera with fov>180 --- .../pipeline/main_panoramaStitching.cpp | 38 ++++++++++++------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 9bc7885379..45a847f0c0 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -575,20 +575,26 @@ class CoordinatesMap { } + /* Effectively compute the warping map */ aliceVision::image::Image buffer_coordinates(width, height, false); aliceVision::image::Image buffer_mask(width, height, true, false); - - size_t max_x = 0; size_t max_y = 0; size_t min_x = panoramaSize.first; size_t min_y = panoramaSize.second; - + + #pragma omp parallel for reduction(min: min_x, min_y) reduction(max: max_x, max_y) for (size_t y = 0; y < height; y++) { size_t cy = y + bound_top; + size_t row_max_x = 0; + size_t row_max_y = 0; + size_t row_min_x = panoramaSize.first; + size_t row_min_y = panoramaSize.second; + + for (size_t x = 0; x < width; x++) { size_t cx = x + bound_left; @@ -620,11 +626,16 @@ class CoordinatesMap { buffer_coordinates(y, x) = pix_disto; buffer_mask(y, x) = 1; - min_x = std::min(x, min_x); - min_y = std::min(y, min_y); - max_x = 
std::max(x, max_x); - max_y = std::max(y, max_y); + row_min_x = std::min(x, row_min_x); + row_min_y = std::min(y, row_min_y); + row_max_x = std::max(x, row_max_x); + row_max_y = std::max(y, row_max_y); } + + min_x = std::min(row_min_x, min_x); + min_y = std::min(row_min_y, min_y); + max_x = std::max(row_max_x, max_x); + max_y = std::max(row_max_y, max_y); } _offset_x = bound_left + min_x; @@ -1068,6 +1079,7 @@ class AlphaCompositer : public Compositer { size_t pano_j = warper.getOffsetX() + j; if (pano_j >= _panorama.Width()) { + pano_j = pano_j - _panorama.Width(); continue; } @@ -1326,7 +1338,7 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ - std::pair panoramaSize = {1024, 512}; + std::pair panoramaSize = {20000, 10000}; po::options_description optionalParams("Optional parameters"); allParams.add(optionalParams); @@ -1393,7 +1405,7 @@ int main(int argc, char **argv) { /** * Create compositer */ - AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** @@ -1419,17 +1431,16 @@ int main(int argc, char **argv) { const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - { /** * Prepare coordinates map */ CoordinatesMap map; map.build(panoramaSize, camPose, intrinsic); - /** * Load image and convert it to linear colorspace */ + ALICEVISION_LOG_INFO("Load image\n"); std::string imagePath = view.getImagePath(); image::Image source; image::readImage(imagePath, source, image::EImageColorSpace::LINEAR); @@ -1438,6 +1449,7 @@ int main(int argc, char **argv) { /** * Warp image */ + ALICEVISION_LOG_INFO("Warp\n"); GaussianWarper warper; warper.warp(map, source); @@ -1447,6 +1459,7 @@ int main(int argc, char **argv) { /** *Composite image into output */ + ALICEVISION_LOG_INFO("Composite\n"); compositer.append(warper, alphabuilder); { @@ -1456,13 +1469,12 @@ int main(int argc, char 
**argv) { image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); } - { + { const aliceVision::image::Image & panorama = warper.getColor(); char filename[512]; sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); } - } pos++; } From f6cf207168f01e709ddaf1807ee74560a653730f Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 23 Oct 2019 10:23:44 +0200 Subject: [PATCH 082/174] add new heuristics for large fov --- .../pipeline/main_panoramaStitching.cpp | 464 ++++++++++++------ 1 file changed, 309 insertions(+), 155 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 45a847f0c0..77e995bef5 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -423,6 +423,14 @@ class GaussianPyramidNoMask { }; class CoordinatesMap { +private: + struct BBox { + int left; + int top; + int width; + int height; + }; + public: /** * Build coordinates map given camera properties @@ -432,152 +440,14 @@ class CoordinatesMap { */ bool build(const std::pair & panoramaSize, const geometry::Pose3 & pose, const aliceVision::camera::IntrinsicBase & intrinsics) { - /** - * First of all : - * Compute a bouding box for this image in the panorama. - * This bounding box is not optimal and may be too large (not too small). 
- */ - - /*Estimate distorted maximal distance from optical center*/ - Vec2 pts[] = {{0.0f, 0.0f}, {intrinsics.w(), 0.0f}, {intrinsics.w(), intrinsics.h()}, {0.0f, intrinsics.h()}}; - float max_radius = 0.0; - for (int i = 0; i < 4; i++) { - - Vec2 ptmeter = intrinsics.ima2cam(pts[i]); - float radius = ptmeter.norm(); - max_radius = std::max(max_radius, radius); - } - - /* Estimate undistorted maximal distance from optical center */ - float max_radius_distorted = intrinsics.getMaximalDistortion(0.0, max_radius); - Vec2 pts_radius[] = { - {-max_radius_distorted, -max_radius_distorted}, - {max_radius_distorted, -max_radius_distorted}, - {max_radius_distorted, max_radius_distorted}, - {-max_radius_distorted, max_radius_distorted} - }; - - - /* - Transform bounding box into the panorama frame. - Point are on a unit sphere. - */ - Vec3 rotated_pts[4]; - for (int i = 0; i < 4; i++) { - Vec3 pt3d = pts_radius[i].homogeneous().normalized(); - rotated_pts[i] = pose.rotation().transpose() * pt3d; - } - - - /* Check if the image is across the two side of panorama (angle is modulo 2pi)*/ - bool crossH; - double sum = 0.0; - for (int i = 0; i < 4; i++) { - int i2 = i + 1; - if (i2 > 3) i2 = 0; - - bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); - if (i == 0) crossH = cross; - else crossH |= cross; - } - - - /* Compute optimal bounds for vertical */ - int bound_top = panoramaSize.second; - int bound_bottom = 0; - for (int i = 0; i < 4; i++) { - int i2 = i + 1; - if (i2 > 3) i2 = 0; - - Vec3 extremaY = getExtremaY(rotated_pts[i], rotated_pts[i2]); - - Vec2 res; - res = SphericalMapping::toEquirectangular(extremaY, panoramaSize.first, panoramaSize.second); - bound_top = std::min(int(floor(res(1))), bound_top); - bound_bottom = std::max(int(ceil(res(1))), bound_bottom); - - res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); - bound_top = std::min(int(floor(res(1))), bound_top); - bound_bottom = 
std::max(int(ceil(res(1))), bound_bottom); - } - - /* - Check if our region circumscribe a pole of the sphere : - Check that the region projected on the Y=0 plane contains the point (0, 0) - */ - bool pole = isPoleInTriangle(rotated_pts[0], rotated_pts[1], rotated_pts[3]); - pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[2], rotated_pts[3]); - if (pole) { - - Vec3 normal = (rotated_pts[1] - rotated_pts[0]).cross(rotated_pts[3] - rotated_pts[0]); - if (normal(1) > 0) { - //Lower pole - bound_bottom = panoramaSize.second - 1; - } - else { - //upper pole - bound_top = 0; - } - } - - int height = bound_bottom - bound_top + 1; - - - int bound_left, bound_right; - int width; - - if (pole) { - bound_left = 0; - bound_right = panoramaSize.first - 1; - width = bound_right - bound_left + 1; - } - else if (crossH) { - - bound_left = panoramaSize.first; - bound_right = 0; - for (int i = 0; i < 4; i++) { - int i2 = i + 1; - if (i2 > 3) i2 = 0; - - if (crossHorizontalLoop(rotated_pts[i], rotated_pts[i2])) { - Vec2 res_left = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); - Vec2 res_right = SphericalMapping::toEquirectangular(rotated_pts[i2], panoramaSize.first, panoramaSize.second); - - if (res_left(0) > res_right(0)) { - //border --> right ; Left --> border - bound_left = std::min(bound_left, int(floor(res_left(0)))); - bound_right = std::max(bound_right, int(ceil(res_right(0)))); - } - else { - //border --> left ; right --> border - bound_left = std::min(bound_left, int(floor(res_right(0)))); - bound_right = std::max(bound_right, int(ceil(res_left(0)))); - } - } - } - - width = bound_right + 1 + (panoramaSize.first - bound_left + 1); - - } else { - bound_left = panoramaSize.first; - bound_right = 0; - - for (int i = 0; i < 4; i++) { - Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); - bound_left = std::min(int(floor(res(0))), bound_left); - bound_right = 
std::max(int(ceil(res(0))), bound_right); - } - - bound_left = 0; - bound_right = panoramaSize.first; - - width = bound_right - bound_left + 1; + BBox coarse_bbox; + if (!computeCoarseBB(coarse_bbox, panoramaSize, pose, intrinsics)) { + return false; } - /* Effectively compute the warping map */ - aliceVision::image::Image buffer_coordinates(width, height, false); - aliceVision::image::Image buffer_mask(width, height, true, false); + aliceVision::image::Image buffer_coordinates(coarse_bbox.width, coarse_bbox.height, false); + aliceVision::image::Image buffer_mask(coarse_bbox.width, coarse_bbox.height, true, 0); size_t max_x = 0; size_t max_y = 0; @@ -585,9 +455,9 @@ class CoordinatesMap { size_t min_y = panoramaSize.second; #pragma omp parallel for reduction(min: min_x, min_y) reduction(max: max_x, max_y) - for (size_t y = 0; y < height; y++) { + for (size_t y = 0; y < coarse_bbox.height; y++) { - size_t cy = y + bound_top; + size_t cy = y + coarse_bbox.top; size_t row_max_x = 0; size_t row_max_y = 0; @@ -595,9 +465,9 @@ class CoordinatesMap { size_t row_min_y = panoramaSize.second; - for (size_t x = 0; x < width; x++) { + for (size_t x = 0; x < coarse_bbox.width; x++) { - size_t cx = x + bound_left; + size_t cx = x + coarse_bbox.left; Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(cx, cy), panoramaSize.first, panoramaSize.second); @@ -638,8 +508,8 @@ class CoordinatesMap { max_y = std::max(row_max_y, max_y); } - _offset_x = bound_left + min_x; - _offset_y = bound_top + min_y; + _offset_x = coarse_bbox.left + min_x; + _offset_y = coarse_bbox.top + min_y; _real_width = max_x - min_x + 1; _real_height = max_y - min_y + 1; @@ -657,6 +527,35 @@ class CoordinatesMap { return true; } + bool computeScale(double & result) { + + std::vector scales; + + for (int i = 0; i < _real_height - 1; i++) { + for (int j = 0; j < _real_width - 1; j++) { + if (!_mask(i, j) || !_mask(i, j + 1) || !_mask(i + 1, j)) { + continue; + } + + double dxx = _coordinates(i, j + 1).x() - 
_coordinates(i, j).x(); + double dxy = _coordinates(i + 1, j).x() - _coordinates(i, j).x(); + double dyx = _coordinates(i, j + 1).y() - _coordinates(i, j).y(); + double dyy = _coordinates(i + 1, j).y() - _coordinates(i, j).y(); + + double det = std::abs(dxx*dyy - dxy*dyx); + scales.push_back(det); + } + } + + if (scales.size() <= 1) return false; + + std::nth_element(scales.begin(), scales.begin() + scales.size() / 2, scales.end()); + result = sqrt(scales[scales.size() / 2]); + + + return true; + } + size_t getOffsetX() const { return _offset_x; } @@ -682,6 +581,203 @@ class CoordinatesMap { } private: + + bool computeCoarseBB(BBox & coarse_bbox, const std::pair & panoramaSize, const geometry::Pose3 & pose, const aliceVision::camera::IntrinsicBase & intrinsics) { + + coarse_bbox.left = 0; + coarse_bbox.top = 0; + coarse_bbox.width = panoramaSize.first; + coarse_bbox.height = panoramaSize.second; + + int bbox_left, bbox_top; + int bbox_right, bbox_bottom; + int bbox_width, bbox_height; + + /*Estimate distorted maximal distance from optical center*/ + Vec2 pts[] = {{0.0f, 0.0f}, {intrinsics.w(), 0.0f}, {intrinsics.w(), intrinsics.h()}, {0.0f, intrinsics.h()}}; + float max_radius = 0.0; + for (int i = 0; i < 4; i++) { + + Vec2 ptmeter = intrinsics.ima2cam(pts[i]); + float radius = ptmeter.norm(); + max_radius = std::max(max_radius, radius); + } + + /* Estimate undistorted maximal distance from optical center */ + float max_radius_distorted = intrinsics.getMaximalDistortion(0.0, max_radius); + + /* + Coarse rectangle bouding box in camera space + We add intermediate points to ensure arclength between 2 points is never more than 180° + */ + Vec2 pts_radius[] = { + {-max_radius_distorted, -max_radius_distorted}, + {0, -max_radius_distorted}, + {max_radius_distorted, -max_radius_distorted}, + {max_radius_distorted, 0}, + {max_radius_distorted, max_radius_distorted}, + {0, max_radius_distorted}, + {-max_radius_distorted, max_radius_distorted}, + {-max_radius_distorted, 0} 
+ }; + + + /* + Transform bounding box into the panorama frame. + Point are on a unit sphere. + */ + Vec3 rotated_pts[8]; + for (int i = 0; i < 8; i++) { + Vec3 pt3d = pts_radius[i].homogeneous().normalized(); + rotated_pts[i] = pose.rotation().transpose() * pt3d; + } + + /* Vertical Default solution : no pole*/ + bbox_top = panoramaSize.second; + bbox_bottom = 0; + + for (int i = 0; i < 8; i++) { + int i2 = i + 1; + if (i2 > 7) i2 = 0; + + Vec3 extremaY = getExtremaY(rotated_pts[i], rotated_pts[i2]); + + Vec2 res; + res = SphericalMapping::toEquirectangular(extremaY, panoramaSize.first, panoramaSize.second); + bbox_top = std::min(int(floor(res(1))), bbox_top); + bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); + + res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + bbox_top = std::min(int(floor(res(1))), bbox_top); + bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); + } + + /* + Check if our region circumscribe a pole of the sphere : + Check that the region projected on the Y=0 plane contains the point (0, 0) + This is a special projection case + */ + bool pole = isPoleInTriangle(rotated_pts[0], rotated_pts[1], rotated_pts[7]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[2], rotated_pts[3]); + pole |= isPoleInTriangle(rotated_pts[3], rotated_pts[4], rotated_pts[5]); + pole |= isPoleInTriangle(rotated_pts[7], rotated_pts[5], rotated_pts[6]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[3], rotated_pts[5]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[5], rotated_pts[7]); + + + if (pole) { + Vec3 normal = (rotated_pts[1] - rotated_pts[0]).cross(rotated_pts[3] - rotated_pts[0]); + if (normal(1) > 0) { + //Lower pole + bbox_bottom = panoramaSize.second - 1; + } + else { + //upper pole + bbox_top = 0; + } + } + + bbox_height = bbox_bottom - bbox_top + 1; + + + /*Check if we cross the horizontal loop*/ + bool crossH; + for (int i = 0; i < 8; i++) { + int i2 = i + 1; + if (i2 > 
7) i2 = 0; + + bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); + if (i == 0) crossH = cross; + else crossH |= cross; + } + + + if (pole) { + /*Easy : if we cross the pole, the width is full*/ + bbox_left = 0; + bbox_right = panoramaSize.first - 1; + bbox_width = bbox_right - bbox_left + 1; + } + else if (crossH) { + + bbox_left = panoramaSize.first; + bbox_right = 0; + + for (int i = 0; i < 8; i++) { + int i2 = i + 1; + if (i2 > 7) i2 = 0; + + Vec2 res_left = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + Vec2 res_right = SphericalMapping::toEquirectangular(rotated_pts[i2], panoramaSize.first, panoramaSize.second); + + if (crossHorizontalLoop(rotated_pts[i], rotated_pts[i2])) { + + if (res_left(0) > res_right(0)) { + //Left --> border; border --> right + bbox_left = std::min(bbox_left, int(floor(res_left(0)))); + bbox_right = std::max(bbox_right, int(ceil(res_right(0)))); + + if (i % 2 == 0) { + //next segment is continuity + int next = i2 + 1; + if (next > 7) next = 0; + + Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[next], panoramaSize.first, panoramaSize.second); + bbox_right = std::max(bbox_right, int(ceil(res(0)))); + } + else { + int prev = i - 1; + if (prev < 0) prev = 7; + Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[prev], panoramaSize.first, panoramaSize.second); + bbox_left = std::min(bbox_left, int(ceil(res(0)))); + } + } + else { + //right --> border; border --> left + bbox_left = std::min(bbox_left, int(floor(res_right(0)))); + bbox_right = std::max(bbox_right, int(ceil(res_left(0)))); + + if (i % 2 == 0) { + //next segment is continuity + int next = i2 + 1; + if (next > 7) next = 0; + + Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[next], panoramaSize.first, panoramaSize.second); + bbox_left = std::min(bbox_left, int(ceil(res(0)))); + } + else { + int prev = i - 1; + if (prev < 0) prev = 7; + Vec2 res = 
SphericalMapping::toEquirectangular(rotated_pts[prev], panoramaSize.first, panoramaSize.second); + bbox_right = std::max(bbox_right, int(ceil(res(0)))); + } + } + } + } + + bbox_width = bbox_right + 1 + (panoramaSize.first - bbox_left + 1); + } + else { + /*horizontal default solution : no border crossing, no pole*/ + bbox_left = panoramaSize.first; + bbox_right = 0; + for (int i = 0; i < 8; i++) { + Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + bbox_left = std::min(int(floor(res(0))), bbox_left); + bbox_right = std::max(int(ceil(res(0))), bbox_right); + } + bbox_width = bbox_right - bbox_left + 1; + } + + /*Assign solution to result*/ + coarse_bbox.left = bbox_left; + coarse_bbox.top = bbox_top; + coarse_bbox.width = bbox_width; + coarse_bbox.height = bbox_height; + + return true; + } + Vec3 getExtremaY(const Vec3 & pt1, const Vec3 & pt2) { Vec3 delta = pt2 - pt1; double dx = delta(0); @@ -1314,6 +1410,62 @@ class LaplacianCompositer : public AlphaCompositer { aliceVision::image::Image _maxweightmap; }; +bool computeOptimalPanoramaSize(std::pair & optimalSize, const sfmData::SfMData & sfmData) { + + optimalSize.first = 512; + optimalSize.second = 256; + + /** + * Loop over views to estimate best scale + */ + std::vector scales; + for (auto & viewIt: sfmData.getViews()) { + + /** + * Retrieve view + */ + const sfmData::View& view = *viewIt.second.get(); + if (!sfmData.isPoseAndIntrinsicDefined(&view)) { + continue; + } + + /** + * Get intrinsics and extrinsics + */ + const geometry::Pose3 camPose = sfmData.getPose(view).getTransform(); + const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); + + /** + * Compute map + */ + CoordinatesMap map; + if (!map.build(optimalSize, camPose, intrinsic)) { + continue; + } + + double scale; + if (!map.computeScale(scale)) { + continue; + } + + scales.push_back(scale); + } + + + if (scales.size() > 1) { + double median_scale; + 
std::nth_element(scales.begin(), scales.begin() + scales.size() / 2, scales.end()); + median_scale = scales[scales.size() / 2]; + + double multiplier = pow(2.0, int(floor(log2(median_scale)))); + + optimalSize.first = optimalSize.first * multiplier; + optimalSize.second = optimalSize.second * multiplier; + } + + return true; +} + int main(int argc, char **argv) { /** @@ -1338,7 +1490,7 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ - std::pair panoramaSize = {20000, 10000}; + std::pair panoramaSize = {1024, 512}; po::options_description optionalParams("Optional parameters"); allParams.add(optionalParams); @@ -1402,12 +1554,14 @@ int main(int argc, char **argv) { return EXIT_FAILURE; } + + //computeOptimalPanoramaSize(); + /** * Create compositer */ LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); - /** * Preprocessing per view */ @@ -1427,7 +1581,7 @@ int main(int argc, char **argv) { /** * Get intrinsics and extrinsics */ - const geometry::Pose3 & camPose = sfmData.getPose(view).getTransform(); + const geometry::Pose3 camPose = sfmData.getPose(view).getTransform(); const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); @@ -1459,21 +1613,21 @@ int main(int argc, char **argv) { /** *Composite image into output */ - ALICEVISION_LOG_INFO("Composite\n"); + ALICEVISION_LOG_INFO("Composite\n"); compositer.append(warper, alphabuilder); { const aliceVision::image::Image & panorama = compositer.getPanorama(); char filename[512]; sprintf(filename, "%s_intermediate_%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, panorama, image::EImageColorSpace::NO_CONVERSION); + image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); } { const aliceVision::image::Image & panorama = warper.getColor(); char filename[512]; sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, panorama, 
image::EImageColorSpace::NO_CONVERSION); + image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); } pos++; } From a0020f77aea70ba765657ebf7ab2b5e4a1bf42cd Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 23 Oct 2019 10:58:11 +0200 Subject: [PATCH 083/174] Border bug --- .../pipeline/main_panoramaStitching.cpp | 34 ++++++++++++------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 77e995bef5..75eb754a1e 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -444,6 +444,7 @@ class CoordinatesMap { if (!computeCoarseBB(coarse_bbox, panoramaSize, pose, intrinsics)) { return false; } + /* Effectively compute the warping map */ aliceVision::image::Image buffer_coordinates(coarse_bbox.width, coarse_bbox.height, false); @@ -509,6 +510,11 @@ class CoordinatesMap { } _offset_x = coarse_bbox.left + min_x; + if (_offset_x > panoramaSize.first) { + /*The coarse bounding box may cross the borders where as the true coordinates may not*/ + int ox = int(_offset_x) - int(panoramaSize.first); + _offset_x = ox; + } _offset_y = coarse_bbox.top + min_y; _real_width = max_x - min_x + 1; _real_height = max_y - min_y + 1; @@ -1228,7 +1234,7 @@ class LaplacianCompositer : public AlphaCompositer { size_t oy = warper.getOffsetY(); size_t pwidth = _panorama.Width(); size_t pheight = _panorama.Height(); - + /** * Create a copy of panorama related pixels */ @@ -1248,7 +1254,6 @@ class LaplacianCompositer : public AlphaCompositer { panorama_subcolor.block(0, 0, warper.getHeight(), warper.getWidth()) = _panorama.block(oy, ox, warper.getHeight(), warper.getWidth()); panorama_submask.block(0, 0, warper.getHeight(), warper.getWidth()) = _mask.block(oy, ox, warper.getHeight(), warper.getWidth()); } - /** * Compute optimal scale @@ -1308,7 +1313,7 @@ class LaplacianCompositer : public 
AlphaCompositer { } } } - + Eigen::MatrixXf kernel = gaussian_kernel(5, 2.0f); /*Create scales*/ @@ -1554,8 +1559,10 @@ int main(int argc, char **argv) { return EXIT_FAILURE; } - - //computeOptimalPanoramaSize(); + std::pair optimalPanoramaSize; + if (computeOptimalPanoramaSize(optimalPanoramaSize, sfmData)) { + ALICEVISION_LOG_INFO("Optimal panorama size : " << optimalPanoramaSize.first << "x" << optimalPanoramaSize.second); + } /** * Create compositer @@ -1584,7 +1591,6 @@ int main(int argc, char **argv) { const geometry::Pose3 camPose = sfmData.getPose(view).getTransform(); const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - /** * Prepare coordinates map */ @@ -1606,6 +1612,13 @@ int main(int argc, char **argv) { ALICEVISION_LOG_INFO("Warp\n"); GaussianWarper warper; warper.warp(map, source); + + { + const aliceVision::image::Image & panorama = warper.getColor(); + char filename[512]; + sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); + image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); + } AlphaBuilder alphabuilder; alphabuilder.build(map, intrinsic); @@ -1615,6 +1628,7 @@ int main(int argc, char **argv) { */ ALICEVISION_LOG_INFO("Composite\n"); compositer.append(warper, alphabuilder); + { const aliceVision::image::Image & panorama = compositer.getPanorama(); @@ -1622,13 +1636,7 @@ int main(int argc, char **argv) { sprintf(filename, "%s_intermediate_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); } - - { - const aliceVision::image::Image & panorama = warper.getColor(); - char filename[512]; - sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); - } + pos++; } From 85112a4a469061b5ca270f4b27a7d0eaade7f621 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 23 Oct 2019 15:41:34 +0200 Subject: [PATCH 084/174] Eigen modifications --- 
.../sfm/ResidualErrorConstraintFunctor.hpp | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp index 8036e0589c..7c94233d36 100644 --- a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp +++ b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp @@ -45,7 +45,7 @@ struct ResidualErrorConstraintFunctor_Pinhole }; template - void lift(const T* const cam_K, const Vec3 pt, Eigen::Vector< T, 3> & out) const + void lift(const T* const cam_K, const Vec3 pt, Eigen::Matrix< T, 3, 1> & out) const { const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; @@ -58,13 +58,13 @@ struct ResidualErrorConstraintFunctor_Pinhole } template - void unlift(const T* const cam_K, const Eigen::Vector & pt, Eigen::Vector & out) const + void unlift(const T* const cam_K, const Eigen::Matrix< T, 3, 1> & pt, Eigen::Matrix< T, 3, 1> & out) const { const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; - Eigen::Vector proj_pt = pt / pt(2); + Eigen::Matrix< T, 3, 1> proj_pt = pt / pt(2); out(0) = proj_pt(0) * focal + principal_point_x; out(1) = proj_pt(1) * focal + principal_point_y; @@ -87,7 +87,7 @@ struct ResidualErrorConstraintFunctor_Pinhole { Eigen::Matrix oneRo, twoRo, twoRone; - Eigen::Vector pt3d_1; + Eigen::Matrix< T, 3, 1> pt3d_1; lift(cam_K, m_pos_2dpoint_first, pt3d_1); @@ -96,12 +96,12 @@ struct ResidualErrorConstraintFunctor_Pinhole twoRone = twoRo * oneRo.transpose(); - Eigen::Vector pt3d_2_est = twoRone * pt3d_1; + Eigen::Matrix< T, 3, 1> pt3d_2_est = twoRone * pt3d_1; - Eigen::Vector pt2d_2_est; + Eigen::Matrix< T, 3, 1> pt2d_2_est; unlift(cam_K, pt3d_2_est, pt2d_2_est); - Eigen::Vector residual = pt2d_2_est - m_pos_2dpoint_second; + Eigen::Matrix< T, 3, 1> residual = 
pt2d_2_est - m_pos_2dpoint_second; out_residuals[0] = residual(0); out_residuals[1] = residual(1); @@ -147,7 +147,7 @@ struct ResidualErrorConstraintFunctor_PinholeRadialK1 } template - void lift(const T* const cam_K, const Vec3 pt, Eigen::Vector & out) const + void lift(const T* const cam_K, const Vec3 pt, Eigen::Matrix< T, 3, 1> & out) const { const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; @@ -183,7 +183,7 @@ struct ResidualErrorConstraintFunctor_PinholeRadialK1 } template - void unlift(const T* const cam_K, const Eigen::Vector & pt, Eigen::Vector & out) const + void unlift(const T* const cam_K, const Eigen::Matrix< T, 3, 1> & pt, Eigen::Matrix< T, 3, 1> & out) const { const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; @@ -191,7 +191,7 @@ struct ResidualErrorConstraintFunctor_PinholeRadialK1 const T& k1 = cam_K[OFFSET_DISTO_K1]; //Project on plane - Eigen::Vector proj_pt = pt / pt(2); + Eigen::Matrix< T, 3, 1> proj_pt = pt / pt(2); //Apply distortion const T r2 = proj_pt(0)*proj_pt(0) + proj_pt(1)*proj_pt(1); @@ -221,7 +221,7 @@ struct ResidualErrorConstraintFunctor_PinholeRadialK1 { Eigen::Matrix oneRo, twoRo, twoRone; - Eigen::Vector pt3d_1; + Eigen::Matrix< T, 3, 1> pt3d_1; //From pixel to meters lift(cam_K, m_pos_2dpoint_first, pt3d_1); @@ -232,14 +232,14 @@ struct ResidualErrorConstraintFunctor_PinholeRadialK1 twoRone = twoRo * oneRo.transpose(); //Transform point - Eigen::Vector pt3d_2_est = twoRone * pt3d_1; + Eigen::Matrix< T, 3, 1> pt3d_2_est = twoRone * pt3d_1; //Project back to image space in pixels - Eigen::Vector pt2d_2_est; + Eigen::Matrix< T, 3, 1> pt2d_2_est; unlift(cam_K, pt3d_2_est, pt2d_2_est); //Compute residual - Eigen::Vector residual = pt2d_2_est - m_pos_2dpoint_second; + Eigen::Matrix< T, 3, 1> residual = pt2d_2_est - m_pos_2dpoint_second; out_residuals[0] = residual(0); out_residuals[1] = residual(1); @@ -281,7 +281,7 @@ 
struct ResidualErrorConstraintFunctor_PinholeFisheye }; template - void lift(const T* const cam_K, const Vec3 pt, Eigen::Vector & out) const + void lift(const T* const cam_K, const Vec3 pt, Eigen::Matrix< T, 3, 1> & out) const { const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; @@ -341,7 +341,7 @@ struct ResidualErrorConstraintFunctor_PinholeFisheye } template - void unlift(const T* const cam_K, const Eigen::Vector & pt, Eigen::Vector & out) const + void unlift(const T* const cam_K, const Eigen::Matrix< T, 3, 1> & pt, Eigen::Matrix< T, 3, 1> & out) const { const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; @@ -352,7 +352,7 @@ struct ResidualErrorConstraintFunctor_PinholeFisheye const T& k4 = cam_K[OFFSET_DISTO_K4]; //Project on plane - Eigen::Vector proj_pt = pt / pt(2); + Eigen::Matrix< T, 3, 1> proj_pt = pt / pt(2); //Apply distortion const T x_u = proj_pt(0); @@ -398,7 +398,7 @@ struct ResidualErrorConstraintFunctor_PinholeFisheye { Eigen::Matrix oneRo, twoRo, twoRone; - Eigen::Vector pt3d_1; + Eigen::Matrix< T, 3, 1> pt3d_1; //From pixel to meters lift(cam_K, m_pos_2dpoint_first, pt3d_1); @@ -409,14 +409,14 @@ struct ResidualErrorConstraintFunctor_PinholeFisheye twoRone = twoRo * oneRo.transpose(); //Transform point - Eigen::Vector pt3d_2_est = twoRone * pt3d_1; + Eigen::Matrix< T, 3, 1> pt3d_2_est = twoRone * pt3d_1; //Project back to image space in pixels - Eigen::Vector pt2d_2_est; + Eigen::Matrix< T, 3, 1> pt2d_2_est; unlift(cam_K, pt3d_2_est, pt2d_2_est); //Compute residual - Eigen::Vector residual = pt2d_2_est - m_pos_2dpoint_second; + Eigen::Matrix< T, 3, 1> residual = pt2d_2_est - m_pos_2dpoint_second; out_residuals[0] = residual(0); out_residuals[1] = residual(1); From 457e72efdc5eadb330f2791ac94a20bda8434c27 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 23 Oct 2019 16:02:29 +0200 Subject: [PATCH 085/174] Bug in borders --- 
.../pipeline/main_panoramaStitching.cpp | 41 +++++++++++-------- 1 file changed, 23 insertions(+), 18 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 75eb754a1e..3c2c5a6fe2 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -31,7 +31,7 @@ float sigmoid(float x, float sigwidth, float sigMid) return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); } -Eigen::MatrixXf gaussian_kernel(size_t kernel_length, float sigma) { +Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { Eigen::VectorXd x; x.setLinSpaced(kernel_length + 1, -sigma, +sigma); @@ -46,15 +46,26 @@ Eigen::MatrixXf gaussian_kernel(size_t kernel_length, float sigma) { k1d(i) = cdf(i + 1) - cdf(i); } - Eigen::MatrixXd K = k1d * k1d.transpose(); + double sum = k1d.sum(); + k1d = k1d / sum; + + return k1d.cast(); +} + +Eigen::MatrixXf gaussian_kernel(size_t kernel_length, float sigma) { + + Eigen::VectorXf k1d = gaussian_kernel_vector(kernel_length, sigma); + Eigen::MatrixXf K = k1d * k1d.transpose(); double sum = K.sum(); K = K / sum; - return K.cast(); + return K; } + + bool convolve(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::MatrixXf & kernel) { if (output.size() != input.size()) { @@ -1182,7 +1193,6 @@ class AlphaCompositer : public Compositer { size_t pano_j = warper.getOffsetX() + j; if (pano_j >= _panorama.Width()) { pano_j = pano_j - _panorama.Width(); - continue; } if (!_mask(pano_i, pano_j)) { @@ -1399,14 +1409,6 @@ class LaplacianCompositer : public AlphaCompositer { } } - /*auto & tosave = camera_weightmaps[0]; - for (int i = 0; i < camera_weights.Height(); i++) { - for (int j = 0; j < camera_weights.Width(); j++) { - tosave(i, j) *= 100.0f; - } - }*/ - /*image::writeImage("/home/servantf/test.png", camera_colors[0], image::EImageColorSpace::SRGB); - 
image::writeImage("/home/servantf/test2.png", camera_colors[scales - 1], image::EImageColorSpace::SRGB);*/ return true; } @@ -1562,19 +1564,21 @@ int main(int argc, char **argv) { std::pair optimalPanoramaSize; if (computeOptimalPanoramaSize(optimalPanoramaSize, sfmData)) { ALICEVISION_LOG_INFO("Optimal panorama size : " << optimalPanoramaSize.first << "x" << optimalPanoramaSize.second); + panoramaSize = optimalPanoramaSize; } /** * Create compositer */ - LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** * Preprocessing per view */ size_t pos = 0; for (auto & viewIt: sfmData.getViews()) { - + + /*if (pos == 12) */{ /** * Retrieve view */ @@ -1628,16 +1632,17 @@ int main(int argc, char **argv) { */ ALICEVISION_LOG_INFO("Composite\n"); compositer.append(warper, alphabuilder); + } + pos++; + } - { + { + ALICEVISION_LOG_INFO("Save\n"); const aliceVision::image::Image & panorama = compositer.getPanorama(); char filename[512]; sprintf(filename, "%s_intermediate_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); - } - - pos++; } return EXIT_SUCCESS; From 98f1d1f920c99f50fb2222f60fefb4578e0f2d6b Mon Sep 17 00:00:00 2001 From: fabienservant Date: Thu, 24 Oct 2019 09:49:04 +0200 Subject: [PATCH 086/174] Parameter for panorama size --- .../pipeline/main_panoramaStitching.cpp | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 3c2c5a6fe2..16861d3b5f 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -1497,9 +1497,12 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ - std::pair panoramaSize = {1024, 512}; + std::pair panoramaSize = {1024, 0}; 
po::options_description optionalParams("Optional parameters"); + optionalParams.add_options() + ("panoramaWidth,w", po::value(&panoramaSize.first)->default_value(panoramaSize.first), "Panorama Width in pixels."); allParams.add(optionalParams); + panoramaSize.second = panoramaSize.first / 2; /** * Setup log level given command line @@ -1561,10 +1564,13 @@ int main(int argc, char **argv) { return EXIT_FAILURE; } - std::pair optimalPanoramaSize; - if (computeOptimalPanoramaSize(optimalPanoramaSize, sfmData)) { - ALICEVISION_LOG_INFO("Optimal panorama size : " << optimalPanoramaSize.first << "x" << optimalPanoramaSize.second); - panoramaSize = optimalPanoramaSize; + /*If panorama width is undefined, estimate it*/ + if (panoramaSize.first <= 0) { + std::pair optimalPanoramaSize; + if (computeOptimalPanoramaSize(optimalPanoramaSize, sfmData)) { + ALICEVISION_LOG_INFO("Optimal panorama size : " << optimalPanoramaSize.first << "x" << optimalPanoramaSize.second); + panoramaSize = optimalPanoramaSize; + } } /** @@ -1617,12 +1623,12 @@ int main(int argc, char **argv) { GaussianWarper warper; warper.warp(map, source); - { + /*{ const aliceVision::image::Image & panorama = warper.getColor(); char filename[512]; sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); - } + }*/ AlphaBuilder alphabuilder; alphabuilder.build(map, intrinsic); @@ -1641,7 +1647,7 @@ int main(int argc, char **argv) { ALICEVISION_LOG_INFO("Save\n"); const aliceVision::image::Image & panorama = compositer.getPanorama(); char filename[512]; - sprintf(filename, "%s_intermediate_%d.exr", outputPanorama.c_str(), pos); + sprintf(filename, "%s.exr", outputPanorama.c_str()); image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); } From 17b5d86967a39882118d52e17eb425be3286236b Mon Sep 17 00:00:00 2001 From: fabienservant Date: Thu, 24 Oct 2019 13:43:20 +0200 Subject: [PATCH 087/174] remove recover source image... 
it is not useful in the current pipeline --- src/software/convert/main_convertLDRToHDR.cpp | 90 +------------------ 1 file changed, 1 insertion(+), 89 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 92fdd17178..1c4bc9e097 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -256,43 +256,6 @@ void getInputPaths(const std::vector& imagesFolder, std::vector& hdrImage, hdr::rgbCurve& response, float channelQuantization, float meanVal[], image::Image &targetRecover) -{ - float meanRecovered[3] = {0.f, 0.f, 0.f}; - for(std::size_t channel = 0; channel < 3; ++channel) - { - std::vector::iterator first = response.getCurve(channel).begin(); - std::vector::iterator last = response.getCurve(channel).end(); - for(std::size_t y = 0; y < hdrImage.Height(); ++y) - { - for(std::size_t x = 0; x < hdrImage.Width(); ++x) - { - const float &pixelValue = hdrImage(y, x)(channel); - std::vector::iterator it = std::lower_bound(first, last, pixelValue); - float value = float(std::distance(response.getCurve(channel).begin(), it)) / (channelQuantization - 1.f); - targetRecover(y, x)(channel) = value; - - meanRecovered[channel] += value; - - } - } - meanRecovered[channel] /= hdrImage.size(); - } - float offset[3]; - for(int i=0; i<3; ++i) - offset[i] = std::abs(meanRecovered[i] - meanVal[i]); - ALICEVISION_LOG_INFO("Offset between target source image and recovered from hdr = " << offset[0] << " " << offset[1] << " " << offset[2]); -} - - int main(int argc, char** argv) { // command-line parameters @@ -308,7 +271,6 @@ int main(int argc, char** argv) std::vector targets; float clampedValueCorrection = 1.f; bool fisheye = true; - std::string recoverSourcePath; po::options_description allParams("AliceVision convertLDRToHDR"); @@ -336,9 +298,7 @@ int main(int argc, char** argv) ("fusionWeight,W", 
po::value(&fusionWeightFunction)->default_value(fusionWeightFunction), "Weight function used to fuse all LDR images together (gaussian, triangle, plateau).") ("outputResponse", po::value(&outputResponsePath), - "(For debug) Output camera response function folder or complete path.") - ("recoverPath", po::value(&recoverSourcePath)->default_value(recoverSourcePath), - "(For debug) Folder path for recovering LDR images at the target exposures by applying inverse response on HDR images."); + "(For debug) Output camera response function folder or complete path."); po::options_description logParams("Log parameters"); logParams.add_options() @@ -494,16 +454,6 @@ int main(int argc, char** argv) image::readImage(imagePath, ldrImages[i], loadColorSpace); metadatas[i] = image::readImageMetadata(imagePath); - - if (ldrImages[i].Height() < ldrImages[i].Width()) { - image::Image rotate(ldrImages[i].Height(), ldrImages[i].Width()); - for (int k = 0; k < rotate.Height(); k++) { - for (int l = 0; l < rotate.Width(); l++) { - rotate(k, l) = ldrImages[i](l, rotate.Height() - 1 - k); - } - } - ldrImages[i] = rotate; - } // Debevec and Robertson algorithms use shutter speed as ev value @@ -659,43 +609,5 @@ int main(int argc, char** argv) } - // test of recovery of source target image from HDR - if(!recoverSourcePath.empty()) - { - fs::path recoverPath = fs::path(recoverSourcePath); - - for(int g = 0; g < nbGroups; ++g) - { - image::Image HDRimage; - image::readImage(outputHDRImagesPath[g], HDRimage, image::EImageColorSpace::NO_CONVERSION); - - // calcul of mean value of target images - std::size_t targetIndex = std::distance(times[g].begin(), std::find(times[g].begin(), times[g].end(), targetTimes[g])); - float meanVal[3] = {0.f, 0.f, 0.f}; - image::Image &targetImage = ldrImageGroups[g].at(targetIndex); - image::Image targetRecover(targetImage.Width(), targetImage.Height(), false); - for(std::size_t channel=0; channel<3; ++channel) - { - for(std::size_t y = 0; y < targetImage.Height(); 
++y) - { - for(std::size_t x = 0; x < targetImage.Width(); ++x) - { - meanVal[channel] += targetImage(y, x)(channel); - } - } - meanVal[channel] /= targetImage.size(); - } - recoverSourceImage(HDRimage, response, channelQuantization, meanVal, targetRecover); - - if(nbGroups == 1) - recoverSourcePath = (recoverPath / (std::string("recovered.exr"))).string(); - else - recoverSourcePath = (recoverPath / (std::string("recovered_") + std::to_string(g) + std::string(".exr"))).string(); - - image::writeImage(recoverSourcePath, targetRecover, image::EImageColorSpace::AUTO); - ALICEVISION_LOG_INFO("Recovered target source image written as " << recoverSourcePath); - } - } - return EXIT_SUCCESS; } From ecb6804b1a6f17e20bbea2d7995e8c3723c4f10b Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 07:48:29 +0200 Subject: [PATCH 088/174] Rescratch LDR to HDR main to integrate into pipeline --- src/aliceVision/hdr/DebevecCalibrate.cpp | 23 +- src/aliceVision/hdr/DebevecCalibrate.hpp | 4 +- src/software/convert/CMakeLists.txt | 2 + src/software/convert/main_convertLDRToHDR.cpp | 682 +++++------------- 4 files changed, 199 insertions(+), 512 deletions(-) diff --git a/src/aliceVision/hdr/DebevecCalibrate.cpp b/src/aliceVision/hdr/DebevecCalibrate.cpp index 1106acff59..5c930f0607 100644 --- a/src/aliceVision/hdr/DebevecCalibrate.cpp +++ b/src/aliceVision/hdr/DebevecCalibrate.cpp @@ -10,23 +10,25 @@ #include #include #include +#include +#include namespace aliceVision { namespace hdr { using T = Eigen::Triplet; -void DebevecCalibrate::process(const std::vector< std::vector< image::Image > > &ldrImageGroups, +void DebevecCalibrate::process(const std::vector< std::vector> & imagePathsGroups, const std::size_t channelQuantization, - const std::vector< std::vector > ×, + const std::vector > ×, const int nbPoints, const bool fisheye, const rgbCurve &weight, const float lambda, rgbCurve &response) { - const int nbGroups = ldrImageGroups.size(); - const int nbImages = 
ldrImageGroups.front().size(); + const int nbGroups = imagePathsGroups.size(); + const int nbImages = imagePathsGroups.front().size(); const int samplesPerImage = nbPoints / (nbGroups*nbImages); //set channels count always RGB @@ -47,7 +49,13 @@ void DebevecCalibrate::process(const std::vector< std::vector< image::Image > &ldrImagesGroup = ldrImageGroups[g]; + const std::vector &imagePaths = imagePathsGroups[g]; + std::vector> ldrImagesGroup(imagePaths.size()); + + for (int i = 0; i < imagePaths.size(); i++) { + image::readImage(imagePaths[i], ldrImagesGroup[i], image::EImageColorSpace::SRGB); + } + const std::vector &ldrTimes = times[g]; const std::size_t width = ldrImagesGroup.front().Width(); const std::size_t height = ldrImagesGroup.front().Height(); @@ -151,11 +159,10 @@ void DebevecCalibrate::process(const std::vector< std::vector< image::Image > > &ldrImageGroups, + void process(const std::vector> &ldrImageGroups, const std::size_t channelQuantization, - const std::vector< std::vector > ×, + const std::vector> ×, const int nbPoints, const bool fisheye, const rgbCurve &weight, diff --git a/src/software/convert/CMakeLists.txt b/src/software/convert/CMakeLists.txt index 0571598eed..9a084047a8 100644 --- a/src/software/convert/CMakeLists.txt +++ b/src/software/convert/CMakeLists.txt @@ -43,6 +43,8 @@ alicevision_add_software(aliceVision_convertLDRToHDR LINKS aliceVision_system aliceVision_image aliceVision_hdr + aliceVision_sfmData + aliceVision_sfmDataIO ${Boost_LIBRARIES} ) endif() diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 1c4bc9e097..7b180fd6f7 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -8,6 +8,12 @@ #include #include #include + +/*SFMData*/ +#include +#include + +/*HDR Related*/ #include #include #include @@ -15,13 +21,10 @@ #include #include +/*Command line parameters*/ #include #include - -#include -#include #include 
-#include // These constants define the current software version. // They must be updated when the command line is changed. @@ -31,282 +34,41 @@ using namespace aliceVision; namespace po = boost::program_options; -namespace fs = boost::filesystem; - - -enum class ECalibrationMethod -{ - LINEAR, - ROBERTSON, - DEBEVEC, - GROSSBERG -}; - -/** - * @brief convert an enum ECalibrationMethod to its corresponding string - * @param ECalibrationMethod - * @return String - */ -inline std::string ECalibrationMethod_enumToString(const ECalibrationMethod calibrationMethod) -{ - switch(calibrationMethod) - { - case ECalibrationMethod::LINEAR: return "linear"; - case ECalibrationMethod::ROBERTSON: return "robertson"; - case ECalibrationMethod::DEBEVEC: return "debevec"; - case ECalibrationMethod::GROSSBERG: return "grossberg"; - } - throw std::out_of_range("Invalid method name enum"); -} - -/** - * @brief convert a string calibration method name to its corresponding enum ECalibrationMethod - * @param ECalibrationMethod - * @return String - */ -inline ECalibrationMethod ECalibrationMethod_stringToEnum(const std::string& calibrationMethodName) -{ - std::string methodName = calibrationMethodName; - std::transform(methodName.begin(), methodName.end(), methodName.begin(), ::tolower); - - if(methodName == "linear") return ECalibrationMethod::LINEAR; - if(methodName == "robertson") return ECalibrationMethod::ROBERTSON; - if(methodName == "debevec") return ECalibrationMethod::DEBEVEC; - if(methodName == "grossberg") return ECalibrationMethod::GROSSBERG; - - throw std::out_of_range("Invalid method name : '" + calibrationMethodName + "'"); -} - -inline std::ostream& operator<<(std::ostream& os, ECalibrationMethod calibrationMethodName) -{ - os << ECalibrationMethod_enumToString(calibrationMethodName); - return os; -} - -inline std::istream& operator>>(std::istream& in, ECalibrationMethod& calibrationMethod) -{ - std::string token; - in >> token; - calibrationMethod = 
ECalibrationMethod_stringToEnum(token); - return in; -} - - -/** - * @brief check if given file is regular and matches with authorized extensions - * @param[in] filepath - * @return true if valid file - */ -bool isValidImageFile(const fs::path& filepath) -{ - if(!fs::is_regular_file(filepath)) - false; - - const std::string validExtensions( - // Basic image file extensions - "jpg|jpeg|png|tiff|tif|" - // RAW image file extensions: - "3fr|" // Hasselblad - "arw|" // Sony - "crw|cr2|cr3|" // Canon - "dng|" // Adobe - "kdc|" // Kodak - "mrw|" // Minolta - "nef|nrw|" // Nikon - "orf|" // Olympus - "ptx|pef|" // Pentax - "raf|" // Fuji - "R3D|" // RED - "rw2|" // Panasonic - "srw|" // Samsung - "x3f" // Sigma - ); - const std::regex validExtensionsExp("\\.(?:" + validExtensions + ")"); - - std::string extension = filepath.extension().string(); - std::transform(extension.begin(), extension.end(), extension.begin(), ::tolower); - - return std::regex_match(extension, validExtensionsExp); -} - - -/** - * @brief get image file paths in folder and add it to output vector if file is valid - * @param[in] folder - input folder path - * @param[out] out_imageFiles - vector of images paths - */ -void getImagesGroup(const fs::path& folder, std::vector& out_imageFiles) -{ - - for(const fs::directory_entry& file: fs::directory_iterator(folder)) - { - if(isValidImageFile(file.path())) - out_imageFiles.push_back(file.path().string()); - } -} - - -/** - * @brief get all input images paths from input provided by user - * @param[in] imagesFolder - input folder or list provided by user - * @param[out] ldrImagesPaths - vector of vector of images paths sorted by group - */ -void getInputPaths(const std::vector& imagesFolder, std::vector>& ldrImagesPaths) -{ - std::vector singleFiles; - bool sub_folders = false; - for(const std::string& entry: imagesFolder) - { - fs::path entryPath = fs::path(entry); - if(fs::is_directory(entryPath)) - { - std::vector group; - getImagesGroup(entryPath, group); 
- - if(group.empty()) - { - // folder without image, assume it is a folder with sub-folders - sub_folders = true; - for(const fs::directory_entry& subEntry: fs::directory_iterator(entryPath)) - { - fs::path subEntryPath = fs::path(subEntry); - if(fs::is_directory(subEntryPath)); - { - std::vector subGroup; - getImagesGroup(subEntryPath, subGroup); - ldrImagesPaths.push_back(subGroup); - } - } - } - else - { - // folder with images - ldrImagesPaths.push_back(group); - } - } - // list of images - else if(fs::is_regular_file(entryPath)) - { - if(isValidImageFile(entryPath)) - singleFiles.push_back(entry); - } - else // is an expression - { - // extract folder / expression - std::string imagesExp = entryPath.stem().string(); - std::string extension = entryPath.extension().string(); - std::size_t pos = imagesExp.find("*"); - if(pos != std::string::npos) - { - imagesExp.insert(pos, std::string(".")); - } - else - { - throw std::runtime_error("[ldrToHdr] Invalid expression of input files."); - } - std::regex exp("\\.(?:" + imagesExp + "\\" + extension + ")"); - - - for(const fs::directory_entry& file: fs::directory_iterator(entryPath.parent_path())) - { - if(fs::is_regular_file(file) && std::regex_match(file.path().string(), exp)) - if(isValidImageFile(file.path())) - singleFiles.push_back(file.path().string()); - } - - if(singleFiles.empty()) - throw std::runtime_error("[ldrToHdr] Invalid expression of input files."); - } - } - - if(!singleFiles.empty()) - { - if(!ldrImagesPaths.empty()) - { - throw std::runtime_error("[ldrToHdr] Cannot mix files and folders in input."); - } - ldrImagesPaths.push_back(singleFiles); - } - - // if folder with sub-folders we need to sort sub-folders in alphabetic order - if(sub_folders) - { - std::vector> ldrImagesPaths_copy = ldrImagesPaths; - std::vector subFoldersNames; - - for(const std::vector& group: ldrImagesPaths) - { - fs::path firstPath = fs::path(group.front()); - subFoldersNames.push_back(firstPath.parent_path().string()); - } - 
- std::vector subFoldersNames_sorted = subFoldersNames; - std::sort(subFoldersNames_sorted.begin(), subFoldersNames_sorted.end()); - - for(std::size_t g = 0; g < ldrImagesPaths.size(); ++g) - { - std::vector::iterator it = std::find(subFoldersNames.begin(), subFoldersNames.end(), subFoldersNames_sorted[g]); - if(it == subFoldersNames.end()) - ALICEVISION_LOG_ERROR("Cannot sort folers, please store them in alphabetic order."); - - ldrImagesPaths[g] = ldrImagesPaths_copy.at(std::distance(subFoldersNames.begin(), it)); - } - } -} - - -int main(int argc, char** argv) -{ - // command-line parameters +int main(int argc, char * argv[]) { std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); - std::vector imagesFolder; - std::vector outputHDRImagesPath; - std::string outputResponsePath; - ECalibrationMethod calibrationMethod = ECalibrationMethod::LINEAR; - std::string inputResponsePath; - std::string calibrationWeightFunction = "default"; - hdr::EFunctionType fusionWeightFunction = hdr::EFunctionType::GAUSSIAN; - std::vector targets; - float clampedValueCorrection = 1.f; - bool fisheye = true; - - po::options_description allParams("AliceVision convertLDRToHDR"); - - po::options_description requiredParams("Required Parameters"); + std::string sfmInputDataFilename = ""; + std::string sfmOutputDataFilename = ""; + int groupSize = 3; + float clampedValueCorrection = 1.0f; + + /***** + * DESCRIBE COMMAND LINE PARAMETERS + */ + po::options_description allParams( + "Parse external information about cameras used in a panorama.\n" + "AliceVision PanoramaExternalInfo"); + + po::options_description requiredParams("Required parameters"); requiredParams.add_options() - ("input,i", po::value>(&imagesFolder)->required()->multitoken(), - "List of LDR images or a folder containing them (accepted formats are: .jpg .jpeg .png .tif .tiff or RAW image file extensions: .3fr .arw .crw .cr2 .cr3 .dng .kdc .mrw .nef .nrw .orf .ptx .pef .raf .R3D 
.rw2 .srw .x3f") - ("output,o", po::value>(&outputHDRImagesPath)->required()->multitoken(), - "Output HDR image folders or complete paths."); - - po::options_description optionalParams("Optional Parameters"); - optionalParams.add_options() - ("calibrationMethod,m", po::value(&calibrationMethod )->default_value(calibrationMethod), - "Name of method used for camera calibration (linear, robertson -> slow !, debevec, grossberg).") - ("expandDynamicRange,e", po::value(&clampedValueCorrection)->default_value(clampedValueCorrection), - "float value between 0 and 1 to correct clamped high values in dynamic range: use 0 for no correction, 0.5 for interior lighting and 1 for outdoor lighting.") - ("targetExposureImage,t", po::value>(&targets)->multitoken(), - "Name of LDR image to center your HDR exposure.") - ("fisheyeLens,f", po::value(&fisheye)->default_value(fisheye), - "Set to 1 if images are taken with a fisheye lens and to 0 if not. Default value is set to 1.") - ("inputResponse,r", po::value(&inputResponsePath), - "External camera response file to fuse all LDR images together.") - ("calibrationWeight,w", po::value(&calibrationWeightFunction)->default_value(calibrationWeightFunction), - "Weight function used to calibrate camera response (default depends on the calibration method, gaussian, triangle, plateau).") - ("fusionWeight,W", po::value(&fusionWeightFunction)->default_value(fusionWeightFunction), - "Weight function used to fuse all LDR images together (gaussian, triangle, plateau).") - ("outputResponse", po::value(&outputResponsePath), - "(For debug) Output camera response function folder or complete path."); + ("input,i", po::value(&sfmInputDataFilename)->required(), "SfMData file input.") + ("outSfMDataFilename,o", po::value(&sfmOutputDataFilename)->required(), "SfMData file output.") + ("groupSize,g", po::value(&groupSize)->required(), "bracket count per HDR image.") + ; + + po::options_description optionalParams("Optional parameters"); po::options_description 
logParams("Log parameters"); logParams.add_options() ("verboseLevel,v", po::value(&verboseLevel)->default_value(verboseLevel), - "verbosity level (fatal, error, warning, info, debug, trace)."); - + "verbosity level (fatal, error, warning, info, debug, trace)."); + allParams.add(requiredParams).add(optionalParams).add(logParams); + /** + * READ COMMAND LINE + */ po::variables_map vm; try { @@ -335,279 +97,195 @@ int main(int argc, char** argv) ALICEVISION_COUT("Program called with the following parameters:"); ALICEVISION_COUT(vm); - // set verbose level + /** + * set verbose level + **/ system::Logger::get()->setLogLevel(verboseLevel); - std::vector> imagesPaths; - getInputPaths(imagesFolder, imagesPaths); - std::size_t nbGroups = imagesPaths.size(); - std::size_t nbImages = imagesPaths.front().size(); - for(int g = 0; g < nbGroups; ++g) - { - if(imagesPaths[g].size() != nbImages) - { - ALICEVISION_LOG_ERROR("All groups should have the same number of LDR images."); - return EXIT_FAILURE; - } + if (groupSize < 0) { + ALICEVISION_LOG_ERROR("Invalid number of brackets"); + return EXIT_FAILURE; } - ALICEVISION_LOG_INFO(nbGroups << " group(s) of LDR images found"); - - std::vector< std::vector< image::Image > > ldrImageGroups(nbGroups); - std::vector< std::vector< image::Image > > ldrImageGroups_sorted(nbGroups); - std::vector< std::vector > times(nbGroups); - std::vector< std::vector > times_sorted(nbGroups); - std::vector targetTimes(nbGroups); - std::vector targetMetadatas(nbGroups); - - hdr::rgbCurve response(0); - std::size_t channelQuantization; - image::EImageColorSpace loadColorSpace; - - targets.resize(nbGroups); - outputHDRImagesPath.resize(nbGroups); + /** + * Update sfm accordingly + */ + sfmData::SfMData sfmData; + if(!sfmDataIO::Load(sfmData, sfmInputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS | sfmDataIO::INTRINSICS))) { + ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmInputDataFilename << "' cannot be read."); + return EXIT_FAILURE; + } + - 
std::string outputPath = outputHDRImagesPath.front(); + size_t countImages = sfmData.getViews().size(); + if (countImages == 0) { + ALICEVISION_LOG_ERROR("The input SfMData contains no input !"); + return EXIT_FAILURE; + } - if(fs::is_directory(outputPath)) - { - if(nbGroups == 1) - outputHDRImagesPath.front() = (fs::path(outputPath) / ("hdr.exr")).string(); - else - for(int g = 0; g < nbGroups; ++g) { - std::ostringstream ss; - ss << std::setw(4) << std::setfill('0') << g; - outputHDRImagesPath[g] = (fs::path(outputPath) / ("hdr_" + ss.str() + ".exr")).string(); - } + if (countImages % groupSize != 0) { + ALICEVISION_LOG_ERROR("The input SfMData file is not compatible with this bracket size"); + return EXIT_FAILURE; } - else if(outputHDRImagesPath.size() != nbGroups) - { - ALICEVISION_LOG_ERROR("Please provide one output path for each group of LDR images."); + size_t countGroups = countImages / groupSize; + + /*Make sure there is only one kind of image in dataset*/ + if (sfmData.getIntrinsics().size() != 1) { + ALICEVISION_LOG_ERROR("Multiple intrinsics : Different kind of images in dataset"); return EXIT_FAILURE; } - // read input response file or set the correct channel quantization according to the calibration method used - if(!inputResponsePath.empty()) - { - response = hdr::rgbCurve(inputResponsePath); // use the camera response function set in "responseCalibration", calibrationMethod is set to "none" - channelQuantization = response.getSize(); + /*Order views by their image names*/ + std::vector> viewsOrderedByName; + for (auto & viewIt: sfmData.getViews()) { + viewsOrderedByName.push_back(viewIt.second); } - else - { - channelQuantization = std::pow(2, 10); //RAW 10 bit precision, 2^10 values between black and white point - response.resize(channelQuantization); + std::sort(viewsOrderedByName.begin(), viewsOrderedByName.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { + if (a == nullptr || b == nullptr) return true; + return 
(a->getImagePath() < b->getImagePath()); + }); + + boost::filesystem::path path(sfmOutputDataFilename); + std::string output_path = path.parent_path().string(); + + + /* + Make sure all images have same aperture + */ + float aperture = viewsOrderedByName[0]->getMetadataAperture(); + for (auto & view : viewsOrderedByName) { + + if (view->getMetadataAperture() != aperture) { + ALICEVISION_LOG_ERROR("Different apertures amongst the dataset"); + return EXIT_FAILURE; + } } - // set correct color space according to calibration method - if(calibrationMethod == ECalibrationMethod::LINEAR) - loadColorSpace = image::EImageColorSpace::LINEAR; - else - loadColorSpace = image::EImageColorSpace::SRGB; - // force clamped value correction between 0 and 1 - clampedValueCorrection = clamp(clampedValueCorrection, 0.f, 1.f); + /*Make groups*/ + std::vector>> groupedViews; + std::vector> group; + for (auto & view : viewsOrderedByName) { + + group.push_back(view); + if (group.size() == groupSize) { + groupedViews.push_back(group); + group.clear(); + } + } - // set the correct weight functions corresponding to the string parameter - hdr::rgbCurve fusionWeight(channelQuantization); - fusionWeight.setFunction(fusionWeightFunction); + std::vector> targetViews; + for (auto & group : groupedViews) { + /*Sort all images by exposure time*/ + std::sort(group.begin(), group.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { + if (a == nullptr || b == nullptr) return true; + return (a->getMetadataShutter() < b->getMetadataShutter()); + }); - hdr::rgbCurve calibrationWeight(channelQuantization); - std::transform(calibrationWeightFunction.begin(), calibrationWeightFunction.end(), calibrationWeightFunction.begin(), ::tolower); - if(calibrationWeightFunction == "default") - { - switch(calibrationMethod) - { - case ECalibrationMethod::LINEAR: break; - case ECalibrationMethod::DEBEVEC: calibrationWeight.setTriangular(); break; - case ECalibrationMethod::ROBERTSON: 
calibrationWeight.setRobertsonWeight(); break; - case ECalibrationMethod::GROSSBERG: break; + /*Target views are the middle exposed views*/ + int middleIndex = group.size() / 2; + + /*If odd size, choose the more exposed view*/ + if (group.size() % 2 && group.size() > 1) { + middleIndex++; } + + targetViews.push_back(group[middleIndex]); } - else - calibrationWeight.setFunction(hdr::EFunctionType_stringToEnum(calibrationWeightFunction)); + /*Build exposure times table*/ + std::vector> groupedExposures; + for (int i = 0; i < groupedViews.size(); i++) { + const std::vector> & group = groupedViews[i]; + std::vector exposures; - for(int g = 0; g < nbGroups; ++g) - { - if(nbGroups > 1) - ALICEVISION_LOG_INFO("Group " << g << " : "); - - std::vector> &ldrImages = ldrImageGroups[g]; - ldrImages.resize(nbImages); - std::vector &ldrTimes = times[g]; - // std::vector &ldrEv = ev.at(0); - std::vector &inputImagesNames = imagesPaths[g]; - - std::vector stemImages(nbImages); - std::vector nameImages(nbImages); - std::vector metadatas(nbImages); - std::vector aperture; - std::vector colorSpace; - - for(int i=0; igetMetadataShutter(); + exposures.push_back(etime); } + groupedExposures.push_back(exposures); + } + /*Build table of file names*/ + std::vector> groupedFilenames; + for (int i = 0; i < groupedViews.size(); i++) { + const std::vector> & group = groupedViews[i]; - // assert that all images have the same aperture and same color space - if(std::adjacent_find(aperture.begin(), aperture.end(), std::not_equal_to()) != aperture.end()) - { - ALICEVISION_LOG_ERROR("Input images have different apertures."); - return EXIT_FAILURE; - } - if(std::adjacent_find(colorSpace.begin(), colorSpace.end(), std::not_equal_to()) != colorSpace.end()) - { - ALICEVISION_LOG_ERROR("Input images have different color spaces."); - return EXIT_FAILURE; + std::vector filenames; + + for (int j = 0; j < group.size(); j++) { + + filenames.push_back(group[j]->getImagePath()); } + 
groupedFilenames.push_back(filenames); + } + - std::vector &ldrTimes_sorted = times_sorted[g]; - ldrTimes_sorted = ldrTimes; - std::sort(ldrTimes_sorted.begin(), ldrTimes_sorted.end()); + size_t channelQuantization = std::pow(2, 10); + hdr::rgbCurve response(channelQuantization); + hdr::rgbCurve calibrationWeight(channelQuantization); + hdr::rgbCurve fusionWeight(channelQuantization); - // we sort the images according to their exposure time - ldrImageGroups_sorted[g].resize(nbImages); - for(int i=0; i::iterator it = std::find(ldrTimes.begin(), ldrTimes.end(), ldrTimes_sorted.at(i)); - if(it == ldrTimes.end()) - { - ALICEVISION_LOG_ERROR("sorting failed"); - return EXIT_FAILURE; - } - - ldrImageGroups_sorted[g].at(i) = ldrImages.at(std::distance(ldrTimes.begin(), it)); - } + const float lambda = channelQuantization * 1.f; + const int nbPoints = 10000; + bool fisheye = false; + fusionWeight.setFunction(hdr::EFunctionType::GAUSSIAN); + calibrationWeight.setTriangular(); - ldrImages.clear(); + hdr::DebevecCalibrate calibration; + calibration.process(groupedFilenames, channelQuantization, groupedExposures, nbPoints, fisheye, calibrationWeight, lambda, response); + response.exponential(); + response.scale(); - // find target exposure time corresponding to the image name given by user - if(!targets[g].empty()) - { - nameImages.insert(nameImages.end(), stemImages.begin(), stemImages.end()); - nameImages.insert(nameImages.end(), inputImagesNames.begin(), inputImagesNames.end()); //search target for filenames only, filenames + extensions and complete paths - std::vector::iterator it = std::find(nameImages.begin(), nameImages.end(), targets[g]); - if(it == nameImages.end()) - { - ALICEVISION_CERR("Invalid target name: \"" << targets[g] << "\""); - return EXIT_FAILURE; - } - - std::size_t targetIndex = std::distance(nameImages.begin(), it) % nbImages; - targetMetadatas[g] = metadatas[targetIndex]; - targetTimes[g] = ldrTimes.at(targetIndex); - } - else - { - targetTimes[g] = 
ldrTimes_sorted.at(ldrTimes_sorted.size()/2); - std::size_t targetIndex = std::distance(ldrTimes.begin(), std::find(ldrTimes.begin(), ldrTimes.end(), targetTimes[g])); - targets[g] = nameImages.at(targetIndex); - targetMetadatas[g] = metadatas[targetIndex]; - } - } + sfmData::SfMData outputSfm; + sfmData::Views & vs = outputSfm.getViews(); + outputSfm.getIntrinsics() = sfmData.getIntrinsics(); - // calculate the response function according to the method given in argument or take the response provided by the user - if(inputResponsePath.empty()) + for(int g = 0; g < groupedFilenames.size(); ++g) { - switch(calibrationMethod) - { - case ECalibrationMethod::LINEAR: - { - // set the response function to linear - response.setLinear(); - } - break; - - case ECalibrationMethod::DEBEVEC: - { - ALICEVISION_LOG_INFO("Debevec calibration"); - const float lambda = channelQuantization * 1.f; - const int nbPoints = 10000; - hdr::DebevecCalibrate calibration; - calibration.process(ldrImageGroups_sorted, channelQuantization, times_sorted, nbPoints, fisheye, calibrationWeight, lambda, response); - - response.exponential(); - response.scale(); - } - break; - - case ECalibrationMethod::ROBERTSON: - { - ALICEVISION_LOG_INFO("Robertson calibration"); - hdr::RobertsonCalibrate calibration(10); - const int nbPoints = 1000000; - calibration.process(ldrImageGroups_sorted, channelQuantization, times_sorted, nbPoints, fisheye, calibrationWeight, response); - response.scale(); - } - break; - - case ECalibrationMethod::GROSSBERG: - { - ALICEVISION_LOG_INFO("Grossberg calibration"); - const int nbPoints = 1000000; - hdr::GrossbergCalibrate calibration(3); - calibration.process(ldrImageGroups_sorted, channelQuantization, times_sorted, nbPoints, fisheye, response); - } - break; + std::vector> images(groupSize); + std::shared_ptr targetView = targetViews[g]; + if (targetView == nullptr) { + ALICEVISION_LOG_ERROR("Null view"); + return EXIT_FAILURE; } - } - if(!outputResponsePath.empty()) - { - 
std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); - if(fs::is_directory(fs::path(outputResponsePath))) - { - outputResponsePath = (fs::path(outputResponsePath) / (std::string("response_") + methodName + std::string(".csv"))).string(); + /* Load all images of the group */ + for (int i = 0; i < groupSize; i++) { + image::readImage(groupedFilenames[g][i], images[i], image::EImageColorSpace::SRGB); } - response.write(outputResponsePath); - ALICEVISION_LOG_INFO("Camera response function written as " << outputResponsePath); - } - - - for(int g = 0; g < nbGroups; ++g) - { - image::Image HDRimage(ldrImageGroups[g].front().Width(), ldrImageGroups[g].front().Height(), false); + + /**/ hdr::hdrMerge merge; - merge.process(ldrImageGroups_sorted[g], times_sorted[g], fusionWeight, response, HDRimage, targetTimes[g], false, clampedValueCorrection); + float targetTime = targetView->getMetadataShutter(); + image::Image HDRimage(targetView->getWidth(), targetView->getHeight(), false); + merge.process(images, groupedExposures[g], fusionWeight, response, HDRimage, targetTime, false, clampedValueCorrection); + + /*Output image file path*/ + std::string hdr_output_path; + std::stringstream sstream; + sstream << output_path << "/" << "hdr_" << std::setfill('0') << std::setw(4) << ".exr"; - image::writeImage(outputHDRImagesPath[g], HDRimage, image::EImageColorSpace::AUTO, targetMetadatas[g]); + /*Write an image with parameters from the target view*/ + oiio::ParamValueList targetMetadata = image::readImageMetadata(targetView->getImagePath()); + image::writeImage(sstream.str(), HDRimage, image::EImageColorSpace::AUTO, targetMetadata); - ALICEVISION_LOG_INFO("Successfull HDR fusion of " << nbImages << " LDR images centered on " << targets[g]); - ALICEVISION_LOG_INFO("HDR image written as " << outputHDRImagesPath[g]); + targetViews[g]->setImagePath(sstream.str()); + vs[targetViews[g]->getViewId()] = targetViews[g]; } + /* + Save output sfmdata + */ + if 
(!sfmDataIO::Save(sfmData, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) + { + ALICEVISION_LOG_ERROR("Can not save output sfm file at " << sfmOutputDataFilename); + return EXIT_FAILURE; + } return EXIT_SUCCESS; -} +} \ No newline at end of file From fd99ee21a603dc6422979252d363bf43ba46ea1a Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 08:45:48 +0200 Subject: [PATCH 089/174] less memory usage --- src/aliceVision/hdr/DebevecCalibrate.cpp | 200 +++++++++++------- src/aliceVision/hdr/DebevecCalibrate.hpp | 2 +- src/software/convert/main_convertLDRToHDR.cpp | 12 +- 3 files changed, 130 insertions(+), 84 deletions(-) diff --git a/src/aliceVision/hdr/DebevecCalibrate.cpp b/src/aliceVision/hdr/DebevecCalibrate.cpp index 5c930f0607..b26388a082 100644 --- a/src/aliceVision/hdr/DebevecCalibrate.cpp +++ b/src/aliceVision/hdr/DebevecCalibrate.cpp @@ -18,7 +18,7 @@ namespace hdr { using T = Eigen::Triplet; -void DebevecCalibrate::process(const std::vector< std::vector> & imagePathsGroups, +bool DebevecCalibrate::process(const std::vector< std::vector> & imagePathsGroups, const std::size_t channelQuantization, const std::vector > ×, const int nbPoints, @@ -31,138 +31,176 @@ void DebevecCalibrate::process(const std::vector< std::vector> & im const int nbImages = imagePathsGroups.front().size(); const int samplesPerImage = nbPoints / (nbGroups*nbImages); - //set channels count always RGB - static const std::size_t channels = 3; + /* + Always 3 channels for the input images ... 
+ */ + static const std::size_t channelsCount = 3; //initialize response response = rgbCurve(channelQuantization); - for(unsigned int channel=0; channel tripletList_array[channelsCount]; - std::vector tripletList; + /* Initialize intermediate buffers */ + for(unsigned int channel=0; channel < channelsCount; ++channel) { + Vec & b = b_array[channel]; + b = Vec::Zero(nbPoints + channelQuantization + 1); + std::vector & tripletList = tripletList_array[channel]; tripletList.reserve(2 * nbPoints + 1 + 3 * channelQuantization); + } - ALICEVISION_LOG_TRACE("filling A and b matrices"); + + size_t count = 0; + for (unsigned int g = 0; g < nbGroups; g++) + { + const std::vector &imagePaths = imagePathsGroups[g]; + std::vector> ldrImagesGroup(imagePaths.size()); + + for (int i = 0; i < imagePaths.size(); i++) { + image::readImage(imagePaths[i], ldrImagesGroup[i], image::EImageColorSpace::SRGB); + } - for(unsigned int g=0; g &imagePaths = imagePathsGroups[g]; - std::vector> ldrImagesGroup(imagePaths.size()); + const std::vector & ldrTimes = times[g]; + const std::size_t width = ldrImagesGroup.front().Width(); + const std::size_t height = ldrImagesGroup.front().Height(); - for (int i = 0; i < imagePaths.size(); i++) { - image::readImage(imagePaths[i], ldrImagesGroup[i], image::EImageColorSpace::SRGB); - } + /* + Include the data-fitting equations. 
+ If images are fisheye, we take only pixels inside a disk with a radius of image's minimum side + */ + if(fisheye) + { + const std::size_t minSize = std::min(width, height) * 0.97; + const Vec2i center(width/2, height/2); - const std::vector &ldrTimes = times[g]; - const std::size_t width = ldrImagesGroup.front().Width(); - const std::size_t height = ldrImagesGroup.front().Height(); + const int xMin = std::ceil(center(0) - minSize/2); + const int yMin = std::ceil(center(1) - minSize/2); + const int xMax = std::floor(center(0) + minSize/2); + const int yMax = std::floor(center(1) + minSize/2); + const std::size_t maxDist2 = pow(minSize * 0.5, 2); - // include the data-fitting equations - // if images are fisheye, we take only pixels inside a disk with a radius of image's minimum side - if(fisheye) + const int step = std::ceil(minSize / sqrt(samplesPerImage)); + for(unsigned int j=0; j &image = ldrImagesGroup.at(j); + const float time = std::log(ldrTimes.at(j)); + for(int y = yMin; y < yMax-step; y+=step) { - int countValidPixels = 0; - - const image::Image &image = ldrImagesGroup.at(j); - const float time = std::log(ldrTimes.at(j)); - for(int y = yMin; y < yMax-step; y+=step) + for(int x = xMin; x < xMax-step; x+=step) { - for(int x = xMin; x < xMax-step; x+=step) - { - std::size_t dist2 = pow(center(0)-x, 2) + pow(center(1)-y, 2); - if(dist2 > maxDist2) + std::size_t dist2 = pow(center(0)-x, 2) + pow(center(1)-y, 2); + + /*This looks stupid ...*/ + if(dist2 > maxDist2) { continue; + } + + for (int channel = 0; channel < channelsCount; channel++) { float sample = clamp(image(y, x)(channel), 0.f, 1.f); float w_ij = weight(sample, channel); std::size_t index = std::round(sample * (channelQuantization - 1)); - tripletList.push_back(T(count, index, w_ij)); - tripletList.push_back(T(count, channelQuantization + g*samplesPerImage + countValidPixels, -w_ij)); + tripletList_array[channel].push_back(T(count, index, w_ij)); + tripletList_array[channel].push_back(T(count, 
channelQuantization + g * samplesPerImage + countValidPixels, -w_ij)); - b(count) = w_ij * time; - count += 1; - countValidPixels += 1; + b_array[channel](count) = w_ij * time; } + + count += 1; + countValidPixels += 1; } } } - else + } + else + { + const int step = std::floor(width*height / samplesPerImage); + for(unsigned int j=0; j &image = ldrImagesGroup.at(j); + const float time = std::log(ldrTimes.at(j)); + + for(unsigned int i=0; i &image = ldrImagesGroup.at(j); - const float time = std::log(ldrTimes.at(j)); - for(unsigned int i=0; i> solver; solver.compute(A); - if(solver.info() != Eigen::Success) return; // decomposition failed - Vec x = solver.solve(b); - if(solver.info() != Eigen::Success) return; // solving failed - ALICEVISION_LOG_TRACE("system solved"); + /*Check solver failure*/ + if (solver.info() != Eigen::Success) { + return false; + } + + Vec x = solver.solve(b_array[channel]); + + /*Check solver failure*/ + if(solver.info() != Eigen::Success) { + return false; + } - double relative_error = (A*x - b).norm() / b.norm(); - ALICEVISION_LOG_DEBUG("relative error is : " << relative_error); + double relative_error = (A*x - b_array[channel]).norm() / b_array[channel].norm(); - for(std::size_t k=0; k> &ldrImageGroups, + bool process(const std::vector> &ldrImageGroups, const std::size_t channelQuantization, const std::vector> ×, const int nbPoints, diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 7b180fd6f7..cbc9ef2100 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -58,6 +58,8 @@ int main(int argc, char * argv[]) { ; po::options_description optionalParams("Optional parameters"); + optionalParams.add_options() + ("expandDynamicRange,e", po::value(&clampedValueCorrection)->default_value(clampedValueCorrection), "float value between 0 and 1 to correct clamped high values in dynamic range: use 0 for no correction, 0.5 for interior 
lighting and 1 for outdoor lighting."); po::options_description logParams("Log parameters"); logParams.add_options() @@ -234,7 +236,13 @@ int main(int argc, char * argv[]) { calibrationWeight.setTriangular(); hdr::DebevecCalibrate calibration; - calibration.process(groupedFilenames, channelQuantization, groupedExposures, nbPoints, fisheye, calibrationWeight, lambda, response); + if (!calibration.process(groupedFilenames, channelQuantization, groupedExposures, nbPoints, fisheye, calibrationWeight, lambda, response)) { + ALICEVISION_LOG_ERROR("Calibration failed."); + return EXIT_FAILURE; + } + + ALICEVISION_LOG_INFO("Calibration done."); + response.exponential(); response.scale(); @@ -268,7 +276,7 @@ int main(int argc, char * argv[]) { /*Output image file path*/ std::string hdr_output_path; std::stringstream sstream; - sstream << output_path << "/" << "hdr_" << std::setfill('0') << std::setw(4) << ".exr"; + sstream << output_path << "/" << "hdr_" << std::setfill('0') << std::setw(4) << g << ".exr"; /*Write an image with parameters from the target view*/ oiio::ParamValueList targetMetadata = image::readImageMetadata(targetView->getImagePath()); From 633f04698cfc13ca9a72c8e00a699ed78ec5aa4f Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 10:54:24 +0200 Subject: [PATCH 090/174] update to hdr --- src/aliceVision/hdr/DebevecCalibrate.cpp | 84 ++++--------------- src/aliceVision/hdr/hdrMerge.cpp | 45 +--------- src/software/convert/main_convertLDRToHDR.cpp | 2 +- .../pipeline/main_panoramaExternalInfo.cpp | 1 + .../pipeline/main_panoramaStitching.cpp | 82 ++++++++++++------ 5 files changed, 76 insertions(+), 138 deletions(-) diff --git a/src/aliceVision/hdr/DebevecCalibrate.cpp b/src/aliceVision/hdr/DebevecCalibrate.cpp index b26388a082..d1162b6e76 100644 --- a/src/aliceVision/hdr/DebevecCalibrate.cpp +++ b/src/aliceVision/hdr/DebevecCalibrate.cpp @@ -66,81 +66,27 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im const std::size_t 
width = ldrImagesGroup.front().Width(); const std::size_t height = ldrImagesGroup.front().Height(); - /* - Include the data-fitting equations. - If images are fisheye, we take only pixels inside a disk with a radius of image's minimum side - */ - if(fisheye) + + const int step = std::floor(width*height / samplesPerImage); + for(unsigned int j=0; j &image = ldrImagesGroup.at(j); + const float time = std::log(ldrTimes.at(j)); - const image::Image &image = ldrImagesGroup.at(j); - const float time = std::log(ldrTimes.at(j)); - for(int y = yMin; y < yMax-step; y+=step) - { - for(int x = xMin; x < xMax-step; x+=step) - { - std::size_t dist2 = pow(center(0)-x, 2) + pow(center(1)-y, 2); - - /*This looks stupid ...*/ - if(dist2 > maxDist2) { - continue; - } - - - for (int channel = 0; channel < channelsCount; channel++) { - float sample = clamp(image(y, x)(channel), 0.f, 1.f); - float w_ij = weight(sample, channel); - std::size_t index = std::round(sample * (channelQuantization - 1)); - - tripletList_array[channel].push_back(T(count, index, w_ij)); - tripletList_array[channel].push_back(T(count, channelQuantization + g * samplesPerImage + countValidPixels, -w_ij)); - - b_array[channel](count) = w_ij * time; - } - - count += 1; - countValidPixels += 1; - } - } - } - } - else - { - const int step = std::floor(width*height / samplesPerImage); - for(unsigned int j=0; j &image = ldrImagesGroup.at(j); - const float time = std::log(ldrTimes.at(j)); - - for(unsigned int i=0; i > &imag const std::size_t width = images.front().Width(); const std::size_t height = images.front().Height(); -// //min and max trusted values -// const float minTrustedValue = 0.0f - std::numeric_limits::epsilon(); -// const float maxTrustedValue = 1.0f + std::numeric_limits::epsilon(); - const float maxLum = 1000.0; const float minLum = 0.0001; @@ -52,18 +48,12 @@ void hdrMerge::process(const std::vector< image::Image > &imag //for each pixels image::RGBfColor &radianceColor = radiance(y, x); -// double 
highValue = images.at(0)(y, x).maxCoeff(); -// double lowValue = images.at(images.size()-1)(y, x).minCoeff(); - for(std::size_t channel = 0; channel < 3; ++channel) { double wsum = 0.0; double wdiv = 0.0; - double highValue = images.at(0)(y, x)(channel); - double lowValue = images.at(images.size()-1)(y, x)(channel); - -// float minTimeSaturation = std::numeric_limits::max(); -// float maxTimeSaturation = std::numeric_limits::min(); + double lowValue = images.at(0)(y, x)(channel); + double highValue = images.at(images.size()-1)(y, x)(channel); for(std::size_t i = 0; i < images.size(); ++i) @@ -86,43 +76,12 @@ void hdrMerge::process(const std::vector< image::Image > &imag if(!robCalibrate && clampedValueCorrection != 0.f) { radianceColor(channel) = (1.0 - clampedHighValue - clampedLowValue) * wsum / std::max(0.001, wdiv) * targetTime + clampedHighValue * maxLum * clampedValueCorrection + clampedLowValue * minLum * clampedValueCorrection; -// radianceColor(channel) = (1.0 - clampedHighValue - clampedLowValue) * wsum / std::max(0.001, wdiv) + clampedHighValue * maxLum * clampedValueCorrection + clampedLowValue * minLum * clampedValueCorrection; } else { radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetTime; -// radianceColor(channel) = wsum / std::max(0.001, wdiv); } - - -// //saturation detection -// if(value > maxTrustedValue) -// { -// minTimeSaturation = std::min(minTimeSaturation, time); -// } -// -// if(value < minTrustedValue) -// { -// maxTimeSaturation = std::max(maxTimeSaturation, time); -// } - -// //saturation correction -// if((wdiv == 0.0f) && -// (maxTimeSaturation > std::numeric_limits::min())) -// { -// wsum = minTrustedValue; -// wdiv = maxTimeSaturation; -// } -//2 -// if((wdiv == 0.0f) && -// (minTimeSaturation < std::numeric_limits::max())) -// { -// wsum = maxTrustedValue; -// wdiv = minTimeSaturation; -// } - } - } } } diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 
cbc9ef2100..fc118c6f7e 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -289,7 +289,7 @@ int main(int argc, char * argv[]) { /* Save output sfmdata */ - if (!sfmDataIO::Save(sfmData, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) + if (!sfmDataIO::Save(outputSfm, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) { ALICEVISION_LOG_ERROR("Can not save output sfm file at " << sfmOutputDataFilename); return EXIT_FAILURE; diff --git a/src/software/pipeline/main_panoramaExternalInfo.cpp b/src/software/pipeline/main_panoramaExternalInfo.cpp index e73e512401..59072e1508 100644 --- a/src/software/pipeline/main_panoramaExternalInfo.cpp +++ b/src/software/pipeline/main_panoramaExternalInfo.cpp @@ -153,6 +153,7 @@ int main(int argc, char * argv[]) { if (sfmData.getViews().size() != rotations.size()) { ALICEVISION_LOG_ERROR("The input SfMData has an incorrect number of views."); + ALICEVISION_LOG_ERROR("Input has " << sfmData.getViews().size() << ", xml has " << rotations.size()); return EXIT_FAILURE; } diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 16861d3b5f..4c3f16aa61 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -17,6 +17,9 @@ #include #include +/*IO*/ +#include + // These constants define the current software version. // They must be updated when the command line is changed. 
#define ALICEVISION_SOFTWARE_VERSION_MAJOR 1 @@ -970,6 +973,25 @@ class Warper { return _real_height; } + void dump(std::ofstream & output) { + output.write((char*)&_offset_x, sizeof(_offset_x)); + output.write((char*)&_offset_y, sizeof(_offset_y)); + output.write((char*)&_real_width, sizeof(_real_width)); + output.write((char*)&_real_height, sizeof(_real_height)); + + char * ptr_color = (char*)(_color.data()); + for (int i = 0; i < _real_height; i++) { + output.write(ptr_color, _real_width * sizeof(decltype(_color)::Scalar)); + ptr_color += _color.rowStride(); + } + + char * ptr_mask = (char*)(_mask.data()); + for (int i = 0; i < _real_height; i++) { + output.write(ptr_mask, _real_width * sizeof(decltype(_mask)::Scalar)); + ptr_mask += _mask.rowStride(); + } + } + protected: size_t _offset_x = 0; size_t _offset_y = 0; @@ -1502,7 +1524,6 @@ int main(int argc, char **argv) { optionalParams.add_options() ("panoramaWidth,w", po::value(&panoramaSize.first)->default_value(panoramaSize.first), "Panorama Width in pixels."); allParams.add(optionalParams); - panoramaSize.second = panoramaSize.first / 2; /** * Setup log level given command line @@ -1564,31 +1585,46 @@ int main(int argc, char **argv) { return EXIT_FAILURE; } + + /*Order views by their image names for easier debugging*/ + std::vector> viewsOrderedByName; + for (auto & viewIt: sfmData.getViews()) { + viewsOrderedByName.push_back(viewIt.second); + } + std::sort(viewsOrderedByName.begin(), viewsOrderedByName.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { + if (a == nullptr || b == nullptr) return true; + return (a->getImagePath() < b->getImagePath()); + }); + + /*If panorama width is undefined, estimate it*/ if (panoramaSize.first <= 0) { std::pair optimalPanoramaSize; if (computeOptimalPanoramaSize(optimalPanoramaSize, sfmData)) { - ALICEVISION_LOG_INFO("Optimal panorama size : " << optimalPanoramaSize.first << "x" << optimalPanoramaSize.second); panoramaSize = optimalPanoramaSize; } } 
+ else { + panoramaSize.second = panoramaSize.first / 2; + } + + ALICEVISION_LOG_INFO("Choosen panorama size : " << panoramaSize.first << "x" << panoramaSize.second); /** * Create compositer */ - AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** * Preprocessing per view */ size_t pos = 0; - for (auto & viewIt: sfmData.getViews()) { - - /*if (pos == 12) */{ + for (const std::shared_ptr & viewIt: viewsOrderedByName) { + /** * Retrieve view */ - const sfmData::View& view = *viewIt.second.get(); + const sfmData::View& view = *viewIt; if (!sfmData.isPoseAndIntrinsicDefined(&view)) { continue; } @@ -1610,46 +1646,42 @@ int main(int argc, char **argv) { /** * Load image and convert it to linear colorspace */ - ALICEVISION_LOG_INFO("Load image\n"); std::string imagePath = view.getImagePath(); + ALICEVISION_LOG_INFO("Load image with path " << imagePath); image::Image source; image::readImage(imagePath, source, image::EImageColorSpace::LINEAR); - + + /** * Warp image */ - ALICEVISION_LOG_INFO("Warp\n"); GaussianWarper warper; warper.warp(map, source); - /*{ - const aliceVision::image::Image & panorama = warper.getColor(); - char filename[512]; - sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); - }*/ + /*std::ofstream out("/home/mmoc/test.dat", std::ios::binary); + warper.dump(out); + out.close();*/ + /** + * Alpha mask + */ AlphaBuilder alphabuilder; alphabuilder.build(map, intrinsic); /** *Composite image into output */ - ALICEVISION_LOG_INFO("Composite\n"); + ALICEVISION_LOG_INFO("Composite to final panorama\n"); compositer.append(warper, alphabuilder); - } pos++; } - { - ALICEVISION_LOG_INFO("Save\n"); - const aliceVision::image::Image & panorama = compositer.getPanorama(); - char filename[512]; - sprintf(filename, "%s.exr", outputPanorama.c_str()); - 
image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); - } + + ALICEVISION_LOG_INFO("Save final panoram\n"); + const aliceVision::image::Image & panorama = compositer.getPanorama(); + image::writeImage(outputPanorama, panorama, image::EImageColorSpace::SRGB); return EXIT_SUCCESS; } \ No newline at end of file From e68fa670b1fbb83ad9ccf991a57760ff1a9b4e16 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 14:28:25 +0200 Subject: [PATCH 091/174] Revert "update to hdr" This reverts commit 633f04698cfc13ca9a72c8e00a699ed78ec5aa4f. --- src/aliceVision/hdr/DebevecCalibrate.cpp | 84 +++++++++++++++---- src/aliceVision/hdr/hdrMerge.cpp | 45 +++++++++- src/software/convert/main_convertLDRToHDR.cpp | 2 +- .../pipeline/main_panoramaExternalInfo.cpp | 1 - .../pipeline/main_panoramaStitching.cpp | 82 ++++++------------ 5 files changed, 138 insertions(+), 76 deletions(-) diff --git a/src/aliceVision/hdr/DebevecCalibrate.cpp b/src/aliceVision/hdr/DebevecCalibrate.cpp index d1162b6e76..b26388a082 100644 --- a/src/aliceVision/hdr/DebevecCalibrate.cpp +++ b/src/aliceVision/hdr/DebevecCalibrate.cpp @@ -66,27 +66,81 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im const std::size_t width = ldrImagesGroup.front().Width(); const std::size_t height = ldrImagesGroup.front().Height(); - - const int step = std::floor(width*height / samplesPerImage); - for(unsigned int j=0; j &image = ldrImagesGroup.at(j); - const float time = std::log(ldrTimes.at(j)); + const std::size_t minSize = std::min(width, height) * 0.97; + const Vec2i center(width/2, height/2); + + const int xMin = std::ceil(center(0) - minSize/2); + const int yMin = std::ceil(center(1) - minSize/2); + const int xMax = std::floor(center(0) + minSize/2); + const int yMax = std::floor(center(1) + minSize/2); + const std::size_t maxDist2 = pow(minSize * 0.5, 2); + + const int step = std::ceil(minSize / sqrt(samplesPerImage)); + for(unsigned int j=0; j &image = ldrImagesGroup.at(j); 
+ const float time = std::log(ldrTimes.at(j)); + for(int y = yMin; y < yMax-step; y+=step) + { + for(int x = xMin; x < xMax-step; x+=step) + { + std::size_t dist2 = pow(center(0)-x, 2) + pow(center(1)-y, 2); + + /*This looks stupid ...*/ + if(dist2 > maxDist2) { + continue; + } + + + for (int channel = 0; channel < channelsCount; channel++) { + float sample = clamp(image(y, x)(channel), 0.f, 1.f); + float w_ij = weight(sample, channel); + std::size_t index = std::round(sample * (channelQuantization - 1)); + + tripletList_array[channel].push_back(T(count, index, w_ij)); + tripletList_array[channel].push_back(T(count, channelQuantization + g * samplesPerImage + countValidPixels, -w_ij)); + + b_array[channel](count) = w_ij * time; + } + + count += 1; + countValidPixels += 1; + } + } + } + } + else + { + const int step = std::floor(width*height / samplesPerImage); + for(unsigned int j=0; j &image = ldrImagesGroup.at(j); + const float time = std::log(ldrTimes.at(j)); + + for(unsigned int i=0; i > &imag const std::size_t width = images.front().Width(); const std::size_t height = images.front().Height(); +// //min and max trusted values +// const float minTrustedValue = 0.0f - std::numeric_limits::epsilon(); +// const float maxTrustedValue = 1.0f + std::numeric_limits::epsilon(); + const float maxLum = 1000.0; const float minLum = 0.0001; @@ -48,12 +52,18 @@ void hdrMerge::process(const std::vector< image::Image > &imag //for each pixels image::RGBfColor &radianceColor = radiance(y, x); +// double highValue = images.at(0)(y, x).maxCoeff(); +// double lowValue = images.at(images.size()-1)(y, x).minCoeff(); + for(std::size_t channel = 0; channel < 3; ++channel) { double wsum = 0.0; double wdiv = 0.0; - double lowValue = images.at(0)(y, x)(channel); - double highValue = images.at(images.size()-1)(y, x)(channel); + double highValue = images.at(0)(y, x)(channel); + double lowValue = images.at(images.size()-1)(y, x)(channel); + +// float minTimeSaturation = 
std::numeric_limits::max(); +// float maxTimeSaturation = std::numeric_limits::min(); for(std::size_t i = 0; i < images.size(); ++i) @@ -76,12 +86,43 @@ void hdrMerge::process(const std::vector< image::Image > &imag if(!robCalibrate && clampedValueCorrection != 0.f) { radianceColor(channel) = (1.0 - clampedHighValue - clampedLowValue) * wsum / std::max(0.001, wdiv) * targetTime + clampedHighValue * maxLum * clampedValueCorrection + clampedLowValue * minLum * clampedValueCorrection; +// radianceColor(channel) = (1.0 - clampedHighValue - clampedLowValue) * wsum / std::max(0.001, wdiv) + clampedHighValue * maxLum * clampedValueCorrection + clampedLowValue * minLum * clampedValueCorrection; } else { radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetTime; +// radianceColor(channel) = wsum / std::max(0.001, wdiv); } + + +// //saturation detection +// if(value > maxTrustedValue) +// { +// minTimeSaturation = std::min(minTimeSaturation, time); +// } +// +// if(value < minTrustedValue) +// { +// maxTimeSaturation = std::max(maxTimeSaturation, time); +// } + +// //saturation correction +// if((wdiv == 0.0f) && +// (maxTimeSaturation > std::numeric_limits::min())) +// { +// wsum = minTrustedValue; +// wdiv = maxTimeSaturation; +// } +//2 +// if((wdiv == 0.0f) && +// (minTimeSaturation < std::numeric_limits::max())) +// { +// wsum = maxTrustedValue; +// wdiv = minTimeSaturation; +// } + } + } } } diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index fc118c6f7e..cbc9ef2100 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -289,7 +289,7 @@ int main(int argc, char * argv[]) { /* Save output sfmdata */ - if (!sfmDataIO::Save(outputSfm, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) + if (!sfmDataIO::Save(sfmData, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) { 
ALICEVISION_LOG_ERROR("Can not save output sfm file at " << sfmOutputDataFilename); return EXIT_FAILURE; diff --git a/src/software/pipeline/main_panoramaExternalInfo.cpp b/src/software/pipeline/main_panoramaExternalInfo.cpp index 59072e1508..e73e512401 100644 --- a/src/software/pipeline/main_panoramaExternalInfo.cpp +++ b/src/software/pipeline/main_panoramaExternalInfo.cpp @@ -153,7 +153,6 @@ int main(int argc, char * argv[]) { if (sfmData.getViews().size() != rotations.size()) { ALICEVISION_LOG_ERROR("The input SfMData has an incorrect number of views."); - ALICEVISION_LOG_ERROR("Input has " << sfmData.getViews().size() << ", xml has " << rotations.size()); return EXIT_FAILURE; } diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 4c3f16aa61..16861d3b5f 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -17,9 +17,6 @@ #include #include -/*IO*/ -#include - // These constants define the current software version. // They must be updated when the command line is changed. 
#define ALICEVISION_SOFTWARE_VERSION_MAJOR 1 @@ -973,25 +970,6 @@ class Warper { return _real_height; } - void dump(std::ofstream & output) { - output.write((char*)&_offset_x, sizeof(_offset_x)); - output.write((char*)&_offset_y, sizeof(_offset_y)); - output.write((char*)&_real_width, sizeof(_real_width)); - output.write((char*)&_real_height, sizeof(_real_height)); - - char * ptr_color = (char*)(_color.data()); - for (int i = 0; i < _real_height; i++) { - output.write(ptr_color, _real_width * sizeof(decltype(_color)::Scalar)); - ptr_color += _color.rowStride(); - } - - char * ptr_mask = (char*)(_mask.data()); - for (int i = 0; i < _real_height; i++) { - output.write(ptr_mask, _real_width * sizeof(decltype(_mask)::Scalar)); - ptr_mask += _mask.rowStride(); - } - } - protected: size_t _offset_x = 0; size_t _offset_y = 0; @@ -1524,6 +1502,7 @@ int main(int argc, char **argv) { optionalParams.add_options() ("panoramaWidth,w", po::value(&panoramaSize.first)->default_value(panoramaSize.first), "Panorama Width in pixels."); allParams.add(optionalParams); + panoramaSize.second = panoramaSize.first / 2; /** * Setup log level given command line @@ -1585,46 +1564,31 @@ int main(int argc, char **argv) { return EXIT_FAILURE; } - - /*Order views by their image names for easier debugging*/ - std::vector> viewsOrderedByName; - for (auto & viewIt: sfmData.getViews()) { - viewsOrderedByName.push_back(viewIt.second); - } - std::sort(viewsOrderedByName.begin(), viewsOrderedByName.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { - if (a == nullptr || b == nullptr) return true; - return (a->getImagePath() < b->getImagePath()); - }); - - /*If panorama width is undefined, estimate it*/ if (panoramaSize.first <= 0) { std::pair optimalPanoramaSize; if (computeOptimalPanoramaSize(optimalPanoramaSize, sfmData)) { + ALICEVISION_LOG_INFO("Optimal panorama size : " << optimalPanoramaSize.first << "x" << optimalPanoramaSize.second); panoramaSize = optimalPanoramaSize; } } 
- else { - panoramaSize.second = panoramaSize.first / 2; - } - - ALICEVISION_LOG_INFO("Choosen panorama size : " << panoramaSize.first << "x" << panoramaSize.second); /** * Create compositer */ - LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** * Preprocessing per view */ size_t pos = 0; - for (const std::shared_ptr & viewIt: viewsOrderedByName) { - + for (auto & viewIt: sfmData.getViews()) { + + /*if (pos == 12) */{ /** * Retrieve view */ - const sfmData::View& view = *viewIt; + const sfmData::View& view = *viewIt.second.get(); if (!sfmData.isPoseAndIntrinsicDefined(&view)) { continue; } @@ -1646,42 +1610,46 @@ int main(int argc, char **argv) { /** * Load image and convert it to linear colorspace */ + ALICEVISION_LOG_INFO("Load image\n"); std::string imagePath = view.getImagePath(); - ALICEVISION_LOG_INFO("Load image with path " << imagePath); image::Image source; image::readImage(imagePath, source, image::EImageColorSpace::LINEAR); - - + /** * Warp image */ + ALICEVISION_LOG_INFO("Warp\n"); GaussianWarper warper; warper.warp(map, source); - /*std::ofstream out("/home/mmoc/test.dat", std::ios::binary); - warper.dump(out); - out.close();*/ + /*{ + const aliceVision::image::Image & panorama = warper.getColor(); + char filename[512]; + sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); + image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); + }*/ - /** - * Alpha mask - */ AlphaBuilder alphabuilder; alphabuilder.build(map, intrinsic); /** *Composite image into output */ - ALICEVISION_LOG_INFO("Composite to final panorama\n"); + ALICEVISION_LOG_INFO("Composite\n"); compositer.append(warper, alphabuilder); + } pos++; } - - ALICEVISION_LOG_INFO("Save final panoram\n"); - const aliceVision::image::Image & panorama = compositer.getPanorama(); - image::writeImage(outputPanorama, panorama, 
image::EImageColorSpace::SRGB); + { + ALICEVISION_LOG_INFO("Save\n"); + const aliceVision::image::Image & panorama = compositer.getPanorama(); + char filename[512]; + sprintf(filename, "%s.exr", outputPanorama.c_str()); + image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); + } return EXIT_SUCCESS; } \ No newline at end of file From 18704547c7c38bad1ff2a08662ec1228f9519048 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 14:30:02 +0200 Subject: [PATCH 092/174] Correct an error with the output sfm --- src/software/convert/main_convertLDRToHDR.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index cbc9ef2100..fc118c6f7e 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -289,7 +289,7 @@ int main(int argc, char * argv[]) { /* Save output sfmdata */ - if (!sfmDataIO::Save(sfmData, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) + if (!sfmDataIO::Save(outputSfm, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) { ALICEVISION_LOG_ERROR("Can not save output sfm file at " << sfmOutputDataFilename); return EXIT_FAILURE; From 95bb84579c7a30d3ac49a10d7c14707e6566ce38 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 14:32:04 +0200 Subject: [PATCH 093/174] Some bug in stitching --- .../pipeline/main_panoramaStitching.cpp | 82 +++++++++++++------ 1 file changed, 57 insertions(+), 25 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 16861d3b5f..4c3f16aa61 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -17,6 +17,9 @@ #include #include +/*IO*/ +#include + // These constants define the current software version. 
// They must be updated when the command line is changed. #define ALICEVISION_SOFTWARE_VERSION_MAJOR 1 @@ -970,6 +973,25 @@ class Warper { return _real_height; } + void dump(std::ofstream & output) { + output.write((char*)&_offset_x, sizeof(_offset_x)); + output.write((char*)&_offset_y, sizeof(_offset_y)); + output.write((char*)&_real_width, sizeof(_real_width)); + output.write((char*)&_real_height, sizeof(_real_height)); + + char * ptr_color = (char*)(_color.data()); + for (int i = 0; i < _real_height; i++) { + output.write(ptr_color, _real_width * sizeof(decltype(_color)::Scalar)); + ptr_color += _color.rowStride(); + } + + char * ptr_mask = (char*)(_mask.data()); + for (int i = 0; i < _real_height; i++) { + output.write(ptr_mask, _real_width * sizeof(decltype(_mask)::Scalar)); + ptr_mask += _mask.rowStride(); + } + } + protected: size_t _offset_x = 0; size_t _offset_y = 0; @@ -1502,7 +1524,6 @@ int main(int argc, char **argv) { optionalParams.add_options() ("panoramaWidth,w", po::value(&panoramaSize.first)->default_value(panoramaSize.first), "Panorama Width in pixels."); allParams.add(optionalParams); - panoramaSize.second = panoramaSize.first / 2; /** * Setup log level given command line @@ -1564,31 +1585,46 @@ int main(int argc, char **argv) { return EXIT_FAILURE; } + + /*Order views by their image names for easier debugging*/ + std::vector> viewsOrderedByName; + for (auto & viewIt: sfmData.getViews()) { + viewsOrderedByName.push_back(viewIt.second); + } + std::sort(viewsOrderedByName.begin(), viewsOrderedByName.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { + if (a == nullptr || b == nullptr) return true; + return (a->getImagePath() < b->getImagePath()); + }); + + /*If panorama width is undefined, estimate it*/ if (panoramaSize.first <= 0) { std::pair optimalPanoramaSize; if (computeOptimalPanoramaSize(optimalPanoramaSize, sfmData)) { - ALICEVISION_LOG_INFO("Optimal panorama size : " << optimalPanoramaSize.first << "x" << 
optimalPanoramaSize.second); panoramaSize = optimalPanoramaSize; } } + else { + panoramaSize.second = panoramaSize.first / 2; + } + + ALICEVISION_LOG_INFO("Choosen panorama size : " << panoramaSize.first << "x" << panoramaSize.second); /** * Create compositer */ - AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** * Preprocessing per view */ size_t pos = 0; - for (auto & viewIt: sfmData.getViews()) { - - /*if (pos == 12) */{ + for (const std::shared_ptr & viewIt: viewsOrderedByName) { + /** * Retrieve view */ - const sfmData::View& view = *viewIt.second.get(); + const sfmData::View& view = *viewIt; if (!sfmData.isPoseAndIntrinsicDefined(&view)) { continue; } @@ -1610,46 +1646,42 @@ int main(int argc, char **argv) { /** * Load image and convert it to linear colorspace */ - ALICEVISION_LOG_INFO("Load image\n"); std::string imagePath = view.getImagePath(); + ALICEVISION_LOG_INFO("Load image with path " << imagePath); image::Image source; image::readImage(imagePath, source, image::EImageColorSpace::LINEAR); - + + /** * Warp image */ - ALICEVISION_LOG_INFO("Warp\n"); GaussianWarper warper; warper.warp(map, source); - /*{ - const aliceVision::image::Image & panorama = warper.getColor(); - char filename[512]; - sprintf(filename, "%s_source_%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); - }*/ + /*std::ofstream out("/home/mmoc/test.dat", std::ios::binary); + warper.dump(out); + out.close();*/ + /** + * Alpha mask + */ AlphaBuilder alphabuilder; alphabuilder.build(map, intrinsic); /** *Composite image into output */ - ALICEVISION_LOG_INFO("Composite\n"); + ALICEVISION_LOG_INFO("Composite to final panorama\n"); compositer.append(warper, alphabuilder); - } pos++; } - { - ALICEVISION_LOG_INFO("Save\n"); - const aliceVision::image::Image & panorama = compositer.getPanorama(); - char 
filename[512]; - sprintf(filename, "%s.exr", outputPanorama.c_str()); - image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); - } + + ALICEVISION_LOG_INFO("Save final panoram\n"); + const aliceVision::image::Image & panorama = compositer.getPanorama(); + image::writeImage(outputPanorama, panorama, image::EImageColorSpace::SRGB); return EXIT_SUCCESS; } \ No newline at end of file From 802571c2f0182ef1bc5a4a30d5ff0818cbb265f9 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 14:33:41 +0200 Subject: [PATCH 094/174] remove commented code --- src/aliceVision/hdr/hdrMerge.cpp | 40 -------------------------------- 1 file changed, 40 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index c7c330c7d0..3553a20b83 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -37,10 +37,6 @@ void hdrMerge::process(const std::vector< image::Image > &imag const std::size_t width = images.front().Width(); const std::size_t height = images.front().Height(); -// //min and max trusted values -// const float minTrustedValue = 0.0f - std::numeric_limits::epsilon(); -// const float maxTrustedValue = 1.0f + std::numeric_limits::epsilon(); - const float maxLum = 1000.0; const float minLum = 0.0001; @@ -52,9 +48,6 @@ void hdrMerge::process(const std::vector< image::Image > &imag //for each pixels image::RGBfColor &radianceColor = radiance(y, x); -// double highValue = images.at(0)(y, x).maxCoeff(); -// double lowValue = images.at(images.size()-1)(y, x).minCoeff(); - for(std::size_t channel = 0; channel < 3; ++channel) { double wsum = 0.0; @@ -62,9 +55,6 @@ void hdrMerge::process(const std::vector< image::Image > &imag double highValue = images.at(0)(y, x)(channel); double lowValue = images.at(images.size()-1)(y, x)(channel); -// float minTimeSaturation = std::numeric_limits::max(); -// float maxTimeSaturation = std::numeric_limits::min(); - for(std::size_t i = 0; i < images.size(); ++i) 
{ @@ -86,41 +76,11 @@ void hdrMerge::process(const std::vector< image::Image > &imag if(!robCalibrate && clampedValueCorrection != 0.f) { radianceColor(channel) = (1.0 - clampedHighValue - clampedLowValue) * wsum / std::max(0.001, wdiv) * targetTime + clampedHighValue * maxLum * clampedValueCorrection + clampedLowValue * minLum * clampedValueCorrection; -// radianceColor(channel) = (1.0 - clampedHighValue - clampedLowValue) * wsum / std::max(0.001, wdiv) + clampedHighValue * maxLum * clampedValueCorrection + clampedLowValue * minLum * clampedValueCorrection; } else { radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetTime; -// radianceColor(channel) = wsum / std::max(0.001, wdiv); } - - -// //saturation detection -// if(value > maxTrustedValue) -// { -// minTimeSaturation = std::min(minTimeSaturation, time); -// } -// -// if(value < minTrustedValue) -// { -// maxTimeSaturation = std::max(maxTimeSaturation, time); -// } - -// //saturation correction -// if((wdiv == 0.0f) && -// (maxTimeSaturation > std::numeric_limits::min())) -// { -// wsum = minTrustedValue; -// wdiv = maxTimeSaturation; -// } -//2 -// if((wdiv == 0.0f) && -// (minTimeSaturation < std::numeric_limits::max())) -// { -// wsum = maxTrustedValue; -// wdiv = minTimeSaturation; -// } - } } From a028475356ce739053310290e48d7a4be052ef71 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 14:57:40 +0200 Subject: [PATCH 095/174] Replace laplacian --- src/software/pipeline/main_panoramaStitching.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 4c3f16aa61..a9bd3d30fd 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -1613,7 +1613,7 @@ int main(int argc, char **argv) { /** * Create compositer */ - LaplacianCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + 
AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); /** * Preprocessing per view From fd795d1733b1cd6c273eb6f318c5f2d865c7be2d Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 16:11:57 +0200 Subject: [PATCH 096/174] Handle rotated images for unknown reason --- src/software/convert/main_convertLDRToHDR.cpp | 101 ++++++++++++++++-- 1 file changed, 95 insertions(+), 6 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index fc118c6f7e..e19987eca4 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -109,8 +109,12 @@ int main(int argc, char * argv[]) { return EXIT_FAILURE; } + /*Analyze path*/ + boost::filesystem::path path(sfmOutputDataFilename); + std::string output_path = path.parent_path().string(); + /** - * Update sfm accordingly + * Read sfm data */ sfmData::SfMData sfmData; if(!sfmDataIO::Load(sfmData, sfmInputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS | sfmDataIO::INTRINSICS))) { @@ -132,23 +136,108 @@ int main(int argc, char * argv[]) { size_t countGroups = countImages / groupSize; /*Make sure there is only one kind of image in dataset*/ - if (sfmData.getIntrinsics().size() != 1) { + if (sfmData.getIntrinsics().size() > 2) { ALICEVISION_LOG_ERROR("Multiple intrinsics : Different kind of images in dataset"); return EXIT_FAILURE; } - /*Order views by their image names*/ + /*If two intrinsics, may be some images are simply rotated*/ + if (sfmData.getIntrinsics().size() == 2) { + const sfmData::Intrinsics & intrinsics = sfmData.getIntrinsics(); + + unsigned int w = intrinsics.begin()->second->w(); + unsigned int h = intrinsics.begin()->second->h(); + unsigned int rw = intrinsics.rbegin()->second->w(); + unsigned int rh = intrinsics.rbegin()->second->h(); + + if (w != rh || h != rw) { + ALICEVISION_LOG_ERROR("Multiple intrinsics : Different kind of images in dataset"); + return 
EXIT_FAILURE; + } + + IndexT firstId = intrinsics.begin()->first; + IndexT secondId = intrinsics.rbegin()->first; + + size_t first = 0; + size_t second = 0; + sfmData::Views & views = sfmData.getViews(); + for (const auto & v : views) { + if (v.second->getIntrinsicId() == firstId) { + first++; + } + else { + second++; + } + } + + /* Set all view with the majority intrinsics */ + if (first > second) { + for (const auto & v : views) { + v.second->setIntrinsicId(firstId); + } + + sfmData.getIntrinsics().erase(secondId); + } + else { + for (const auto & v : views) { + v.second->setIntrinsicId(secondId); + } + + sfmData.getIntrinsics().erase(firstId); + } + } + + /* Rotate needed images */ + { + const sfmData::Intrinsics & intrinsics = sfmData.getIntrinsics(); + unsigned int w = intrinsics.begin()->second->w(); + unsigned int h = intrinsics.begin()->second->h(); + + sfmData::Views & views = sfmData.getViews(); + for (auto & v : views) { + if (v.second->getWidth() == h && v.second->getHeight() == w) { + ALICEVISION_LOG_INFO("Create intermediate rotated image !"); + + /*Read original image*/ + image::Image originalImage; + image::readImage(v.second->getImagePath(), originalImage, image::EImageColorSpace::SRGB); + + /*Create a rotated image*/ + image::Image rotated(w, h); + for (int k = 0; k < h; k++) { + for (int l = 0; l < w; l++) { + rotated(k, l) = originalImage(l, rotated.Height() - 1 - k); + } + } + + boost::filesystem::path old_path(v.second->getImagePath()); + std::string old_filename = old_path.stem().string(); + + /*Save this image*/ + std::stringstream sstream; + sstream << output_path << "/" << old_filename << ".exr"; + oiio::ParamValueList metadata = image::readImageMetadata(v.second->getImagePath()); + image::writeImage(sstream.str(), rotated, image::EImageColorSpace::AUTO, metadata); + + v.second->setImagePath(sstream.str()); + } + } + } + + /*Order views by their image names (without path and extension to make sure we handle rotated images) */ std::vector> 
viewsOrderedByName; for (auto & viewIt: sfmData.getViews()) { viewsOrderedByName.push_back(viewIt.second); } std::sort(viewsOrderedByName.begin(), viewsOrderedByName.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { if (a == nullptr || b == nullptr) return true; - return (a->getImagePath() < b->getImagePath()); + + boost::filesystem::path path_a(a->getImagePath()); + boost::filesystem::path path_b(b->getImagePath()); + + return (path_a.stem().string() < path_b.stem().string()); }); - boost::filesystem::path path(sfmOutputDataFilename); - std::string output_path = path.parent_path().string(); /* From 5d748a1b2e202a9516b8951517b08c7cd5600a8e Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 25 Oct 2019 16:45:13 +0200 Subject: [PATCH 097/174] forgot to update image size after rotation --- src/aliceVision/hdr/DebevecCalibrate.cpp | 2 ++ src/software/convert/main_convertLDRToHDR.cpp | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/src/aliceVision/hdr/DebevecCalibrate.cpp b/src/aliceVision/hdr/DebevecCalibrate.cpp index b26388a082..c1ebc3b06d 100644 --- a/src/aliceVision/hdr/DebevecCalibrate.cpp +++ b/src/aliceVision/hdr/DebevecCalibrate.cpp @@ -55,10 +55,12 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im size_t count = 0; for (unsigned int g = 0; g < nbGroups; g++) { + const std::vector &imagePaths = imagePathsGroups[g]; std::vector> ldrImagesGroup(imagePaths.size()); for (int i = 0; i < imagePaths.size(); i++) { + ALICEVISION_LOG_INFO("Load " << imagePaths[i]); image::readImage(imagePaths[i], ldrImagesGroup[i], image::EImageColorSpace::SRGB); } diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index e19987eca4..58bcdf4124 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -219,6 +219,9 @@ int main(int argc, char * argv[]) { oiio::ParamValueList metadata = 
image::readImageMetadata(v.second->getImagePath()); image::writeImage(sstream.str(), rotated, image::EImageColorSpace::AUTO, metadata); + /*Update view for this modification*/ + v.second->setWidth(w); + v.second->setHeight(h); v.second->setImagePath(sstream.str()); } } @@ -351,6 +354,7 @@ int main(int argc, char * argv[]) { /* Load all images of the group */ for (int i = 0; i < groupSize; i++) { + ALICEVISION_LOG_INFO("Load " << groupedFilenames[g][i]); image::readImage(groupedFilenames[g][i], images[i], image::EImageColorSpace::SRGB); } From 3547e8361fc71cc2c322bf0f77ef9f581d166931 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 1 Nov 2019 13:36:00 +0100 Subject: [PATCH 098/174] [software] stitching: minor change for openimageio 1.8.12 Do we need an ifdef for other version? --- src/software/pipeline/main_panoramaStitching.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index a9bd3d30fd..796b0e2ee2 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -377,7 +377,8 @@ class GaussianPyramidNoMask { /** * Kernel */ - oiio::ImageBuf K = oiio::ImageBufAlgo::make_kernel("gaussian", 5, 5); + oiio::ImageBuf K; + oiio::ImageBufAlgo::make_kernel(K, "gaussian", 5, 5); /** * Build pyramid From 0f6ee909e9d32b5b4c6980992ac6bf6c94e31e2b Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 1 Nov 2019 13:37:11 +0100 Subject: [PATCH 099/174] [software] stitching: windows build compatibility (disable openmp on windows) --- src/software/pipeline/main_panoramaStitching.cpp | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 796b0e2ee2..ed530be516 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ 
b/src/software/pipeline/main_panoramaStitching.cpp @@ -20,6 +20,9 @@ /*IO*/ #include +#include + + // These constants define the current software version. // They must be updated when the command line is changed. #define ALICEVISION_SOFTWARE_VERSION_MAJOR 1 @@ -469,8 +472,13 @@ class CoordinatesMap { size_t max_y = 0; size_t min_x = panoramaSize.first; size_t min_y = panoramaSize.second; - + +#ifdef _MSC_VER + // TODO + // no support for reduction min in MSVC implementation of openmp +#else #pragma omp parallel for reduction(min: min_x, min_y) reduction(max: max_x, max_y) +#endif for (size_t y = 0; y < coarse_bbox.height; y++) { size_t cy = y + coarse_bbox.top; From b2444a4af06fa5f47c8f7998261d71f5e49f5c6a Mon Sep 17 00:00:00 2001 From: fabienservant Date: Mon, 4 Nov 2019 14:08:54 +0100 Subject: [PATCH 100/174] remove colorspace error when rotating --- src/software/convert/main_convertLDRToHDR.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 58bcdf4124..d1f4f38634 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -200,7 +200,7 @@ int main(int argc, char * argv[]) { /*Read original image*/ image::Image originalImage; - image::readImage(v.second->getImagePath(), originalImage, image::EImageColorSpace::SRGB); + image::readImage(v.second->getImagePath(), originalImage, image::EImageColorSpace::LINEAR); /*Create a rotated image*/ image::Image rotated(w, h); From 197a3412c4ac27e88a3fd8897d1bf9616fe8d9cc Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 6 Nov 2019 10:41:12 +0100 Subject: [PATCH 101/174] add rotation prior on sfmdata --- .../ReconstructionEngine_panorama.cpp | 31 +++++++++++++ src/aliceVision/sfmData/RotationPrior.hpp | 43 +++++++++++++++++++ src/aliceVision/sfmData/SfMData.hpp | 13 ++++++ 3 files changed, 87 insertions(+) create mode 100644 
src/aliceVision/sfmData/RotationPrior.hpp diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 15ee1aa44a..4578ecd0ec 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -463,6 +463,37 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging /// pairwise view relation between poseIds typedef std::map PoseWiseMatches; + sfmData::RotationPriors & rotationpriors = _sfmData.getRotationPriors(); + for (auto & iter_v1 :_sfmData.getViews()) { + + if (!_sfmData.isPoseAndIntrinsicDefined(iter_v1.first)) { + continue; + } + + for (auto & iter_v2 :_sfmData.getViews()) { + if (iter_v1.first == iter_v2.first) { + continue; + } + + if (!_sfmData.isPoseAndIntrinsicDefined(iter_v2.first)) { + continue; + } + + IndexT pid1 = iter_v1.second->getPoseId(); + IndexT pid2 = iter_v2.second->getPoseId(); + + CameraPose oneTo = _sfmData.getAbsolutePose(iter_v1.second->getPoseId()); + CameraPose twoTo = _sfmData.getAbsolutePose(iter_v2.second->getPoseId()); + Eigen::Matrix3d oneRo = oneTo.getTransform().rotation(); + Eigen::Matrix3d twoRo = twoTo.getTransform().rotation(); + Eigen::Matrix3d twoRone = twoRo * oneRo.transpose(); + + sfmData::RotationPrior prior(iter_v1.first, iter_v2.first, twoRone); + rotationpriors.push_back(prior); + } + } + + // List shared correspondences (pairs) between poses PoseWiseMatches poseWiseMatches; for (matching::PairwiseMatches::const_iterator iterMatches = _pairwiseMatches->begin(); iterMatches != _pairwiseMatches->end(); ++iterMatches) diff --git a/src/aliceVision/sfmData/RotationPrior.hpp b/src/aliceVision/sfmData/RotationPrior.hpp new file mode 100644 index 0000000000..9fe8573ac3 --- /dev/null +++ b/src/aliceVision/sfmData/RotationPrior.hpp @@ -0,0 +1,43 @@ +// This file is part of the AliceVision project. 
+// Copyright (c) 2016 AliceVision contributors. +// Copyright (c) 2012 openMVG contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. + +#pragma once + +#include + +namespace aliceVision { +namespace sfmData { + +/** + * @brief RotationPrior is an information prior about the rotation. + */ +struct RotationPrior +{ + RotationPrior() = default; + + RotationPrior(IndexT view_first = UndefinedIndexT, + IndexT view_second = UndefinedIndexT, + Eigen::Matrix3d second_R_first = Eigen::Matrix3d::Identity()) + : ViewFirst(view_first), + ViewSecond(view_second), + _second_R_first(second_R_first) + {} + + IndexT ViewFirst; + IndexT ViewSecond; + Eigen::Matrix3d _second_R_first; + + bool operator==(const RotationPrior& other) const + { + return (ViewFirst == other.ViewFirst) && + (ViewSecond == other.ViewSecond) && + (_second_R_first == other._second_R_first); + } +}; + +} // namespace sfmData +} // namespace aliceVision diff --git a/src/aliceVision/sfmData/SfMData.hpp b/src/aliceVision/sfmData/SfMData.hpp index ea73be7dd6..9a3ef37700 100644 --- a/src/aliceVision/sfmData/SfMData.hpp +++ b/src/aliceVision/sfmData/SfMData.hpp @@ -10,6 +10,7 @@ #include #include #include +#include #include #include #include @@ -45,6 +46,9 @@ using LandmarksUncertainty = HashMap; ///Define a collection of constraints using Constraints2D = std::vector; +///Define a collection of rotation priors +using RotationPriors = std::vector; + /** * @brief SfMData container * Store structure and camera properties @@ -66,6 +70,8 @@ class SfMData LandmarksUncertainty _landmarksUncertainty; /// 2D Constraints Constraints2D constraints2d; + /// Rotation priors + RotationPriors rotationpriors; // Operators @@ -115,6 +121,13 @@ class SfMData const Constraints2D& getConstraints2D() const {return constraints2d;} Constraints2D& getConstraints2D() {return 
constraints2d;} + /** + * @brief Get RotationPriors + * @return RotationPriors + */ + const RotationPriors& getRotationPriors() const {return rotationpriors;} + RotationPriors& getRotationPriors() {return rotationpriors;} + /** * @brief Get control points * @return control points From 861c1af3cf23d7145b242b9629181e51a7b0b97e Mon Sep 17 00:00:00 2001 From: fabienservant Date: Thu, 7 Nov 2019 08:28:57 +0100 Subject: [PATCH 102/174] rotation constraints --- src/aliceVision/sfm/BundleAdjustmentCeres.cpp | 31 +++++++++- src/aliceVision/sfm/BundleAdjustmentCeres.hpp | 8 +++ .../sfm/ResidualErrorConstraintFunctor.hpp | 2 +- .../sfm/ResidualErrorRotationPriorFunctor.hpp | 60 +++++++++++++++++++ 4 files changed, 98 insertions(+), 3 deletions(-) create mode 100644 src/aliceVision/sfm/ResidualErrorRotationPriorFunctor.hpp diff --git a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp index 7cd978226e..7fd77f1f40 100644 --- a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp +++ b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp @@ -8,6 +8,7 @@ #include #include #include +#include #include #include #include @@ -271,7 +272,8 @@ void BundleAdjustmentCeres::setSolverOptions(ceres::Solver::Options& solverOptio solverOptions.minimizer_progress_to_stdout = _ceresOptions.verbose; solverOptions.logging_type = ceres::SILENT; solverOptions.num_threads = _ceresOptions.nbThreads; - + + #if CERES_VERSION_MAJOR < 2 solverOptions.num_linear_solver_threads = _ceresOptions.nbThreads; #endif @@ -632,6 +634,28 @@ void BundleAdjustmentCeres::addConstraints2DToProblem(const sfmData::SfMData& sf } } +void BundleAdjustmentCeres::addRotationPriorsToProblem(const sfmData::SfMData& sfmData, ERefineOptions refineOptions, ceres::Problem& problem) +{ + // set a LossFunction to be less penalized by false measurements. + // note: set it to NULL if you don't want use a lossFunction. 
+ ceres::LossFunction* lossFunction = nullptr; + + for (const auto & prior : sfmData.getRotationPriors()) { + const sfmData::View& view_1 = sfmData.getView(prior.ViewFirst); + const sfmData::View& view_2 = sfmData.getView(prior.ViewSecond); + + assert(getPoseState(view_1.getPoseId()) != EParameterState::IGNORED); + assert(getPoseState(view_2.getPoseId()) != EParameterState::IGNORED); + + double * poseBlockPtr_1 = _posesBlocks.at(view_1.getPoseId()).data(); + double * poseBlockPtr_2 = _posesBlocks.at(view_2.getPoseId()).data(); + + + ceres::CostFunction* costFunction = new ceres::AutoDiffCostFunction(new ResidualErrorRotationPriorFunctor(prior._second_R_first)); + problem.AddResidualBlock(costFunction, lossFunction, poseBlockPtr_1, poseBlockPtr_2); + } +} + void BundleAdjustmentCeres::createProblem(const sfmData::SfMData& sfmData, ERefineOptions refineOptions, ceres::Problem& problem) @@ -654,6 +678,9 @@ void BundleAdjustmentCeres::createProblem(const sfmData::SfMData& sfmData, // add 2D constraints to the Ceres problem addConstraints2DToProblem(sfmData, refineOptions, problem); + + // add rotation priors to the Ceres problem + addRotationPriorsToProblem(sfmData, refineOptions, problem); } void BundleAdjustmentCeres::updateFromSolution(sfmData::SfMData& sfmData, ERefineOptions refineOptions) const @@ -769,7 +796,7 @@ bool BundleAdjustmentCeres::adjust(sfmData::SfMData& sfmData, ERefineOptions ref setSolverOptions(options); // solve BA - ceres::Solver::Summary summary; + ceres::Solver::Summary summary; ceres::Solve(options, &problem, &summary); // print summary diff --git a/src/aliceVision/sfm/BundleAdjustmentCeres.hpp b/src/aliceVision/sfm/BundleAdjustmentCeres.hpp index a4c5d89bf5..1708425e20 100644 --- a/src/aliceVision/sfm/BundleAdjustmentCeres.hpp +++ b/src/aliceVision/sfm/BundleAdjustmentCeres.hpp @@ -214,6 +214,14 @@ class BundleAdjustmentCeres : public BundleAdjustment */ void addConstraints2DToProblem(const sfmData::SfMData& sfmData, ERefineOptions 
refineOptions, ceres::Problem& problem); + /** + * @brief Create a residual block for each rotation priors + * @param[in] sfmData The input SfMData contains all the information about the reconstruction, notably the intrinsics + * @param[in] refineOptions The chosen refine flag + * @param[out] problem The Ceres bundle adjustement problem + */ + void addRotationPriorsToProblem(const sfmData::SfMData& sfmData, ERefineOptions refineOptions, ceres::Problem& problem); + /** * @brief Create the Ceres bundle adjustement problem with: * - extrincics and intrinsics parameters blocks. diff --git a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp index 7c94233d36..c4bde80cfc 100644 --- a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp +++ b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp @@ -221,7 +221,7 @@ struct ResidualErrorConstraintFunctor_PinholeRadialK1 { Eigen::Matrix oneRo, twoRo, twoRone; - Eigen::Matrix< T, 3, 1> pt3d_1; + Eigen::Matrix pt3d_1; //From pixel to meters lift(cam_K, m_pos_2dpoint_first, pt3d_1); diff --git a/src/aliceVision/sfm/ResidualErrorRotationPriorFunctor.hpp b/src/aliceVision/sfm/ResidualErrorRotationPriorFunctor.hpp new file mode 100644 index 0000000000..f54c613bca --- /dev/null +++ b/src/aliceVision/sfm/ResidualErrorRotationPriorFunctor.hpp @@ -0,0 +1,60 @@ +#pragma once + +#include +#include + + + +namespace aliceVision { +namespace sfm { + +/** + * @brief Ceres functor to use a pair of pinhole on a pure rotation 2D constraint. 
+ * + * Data parameter blocks are the following <3,6,6> + * - 3 => dimension of the residuals, + * - 6 => the camera extrinsic data block for the first view + * - 6 => the camera extrinsic data block for the second view + */ +struct ResidualErrorRotationPriorFunctor +{ + ResidualErrorRotationPriorFunctor(const Eigen::Matrix3d & two_R_one) + : m_two_R_one(two_R_one) + { + } + + + /** + * @param[in] cam_Rt: Camera parameterized using one block of 6 parameters [R;t]: + * - 3 for rotation(angle axis), 3 for translation + * @param[in] pos_3dpoint + * @param[out] out_residuals + */ + template + bool operator()( + const T* const cam_R1, + const T* const cam_R2, + T* out_residuals) const + { + Eigen::Matrix oneRo, twoRo, twoRone, R_error; + + ceres::AngleAxisToRotationMatrix(cam_R1, oneRo.data()); + ceres::AngleAxisToRotationMatrix(cam_R2, twoRo.data()); + twoRone = twoRo * oneRo.transpose(); + R_error = twoRone * m_two_R_one.transpose(); + + ceres::RotationMatrixToAngleAxis(R_error.data(), out_residuals); + + out_residuals[0] *= 180.0 / M_PI; + out_residuals[1] *= 180.0 / M_PI; + out_residuals[2] *= 180.0 / M_PI; + + + return true; + } + + Eigen::Matrix3d m_two_R_one; +}; + +} // namespace sfm +} // namespace aliceVision From 8dbc7afa9bb892dea550854e4d8df7aaa490f083 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Thu, 7 Nov 2019 09:30:35 +0100 Subject: [PATCH 103/174] Ignore pose lock --- .../sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index 4578ecd0ec..ac25def1bc 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -493,7 +493,6 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging } } - // List shared 
correspondences (pairs) between poses PoseWiseMatches poseWiseMatches; for (matching::PairwiseMatches::const_iterator iterMatches = _pairwiseMatches->begin(); iterMatches != _pairwiseMatches->end(); ++iterMatches) @@ -695,14 +694,14 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging } /*If a best view is defined, lock it*/ - sfmData::Poses & poses = _sfmData.getPoses(); + /*sfmData::Poses & poses = _sfmData.getPoses(); if (max_index != UndefinedIndexT) { sfmData::View & v = _sfmData.getView(max_index); IndexT poseid = v.getPoseId(); if (poseid != UndefinedIndexT) { poses[v.getPoseId()].lock(); } - } + }*/ /*Debug result*/ ALICEVISION_LOG_DEBUG("Compute_Relative_Rotations: vec_relatives_R.size(): " << vec_relatives_R.size()); From d3a8181a80804b1b95b68cbb40a79b025eaf229a Mon Sep 17 00:00:00 2001 From: fabienservant Date: Thu, 7 Nov 2019 09:45:30 +0100 Subject: [PATCH 104/174] A new node for camera downscale for faster stitching debug --- src/software/pipeline/CMakeLists.txt | 11 + .../pipeline/main_cameraDownscale.cpp | 112 ++++++++++ .../pipeline/main_panoramaStitching.cpp | 204 ++---------------- 3 files changed, 136 insertions(+), 191 deletions(-) create mode 100644 src/software/pipeline/main_cameraDownscale.cpp diff --git a/src/software/pipeline/CMakeLists.txt b/src/software/pipeline/CMakeLists.txt index 05cac968e4..2c26d2583c 100644 --- a/src/software/pipeline/CMakeLists.txt +++ b/src/software/pipeline/CMakeLists.txt @@ -122,6 +122,17 @@ if(ALICEVISION_BUILD_SFM) aliceVision_sfmDataIO ${Boost_LIBRARIES} ) + alicevision_add_software(aliceVision_cameraDownscale + SOURCE main_cameraDownscale.cpp + FOLDER ${FOLDER_SOFTWARE_PIPELINE} + LINKS aliceVision_system + aliceVision_image + aliceVision_feature + aliceVision_sfm + aliceVision_sfmData + aliceVision_sfmDataIO + ${Boost_LIBRARIES} + ) # Compute structure from known camera poses alicevision_add_software(aliceVision_computeStructureFromKnownPoses diff --git 
a/src/software/pipeline/main_cameraDownscale.cpp b/src/software/pipeline/main_cameraDownscale.cpp new file mode 100644 index 0000000000..a905fcefe0 --- /dev/null +++ b/src/software/pipeline/main_cameraDownscale.cpp @@ -0,0 +1,112 @@ +#include +#include +#include +#include +#include +#include + +#include +#include + +// These constants define the current software version. +// They must be updated when the command line is changed. +#define ALICEVISION_SOFTWARE_VERSION_MAJOR 2 +#define ALICEVISION_SOFTWARE_VERSION_MINOR 0 + +using namespace aliceVision; +namespace po = boost::program_options; +namespace fs = boost::filesystem; + +int main(int argc, char * argv[]) { + std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); + std::string sfmInputDataFilename = ""; + std::string sfmOutputDataFilename = ""; + float rescaleFactor = 1.0f; + + /***** + * DESCRIBE COMMAND LINE PARAMETERS + */ + po::options_description allParams( + "Parse external information about cameras used in a panorama.\n" + "AliceVision PanoramaExternalInfo"); + + po::options_description requiredParams("Required parameters"); + requiredParams.add_options() + ("rescalefactor,r", po::value(&rescaleFactor)->default_value(rescaleFactor), "Rescale Factor ]0.0, 1.0].") + ("input,i", po::value(&sfmInputDataFilename)->required(), "SfMData file input.") + ("outSfMDataFilename,o", po::value(&sfmOutputDataFilename)->required(), "SfMData file output.") + ; + + po::options_description optionalParams("Optional parameters"); + + po::options_description logParams("Log parameters"); + logParams.add_options() + ("verboseLevel,v", po::value(&verboseLevel)->default_value(verboseLevel), + "verbosity level (fatal, error, warning, info, debug, trace)."); + + allParams.add(requiredParams).add(optionalParams).add(logParams); + + + /** + * READ COMMAND LINE + */ + po::variables_map vm; + try + { + po::store(po::parse_command_line(argc, argv, allParams), vm); + + 
if(vm.count("help") || (argc == 1)) + { + ALICEVISION_COUT(allParams); + return EXIT_SUCCESS; + } + po::notify(vm); + } + catch(boost::program_options::required_option& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + catch(boost::program_options::error& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + + ALICEVISION_COUT("Program called with the following parameters:"); + ALICEVISION_COUT(vm); + + /** + * set verbose level + **/ + system::Logger::get()->setLogLevel(verboseLevel); + + + /*Analyze path*/ + boost::filesystem::path path(sfmOutputDataFilename); + std::string output_path = path.parent_path().string(); + + /** + * Read input + */ + sfmData::SfMData sfmData; + if(!sfmDataIO::Load(sfmData, sfmInputDataFilename, sfmDataIO::ESfMData(sfmDataIO::ALL))) + { + ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmInputDataFilename << "' cannot be read."); + return EXIT_FAILURE; + } + + + + /*** + * Save downscaled sfmData + */ + if (!sfmDataIO::Save(sfmData, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::ALL))) { + ALICEVISION_LOG_ERROR("The output SfMData file '" << sfmOutputDataFilename << "' cannot be write."); + return EXIT_FAILURE; + } + + return 0; +} \ No newline at end of file diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index a9bd3d30fd..997ec7c8f6 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -609,192 +609,6 @@ class CoordinatesMap { coarse_bbox.width = panoramaSize.first; coarse_bbox.height = panoramaSize.second; - int bbox_left, bbox_top; - int bbox_right, bbox_bottom; - int bbox_width, bbox_height; - - /*Estimate distorted maximal distance from optical center*/ - Vec2 pts[] = {{0.0f, 0.0f}, {intrinsics.w(), 0.0f}, {intrinsics.w(), intrinsics.h()}, 
{0.0f, intrinsics.h()}}; - float max_radius = 0.0; - for (int i = 0; i < 4; i++) { - - Vec2 ptmeter = intrinsics.ima2cam(pts[i]); - float radius = ptmeter.norm(); - max_radius = std::max(max_radius, radius); - } - - /* Estimate undistorted maximal distance from optical center */ - float max_radius_distorted = intrinsics.getMaximalDistortion(0.0, max_radius); - - /* - Coarse rectangle bouding box in camera space - We add intermediate points to ensure arclength between 2 points is never more than 180° - */ - Vec2 pts_radius[] = { - {-max_radius_distorted, -max_radius_distorted}, - {0, -max_radius_distorted}, - {max_radius_distorted, -max_radius_distorted}, - {max_radius_distorted, 0}, - {max_radius_distorted, max_radius_distorted}, - {0, max_radius_distorted}, - {-max_radius_distorted, max_radius_distorted}, - {-max_radius_distorted, 0} - }; - - - /* - Transform bounding box into the panorama frame. - Point are on a unit sphere. - */ - Vec3 rotated_pts[8]; - for (int i = 0; i < 8; i++) { - Vec3 pt3d = pts_radius[i].homogeneous().normalized(); - rotated_pts[i] = pose.rotation().transpose() * pt3d; - } - - /* Vertical Default solution : no pole*/ - bbox_top = panoramaSize.second; - bbox_bottom = 0; - - for (int i = 0; i < 8; i++) { - int i2 = i + 1; - if (i2 > 7) i2 = 0; - - Vec3 extremaY = getExtremaY(rotated_pts[i], rotated_pts[i2]); - - Vec2 res; - res = SphericalMapping::toEquirectangular(extremaY, panoramaSize.first, panoramaSize.second); - bbox_top = std::min(int(floor(res(1))), bbox_top); - bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); - - res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); - bbox_top = std::min(int(floor(res(1))), bbox_top); - bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); - } - - /* - Check if our region circumscribe a pole of the sphere : - Check that the region projected on the Y=0 plane contains the point (0, 0) - This is a special projection case - */ - bool pole = 
isPoleInTriangle(rotated_pts[0], rotated_pts[1], rotated_pts[7]); - pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[2], rotated_pts[3]); - pole |= isPoleInTriangle(rotated_pts[3], rotated_pts[4], rotated_pts[5]); - pole |= isPoleInTriangle(rotated_pts[7], rotated_pts[5], rotated_pts[6]); - pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[3], rotated_pts[5]); - pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[5], rotated_pts[7]); - - - if (pole) { - Vec3 normal = (rotated_pts[1] - rotated_pts[0]).cross(rotated_pts[3] - rotated_pts[0]); - if (normal(1) > 0) { - //Lower pole - bbox_bottom = panoramaSize.second - 1; - } - else { - //upper pole - bbox_top = 0; - } - } - - bbox_height = bbox_bottom - bbox_top + 1; - - - /*Check if we cross the horizontal loop*/ - bool crossH; - for (int i = 0; i < 8; i++) { - int i2 = i + 1; - if (i2 > 7) i2 = 0; - - bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); - if (i == 0) crossH = cross; - else crossH |= cross; - } - - - if (pole) { - /*Easy : if we cross the pole, the width is full*/ - bbox_left = 0; - bbox_right = panoramaSize.first - 1; - bbox_width = bbox_right - bbox_left + 1; - } - else if (crossH) { - - bbox_left = panoramaSize.first; - bbox_right = 0; - - for (int i = 0; i < 8; i++) { - int i2 = i + 1; - if (i2 > 7) i2 = 0; - - Vec2 res_left = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); - Vec2 res_right = SphericalMapping::toEquirectangular(rotated_pts[i2], panoramaSize.first, panoramaSize.second); - - if (crossHorizontalLoop(rotated_pts[i], rotated_pts[i2])) { - - if (res_left(0) > res_right(0)) { - //Left --> border; border --> right - bbox_left = std::min(bbox_left, int(floor(res_left(0)))); - bbox_right = std::max(bbox_right, int(ceil(res_right(0)))); - - if (i % 2 == 0) { - //next segment is continuity - int next = i2 + 1; - if (next > 7) next = 0; - - Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[next], panoramaSize.first, 
panoramaSize.second); - bbox_right = std::max(bbox_right, int(ceil(res(0)))); - } - else { - int prev = i - 1; - if (prev < 0) prev = 7; - Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[prev], panoramaSize.first, panoramaSize.second); - bbox_left = std::min(bbox_left, int(ceil(res(0)))); - } - } - else { - //right --> border; border --> left - bbox_left = std::min(bbox_left, int(floor(res_right(0)))); - bbox_right = std::max(bbox_right, int(ceil(res_left(0)))); - - if (i % 2 == 0) { - //next segment is continuity - int next = i2 + 1; - if (next > 7) next = 0; - - Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[next], panoramaSize.first, panoramaSize.second); - bbox_left = std::min(bbox_left, int(ceil(res(0)))); - } - else { - int prev = i - 1; - if (prev < 0) prev = 7; - Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[prev], panoramaSize.first, panoramaSize.second); - bbox_right = std::max(bbox_right, int(ceil(res(0)))); - } - } - } - } - - bbox_width = bbox_right + 1 + (panoramaSize.first - bbox_left + 1); - } - else { - /*horizontal default solution : no border crossing, no pole*/ - bbox_left = panoramaSize.first; - bbox_right = 0; - for (int i = 0; i < 8; i++) { - Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); - bbox_left = std::min(int(floor(res(0))), bbox_left); - bbox_right = std::max(int(ceil(res(0))), bbox_right); - } - bbox_width = bbox_right - bbox_left + 1; - } - - /*Assign solution to result*/ - coarse_bbox.left = bbox_left; - coarse_bbox.top = bbox_top; - coarse_bbox.width = bbox_width; - coarse_bbox.height = bbox_height; - return true; } @@ -1629,6 +1443,7 @@ int main(int argc, char **argv) { continue; } + /*if (pos >= 2 && pos < 3)*/ { ALICEVISION_LOG_INFO("Processing view " << view.getViewId()); /** @@ -1659,10 +1474,6 @@ int main(int argc, char **argv) { GaussianWarper warper; warper.warp(map, source); - /*std::ofstream out("/home/mmoc/test.dat", 
std::ios::binary); - warper.dump(out); - out.close();*/ - /** * Alpha mask */ @@ -1674,7 +1485,18 @@ int main(int argc, char **argv) { */ ALICEVISION_LOG_INFO("Composite to final panorama\n"); compositer.append(warper, alphabuilder); - + + ALICEVISION_LOG_INFO("Save final panoram\n"); + + char filename[512]; + const aliceVision::image::Image & panorama = compositer.getPanorama(); + sprintf(filename, "%s_intermediate%d.exr", outputPanorama.c_str(), pos); + image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); + + const aliceVision::image::Image & cam = warper.getColor(); + sprintf(filename, "%s_view%d.exr", outputPanorama.c_str(), pos); + image::writeImage(filename, cam, image::EImageColorSpace::SRGB); + } pos++; } From 0510a1dc560670bef34bc95cb8d5c51304029d62 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Thu, 7 Nov 2019 14:50:01 +0100 Subject: [PATCH 105/174] rescale node --- src/aliceVision/camera/IntrinsicBase.hpp | 11 +++++ src/aliceVision/camera/Pinhole.hpp | 17 +++++++ .../pipeline/main_cameraDownscale.cpp | 45 +++++++++++++++++++ 3 files changed, 73 insertions(+) diff --git a/src/aliceVision/camera/IntrinsicBase.hpp b/src/aliceVision/camera/IntrinsicBase.hpp index 9766569fe3..d4e68178fc 100644 --- a/src/aliceVision/camera/IntrinsicBase.hpp +++ b/src/aliceVision/camera/IntrinsicBase.hpp @@ -400,6 +400,16 @@ struct IntrinsicBase return seed; } + /** + * @brief Rescale intrinsics to reflect a rescale of the camera image + * @param factor a scale factor + */ + virtual void rescale(float factor) { + + _w = (unsigned int)(floor(float(_w) * factor)); + _h = (unsigned int)(floor(float(_h) * factor)); + } + private: /// initialization mode @@ -463,5 +473,6 @@ inline double AngleBetweenRays(const geometry::Pose3& pose1, return AngleBetweenRays(ray1, ray2); } + } // namespace camera } // namespace aliceVision diff --git a/src/aliceVision/camera/Pinhole.hpp b/src/aliceVision/camera/Pinhole.hpp index 01e6145735..20977700d6 100644 --- 
a/src/aliceVision/camera/Pinhole.hpp +++ b/src/aliceVision/camera/Pinhole.hpp @@ -194,6 +194,23 @@ class Pinhole : public IntrinsicBase /// Return the distorted pixel (with added distortion) virtual Vec2 get_d_pixel(const Vec2& p) const override {return p;} + /** + * @brief Rescale intrinsics to reflect a rescale of the camera image + * @param factor a scale factor + */ + virtual void rescale(float factor) override { + + IntrinsicBase::rescale(factor); + + Mat3 scale; + scale.setIdentity(); + scale(0, 0) = factor; + scale(1, 1) = factor; + + _K = scale * _K; + _Kinv = _K.inverse(); + } + private: // Focal & principal point are embed into the calibration matrix K Mat3 _K, _Kinv; diff --git a/src/software/pipeline/main_cameraDownscale.cpp b/src/software/pipeline/main_cameraDownscale.cpp index a905fcefe0..f27753712f 100644 --- a/src/software/pipeline/main_cameraDownscale.cpp +++ b/src/software/pipeline/main_cameraDownscale.cpp @@ -7,6 +7,8 @@ #include #include +#include + // These constants define the current software version. // They must be updated when the command line is changed. 
@@ -83,6 +85,11 @@ int main(int argc, char * argv[]) { **/ system::Logger::get()->setLogLevel(verboseLevel); + if (rescaleFactor < 0.0001f || rescaleFactor > 1.0f) { + ALICEVISION_LOG_ERROR("Invalid scale factor (]0,1]"); + return EXIT_FAILURE; + } + /*Analyze path*/ boost::filesystem::path path(sfmOutputDataFilename); @@ -98,7 +105,45 @@ int main(int argc, char * argv[]) { return EXIT_FAILURE; } + for (auto & v : sfmData.getViews()) { + + /*Read original image*/ + image::Image originalImage; + image::readImage(v.second->getImagePath(), originalImage, image::EImageColorSpace::LINEAR); + + unsigned int w = v.second->getWidth(); + unsigned int h = v.second->getHeight(); + unsigned int nw = (unsigned int)(floor(float(w) * rescaleFactor)); + unsigned int nh = (unsigned int)(floor(float(h) * rescaleFactor)); + + /*Create a rotated image*/ + image::Image rescaled(nw, nh); + + oiio::ImageSpec imageSpecResized(nw, nh, 3, oiio::TypeDesc::FLOAT); + oiio::ImageSpec imageSpecOrigin(w, h, 3, oiio::TypeDesc::FLOAT); + oiio::ImageBuf bufferOrigin(imageSpecOrigin, originalImage.data()); + oiio::ImageBuf bufferResized(imageSpecResized, rescaled.data()); + + oiio::ImageBufAlgo::resample(bufferResized, bufferOrigin); + + boost::filesystem::path old_path(v.second->getImagePath()); + std::string old_filename = old_path.stem().string(); + + /*Save this image*/ + std::stringstream sstream; + sstream << output_path << "/" << old_filename << "_rescaled.exr"; + image::writeImage(sstream.str(), rescaled, image::EImageColorSpace::AUTO); + + /*Update view for this modification*/ + v.second->setWidth(nw); + v.second->setHeight(nh); + v.second->setImagePath(sstream.str()); + } + + for (auto & i : sfmData.getIntrinsics()) { + i.second->rescale(rescaleFactor); + } /*** * Save downscaled sfmData From 7ed0e6080c858b7374c6a36edae23f0c26992819 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 8 Nov 2019 09:35:00 +0100 Subject: [PATCH 106/174] Bounding box algorithm to be replaced --- 
.../pipeline/main_panoramaStitching.cpp | 152 +++++++++++++++++- 1 file changed, 148 insertions(+), 4 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 997ec7c8f6..2c212d1005 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -608,6 +608,148 @@ class CoordinatesMap { coarse_bbox.top = 0; coarse_bbox.width = panoramaSize.first; coarse_bbox.height = panoramaSize.second; + + int bbox_left, bbox_top; + int bbox_right, bbox_bottom; + int bbox_width, bbox_height; + + /*Estimate distorted maximal distance from optical center*/ + Vec2 pts[] = {{0.0f, 0.0f}, {intrinsics.w(), 0.0f}, {intrinsics.w(), intrinsics.h()}, {0.0f, intrinsics.h()}}; + float max_radius = 0.0; + for (int i = 0; i < 4; i++) { + + Vec2 ptmeter = intrinsics.ima2cam(pts[i]); + float radius = ptmeter.norm(); + max_radius = std::max(max_radius, radius); + } + + /* Estimate undistorted maximal distance from optical center */ + float max_radius_distorted = intrinsics.getMaximalDistortion(0.0, max_radius); + + /* + Coarse rectangle bouding box in camera space + We add intermediate points to ensure arclength between 2 points is never more than 180° + */ + Vec2 pts_radius[] = { + {-max_radius_distorted, -max_radius_distorted}, + {0, -max_radius_distorted}, + {max_radius_distorted, -max_radius_distorted}, + {max_radius_distorted, 0}, + {max_radius_distorted, max_radius_distorted}, + {0, max_radius_distorted}, + {-max_radius_distorted, max_radius_distorted}, + {-max_radius_distorted, 0} + }; + + + /* + Transform bounding box into the panorama frame. + Point are on a unit sphere. 
+ */ + Vec3 rotated_pts[8]; + for (int i = 0; i < 8; i++) { + Vec3 pt3d = pts_radius[i].homogeneous().normalized(); + rotated_pts[i] = pose.rotation().transpose() * pt3d; + } + + /* Vertical Default solution : no pole*/ + bbox_top = panoramaSize.second; + bbox_bottom = 0; + + for (int i = 0; i < 8; i++) { + int i2 = i + 1; + if (i2 > 7) i2 = 0; + + Vec3 extremaY = getExtremaY(rotated_pts[i], rotated_pts[i2]); + + Vec2 res; + res = SphericalMapping::toEquirectangular(extremaY, panoramaSize.first, panoramaSize.second); + bbox_top = std::min(int(floor(res(1))), bbox_top); + bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); + + res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + bbox_top = std::min(int(floor(res(1))), bbox_top); + bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); + } + + /* + Check if our region circumscribe a pole of the sphere : + Check that the region projected on the Y=0 plane contains the point (0, 0) + This is a special projection case + */ + bool pole = isPoleInTriangle(rotated_pts[0], rotated_pts[1], rotated_pts[7]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[2], rotated_pts[3]); + pole |= isPoleInTriangle(rotated_pts[3], rotated_pts[4], rotated_pts[5]); + pole |= isPoleInTriangle(rotated_pts[7], rotated_pts[5], rotated_pts[6]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[3], rotated_pts[5]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[5], rotated_pts[7]); + + + if (pole) { + Vec3 normal = (rotated_pts[1] - rotated_pts[0]).cross(rotated_pts[3] - rotated_pts[0]); + if (normal(1) > 0) { + //Lower pole + bbox_bottom = panoramaSize.second - 1; + } + else { + //upper pole + bbox_top = 0; + } + } + + bbox_height = bbox_bottom - bbox_top + 1; + + + /*Check if we cross the horizontal loop*/ + bool crossH; + for (int i = 0; i < 8; i++) { + int i2 = i + 1; + if (i2 > 7) i2 = 0; + + bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); + if (i == 
0) crossH = cross; + else crossH |= cross; + } + + + if (pole) { + /*Easy : if we cross the pole, the width is full*/ + bbox_left = 0; + bbox_right = panoramaSize.first - 1; + bbox_width = bbox_right - bbox_left + 1; + } + else if (crossH) { + + for (int i = 0; i < 8; i+= 2) { + int i2 = i + 1; + int i3 = i + 2; + if (i3 >= 8) { + i3 = 0; + } + + + } + bbox_left = 0; + bbox_right = panoramaSize.first - 1; + bbox_width = bbox_right - bbox_left + 1; + } + else { + /*horizontal default solution : no border crossing, no pole*/ + bbox_left = panoramaSize.first; + bbox_right = 0; + for (int i = 0; i < 8; i++) { + Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + bbox_left = std::min(int(floor(res(0))), bbox_left); + bbox_right = std::max(int(ceil(res(0))), bbox_right); + } + bbox_width = bbox_right - bbox_left + 1; + } + + /*Assign solution to result*/ + coarse_bbox.left = bbox_left; + coarse_bbox.top = bbox_top; + coarse_bbox.width = bbox_width; + coarse_bbox.height = bbox_height; return true; } @@ -876,7 +1018,9 @@ class GaussianWarper : public Warper { double scale = sqrt(det); - double flevel = log2(scale); + + + double flevel = std::max(0.0, log2(scale)); size_t blevel = std::min(max_level, size_t(floor(flevel))); double dscale, x, y; @@ -1443,7 +1587,7 @@ int main(int argc, char **argv) { continue; } - /*if (pos >= 2 && pos < 3)*/ { + /*if (pos >= 20 && pos < 305)*/ { ALICEVISION_LOG_INFO("Processing view " << view.getViewId()); /** @@ -1488,14 +1632,14 @@ int main(int argc, char **argv) { ALICEVISION_LOG_INFO("Save final panoram\n"); - char filename[512]; + /*char filename[512]; const aliceVision::image::Image & panorama = compositer.getPanorama(); sprintf(filename, "%s_intermediate%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); const aliceVision::image::Image & cam = warper.getColor(); sprintf(filename, "%s_view%d.exr", outputPanorama.c_str(), 
pos); - image::writeImage(filename, cam, image::EImageColorSpace::SRGB); + image::writeImage(filename, cam, image::EImageColorSpace::SRGB);*/ } pos++; } From 783d4ab51376cb0cc795394bb5d96a8ecf715f57 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Fri, 8 Nov 2019 12:54:15 +0100 Subject: [PATCH 107/174] bugs with hdr --- src/aliceVision/hdr/hdrMerge.cpp | 24 +++--------------------- 1 file changed, 3 insertions(+), 21 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index 3553a20b83..165457c5db 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -37,9 +37,6 @@ void hdrMerge::process(const std::vector< image::Image > &imag const std::size_t width = images.front().Width(); const std::size_t height = images.front().Height(); - const float maxLum = 1000.0; - const float minLum = 0.0001; - #pragma omp parallel for for(int y = 0; y < height; ++y) { @@ -52,35 +49,20 @@ void hdrMerge::process(const std::vector< image::Image > &imag { double wsum = 0.0; double wdiv = 0.0; - double highValue = images.at(0)(y, x)(channel); - double lowValue = images.at(images.size()-1)(y, x)(channel); - for(std::size_t i = 0; i < images.size(); ++i) { //for each images const double value = images[i](y, x)(channel); const double time = times[i]; - double w = std::max(0.f, weight(value, channel) - weight(0.05, 0)); - + double w = std::max(0.f, weight(value, channel)); const double r = response(value, channel); - wsum += w * r / time; wdiv += w; } - double clampedHighValue = 1.0 - (1.0 / (1.0 + expf(10.0 * ((highValue - 0.9) / 0.2)))); - double clampedLowValue = 1.0 / (1.0 + expf(10.0 * ((lowValue - 0.005) / 0.01))); - - - if(!robCalibrate && clampedValueCorrection != 0.f) - { - radianceColor(channel) = (1.0 - clampedHighValue - clampedLowValue) * wsum / std::max(0.001, wdiv) * targetTime + clampedHighValue * maxLum * clampedValueCorrection + clampedLowValue * minLum * clampedValueCorrection; - } - else - { - 
radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetTime; - } + + radianceColor(channel) = wsum / std::max(1e-08, wdiv) * targetTime; } } From 76598cf637ca4c580e5cb2de1ac1919d499c61c4 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 8 Nov 2019 14:47:30 +0100 Subject: [PATCH 108/174] add new constraints for bounding box --- .../pipeline/main_panoramaStitching.cpp | 60 +++++++++++++++---- 1 file changed, 49 insertions(+), 11 deletions(-) diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaStitching.cpp index 2c212d1005..3bab167f37 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaStitching.cpp @@ -720,18 +720,58 @@ class CoordinatesMap { } else if (crossH) { - for (int i = 0; i < 8; i+= 2) { + int first_cross = 0; + for (int i = 0; i < 8; i++) { int i2 = i + 1; - int i3 = i + 2; - if (i3 >= 8) { - i3 = 0; + bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); + if (cross) { + first_cross = i; + break; } + } + + bbox_left = panoramaSize.first - 1; + bbox_right = 0; + bool is_right = true; + for (int index = 0; index < 8; index++) { + + int i = index + first_cross; + int i2 = i + 1; + if (i > 7) i = i - 8; + if (i2 > 7) i2 = i2 - 8; + Vec2 res_1 = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + Vec2 res_2 = SphericalMapping::toEquirectangular(rotated_pts[i2], panoramaSize.first, panoramaSize.second); + + /*[----right //// left-----]*/ + bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); + if (cross) { + if (res_1(0) > res_2(0)) { /*[----res2 //// res1----]*/ + bbox_left = std::min(int(res_1(0)), bbox_left); + bbox_right = std::max(int(res_2(0)), bbox_right); + is_right = true; + } + else { /*[----res1 //// res2----]*/ + bbox_left = std::min(int(res_2(0)), bbox_left); + bbox_right = std::max(int(res_1(0)), bbox_right); + is_right = false; + } + } + else { + if (is_right) { + 
bbox_right = std::max(int(res_1(0)), bbox_right); + bbox_right = std::max(int(res_2(0)), bbox_right); + } + else { + bbox_left = std::min(int(res_1(0)), bbox_left); + bbox_left = std::min(int(res_2(0)), bbox_left); + } + } } - bbox_left = 0; - bbox_right = panoramaSize.first - 1; - bbox_width = bbox_right - bbox_left + 1; + + + bbox_width = bbox_right + (panoramaSize.first - bbox_left); } else { /*horizontal default solution : no border crossing, no pole*/ @@ -1630,16 +1670,14 @@ int main(int argc, char **argv) { ALICEVISION_LOG_INFO("Composite to final panorama\n"); compositer.append(warper, alphabuilder); - ALICEVISION_LOG_INFO("Save final panoram\n"); - - /*char filename[512]; + char filename[512]; const aliceVision::image::Image & panorama = compositer.getPanorama(); sprintf(filename, "%s_intermediate%d.exr", outputPanorama.c_str(), pos); image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); const aliceVision::image::Image & cam = warper.getColor(); sprintf(filename, "%s_view%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, cam, image::EImageColorSpace::SRGB);*/ + image::writeImage(filename, cam, image::EImageColorSpace::SRGB); } pos++; } From 3c77d5d0878ef04c23c1eaa5948ba86e07d413f8 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Tue, 12 Nov 2019 12:47:29 +0100 Subject: [PATCH 109/174] [hdr] formatting changes --- src/aliceVision/hdr/DebevecCalibrate.cpp | 40 +++++++++++++----------- 1 file changed, 22 insertions(+), 18 deletions(-) diff --git a/src/aliceVision/hdr/DebevecCalibrate.cpp b/src/aliceVision/hdr/DebevecCalibrate.cpp index c1ebc3b06d..7ce26e31d4 100644 --- a/src/aliceVision/hdr/DebevecCalibrate.cpp +++ b/src/aliceVision/hdr/DebevecCalibrate.cpp @@ -44,23 +44,22 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im std::vector tripletList_array[channelsCount]; /* Initialize intermediate buffers */ - for(unsigned int channel=0; channel < channelsCount; ++channel) { + for(unsigned int channel=0; 
channel < channelsCount; ++channel) + { Vec & b = b_array[channel]; b = Vec::Zero(nbPoints + channelQuantization + 1); std::vector & tripletList = tripletList_array[channel]; tripletList.reserve(2 * nbPoints + 1 + 3 * channelQuantization); } - size_t count = 0; for (unsigned int g = 0; g < nbGroups; g++) { - const std::vector &imagePaths = imagePathsGroups[g]; std::vector> ldrImagesGroup(imagePaths.size()); - for (int i = 0; i < imagePaths.size(); i++) { - ALICEVISION_LOG_INFO("Load " << imagePaths[i]); + for (int i = 0; i < imagePaths.size(); i++) + { image::readImage(imagePaths[i], ldrImagesGroup[i], image::EImageColorSpace::SRGB); } @@ -97,12 +96,14 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im std::size_t dist2 = pow(center(0)-x, 2) + pow(center(1)-y, 2); /*This looks stupid ...*/ - if(dist2 > maxDist2) { + if(dist2 > maxDist2) + { continue; } - for (int channel = 0; channel < channelsCount; channel++) { + for (int channel = 0; channel < channelsCount; channel++) + { float sample = clamp(image(y, x)(channel), 0.f, 1.f); float w_ij = weight(sample, channel); std::size_t index = std::round(sample * (channelQuantization - 1)); @@ -129,8 +130,8 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im for(unsigned int i=0; i> & im } // fix the curve by setting its middle value to zero - for (int channel = 0; channel < channelsCount; channel ++) { + for (int channel = 0; channel < channelsCount; channel++) + { tripletList_array[channel].push_back(T(count, std::floor(channelQuantization/2), 1.f)); } count += 1; @@ -157,7 +159,8 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im for(std::size_t k = 0; k> & im count++; } - - - for (int channel = 0; channel < channelsCount; channel ++) { - + for (int channel = 0; channel < channelsCount; channel ++) + { sMat A(count, channelQuantization + samplesPerImage * nbGroups); A.setFromTriplets(tripletList_array[channel].begin(), tripletList_array[channel].end()); 
b_array[channel].conservativeResize(count); @@ -183,21 +184,24 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im solver.compute(A); /*Check solver failure*/ - if (solver.info() != Eigen::Success) { + if (solver.info() != Eigen::Success) + { return false; } Vec x = solver.solve(b_array[channel]); /*Check solver failure*/ - if(solver.info() != Eigen::Success) { + if(solver.info() != Eigen::Success) + { return false; } double relative_error = (A*x - b_array[channel]).norm() / b_array[channel].norm(); /* Save result to response curve*/ - for(std::size_t k = 0; k < channelQuantization; ++k) { + for(std::size_t k = 0; k < channelQuantization; ++k) + { response.setValue(k, channel, x(k)); } } From 684d073928fb8b1a96435f5e1ee2ced2ce51aef7 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Tue, 12 Nov 2019 12:48:54 +0100 Subject: [PATCH 110/174] [hdr] add option to export RGB response function as html chart --- src/aliceVision/hdr/rgbCurve.cpp | 31 ++++++++++++++++++++++++++ src/aliceVision/hdr/rgbCurve.hpp | 6 +++++ src/dependencies/htmlDoc/htmlDoc.hpp | 33 +++++++++++++++++++++------- 3 files changed, 62 insertions(+), 8 deletions(-) diff --git a/src/aliceVision/hdr/rgbCurve.cpp b/src/aliceVision/hdr/rgbCurve.cpp index 3d65ec261d..80aa4141a7 100644 --- a/src/aliceVision/hdr/rgbCurve.cpp +++ b/src/aliceVision/hdr/rgbCurve.cpp @@ -9,8 +9,12 @@ #include #include #include +#include + #include +#include + namespace aliceVision { namespace hdr { @@ -388,6 +392,33 @@ void rgbCurve::write(const std::string &path, const std::string &name) const file.close(); } +void rgbCurve::writeHtml(const std::string& path, const std::string& title) const +{ + using namespace htmlDocument; + + std::vector xBin(getCurveRed().size()); + std::iota(xBin.begin(), xBin.end(), 0); + + std::pair< std::pair, std::pair > range = autoJSXGraphViewport(xBin, getCurveRed()); + + JSXGraphWrapper jsxGraph; + jsxGraph.init(title, 800, 600); + + jsxGraph.addXYChart(xBin, getCurveRed(), 
"line", "ff0000"); + jsxGraph.addXYChart(xBin, getCurveGreen(), "line", "00ff00"); + jsxGraph.addXYChart(xBin, getCurveBlue(), "line", "0000ff"); + + jsxGraph.UnsuspendUpdate(); + jsxGraph.setViewport(range); + jsxGraph.close(); + + // save the reconstruction Log + std::ofstream htmlFileStream(path.c_str()); + htmlFileStream << htmlDocumentStream(title).getDoc(); + htmlFileStream << jsxGraph.toStr(); +} + + void rgbCurve::read(const std::string &path) { std::ifstream file(path); diff --git a/src/aliceVision/hdr/rgbCurve.hpp b/src/aliceVision/hdr/rgbCurve.hpp index 30171dad18..1d5e1df713 100644 --- a/src/aliceVision/hdr/rgbCurve.hpp +++ b/src/aliceVision/hdr/rgbCurve.hpp @@ -293,6 +293,12 @@ class rgbCurve */ void write(const std::string &path, const std::string &name = "rgbCurve") const; + /** + * @brief Write in an html file + * @param[in] path + */ + void writeHtml(const std::string &path, const std::string& title) const; + /** * @brief Read and fill curves from a csv file * @param[in] path diff --git a/src/dependencies/htmlDoc/htmlDoc.hpp b/src/dependencies/htmlDoc/htmlDoc.hpp index 624703c6f9..9ff773980a 100644 --- a/src/dependencies/htmlDoc/htmlDoc.hpp +++ b/src/dependencies/htmlDoc/htmlDoc.hpp @@ -150,7 +150,7 @@ namespace htmlDocument << range.first.second << ","<< range.second.first <<"]);\n"; } - void addLine(double x0, double y0, double x1, double y1, std::string color ="00ff00") + void addLine(double x0, double y0, double x1, double y1, const std::string& color ="00ff00") { size_t index0 = cpt++; size_t index1 = cpt++; @@ -163,7 +163,7 @@ namespace htmlDocument template void addXYChart(const std::vector & vec_x, const std::vector & vec_y, - std::string stype) + const std::string& stype, const std::string& strokeColor = "0000ff") { size_t index0 = cpt++; size_t index1 = cpt++; @@ -182,11 +182,11 @@ namespace htmlDocument stream << "board.createElement('chart', " < - void addYChart(const std::vector & vec_y, std::string stype) + void addYChart(const 
std::vector & vec_y, const std::string& stype, const std::string& strokeColor = "0000ff") { size_t index0 = cpt++; @@ -195,7 +195,7 @@ namespace htmlDocument stream << "];\n"; stream << "board.createElement('chart', " <<"data"< - void pushXYChart(const std::vector& vec_x, const std::vector& vec_y,std::string name) + void pushXYChart(const std::vector& vec_x, const std::vector& vec_y, const std::string& name, const std::string& color = "0000ff") { std::pair< std::pair, std::pair > range = autoJSXGraphViewport(vec_x, vec_y); htmlDocument::JSXGraphWrapper jsxGraph; jsxGraph.init(name,600,300); - jsxGraph.addXYChart(vec_x, vec_y, "line,point"); + jsxGraph.addXYChart(vec_x, vec_y, "line,point", color); jsxGraph.UnsuspendUpdate(); jsxGraph.setViewport(range); jsxGraph.close(); pushInfo(jsxGraph.toStr()); } - + + template + void pushXYChart(const std::vector& vec_x, const std::vector>& vec_y, const std::string& name, const std::vector& colors, const std::string& sType= "line,point") + { + std::pair< std::pair, std::pair > range = autoJSXGraphViewport(vec_x, vec_y.front()); + + htmlDocument::JSXGraphWrapper jsxGraph; + jsxGraph.init(name, 600, 300); + for (int i = 0; i < vec_y.size(); ++i) + { + jsxGraph.addXYChart(vec_x, vec_y[i], sType, colors[i]); + } + jsxGraph.UnsuspendUpdate(); + jsxGraph.setViewport(range); + jsxGraph.close(); + pushInfo(jsxGraph.toStr()); + } + std::string getDoc() { return htmlMarkup("html", htmlStream.str()); From c2b66f417a15e3a292430476a4e89741eb914466 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Tue, 12 Nov 2019 12:50:24 +0100 Subject: [PATCH 111/174] [hdr] clean hdr fusion --- src/aliceVision/hdr/hdrMerge.cpp | 95 ++++++++++++++++++++++++-------- src/aliceVision/hdr/rgbCurve.cpp | 11 ++++ src/aliceVision/hdr/rgbCurve.hpp | 2 + 3 files changed, 86 insertions(+), 22 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index 3553a20b83..b0424bb6ca 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ 
b/src/aliceVision/hdr/hdrMerge.cpp @@ -15,7 +15,31 @@ namespace aliceVision { namespace hdr { - + +/** + * f(x)=min + (max-min) * \frac{1}{1 + e^{10 * (x - mid) / width}} + * https://www.desmos.com/calculator/xamvguu8zw + * ____ + * sigmoid: \________ + * sigMid + */ +inline float sigmoid(float zeroVal, float endVal, float sigwidth, float sigMid, float xval) +{ + return zeroVal + (endVal - zeroVal) * (1.0f / (1.0f + expf(10.0f * ((xval - sigMid) / sigwidth)))); +} + +/** + * https://www.desmos.com/calculator/cvu8s3rlvy + * + * ____ + * sigmoid inv: _______/ + * sigMid + */ +inline float sigmoidInv(float zeroVal, float endVal, float sigwidth, float sigMid, float xval) +{ + return zeroVal + (endVal - zeroVal) * (1.0f / (1.0f + expf(10.0f * ((sigMid - xval) / sigwidth)))); +} + void hdrMerge::process(const std::vector< image::Image > &images, const std::vector ×, const rgbCurve &weight, @@ -37,8 +61,11 @@ void hdrMerge::process(const std::vector< image::Image > &imag const std::size_t width = images.front().Width(); const std::size_t height = images.front().Height(); - const float maxLum = 1000.0; - const float minLum = 0.0001; + const float maxLum = 1000.0f; + const float minLum = 0.0001f; + + rgbCurve weightShortestExposure = weight; + weightShortestExposure.invertAndScaleSecondPart(1.0f + clampedValueCorrection * maxLum); #pragma omp parallel for for(int y = 0; y < height; ++y) @@ -52,37 +79,61 @@ void hdrMerge::process(const std::vector< image::Image > &imag { double wsum = 0.0; double wdiv = 0.0; - double highValue = images.at(0)(y, x)(channel); - double lowValue = images.at(images.size()-1)(y, x)(channel); + { + int exposureIndex = 0; + // float highValue = images[exposureIndex](y, x)(channel); + // // https://www.desmos.com/calculator/xamvguu8zw + // // ____ + // // sigmoid inv: _______/ + // // 0 1 + // float clampedHighValue = sigmoidInv(0.0f, 1.0f, /*sigWidth=*/0.2f, /*sigMid=*/0.9f, highValue); + ////////// + + // for each images + const double value = 
images[exposureIndex](y, x)(channel); + const double time = times[exposureIndex]; + // + // / + // weightShortestExposure: ____/ + // _______/ + // 0 1 + double w = std::max(0.f, weightShortestExposure(value, channel)); // - weight(0.05, 0) + + const double r = response(value, channel); + + wsum += w * r / time; + wdiv += w; - for(std::size_t i = 0; i < images.size(); ++i) + } + for(std::size_t i = 1; i < images.size(); ++i) { - //for each images + // for each images const double value = images[i](y, x)(channel); const double time = times[i]; - double w = std::max(0.f, weight(value, channel) - weight(0.05, 0)); + // + // weight: ____ + // _______/ \________ + // 0 1 + double w = std::max(0.f, weight(value, channel)); // - weight(0.05, 0) const double r = response(value, channel); wsum += w * r / time; wdiv += w; } - - double clampedHighValue = 1.0 - (1.0 / (1.0 + expf(10.0 * ((highValue - 0.9) / 0.2)))); - double clampedLowValue = 1.0 / (1.0 + expf(10.0 * ((lowValue - 0.005) / 0.01))); - - - if(!robCalibrate && clampedValueCorrection != 0.f) - { - radianceColor(channel) = (1.0 - clampedHighValue - clampedLowValue) * wsum / std::max(0.001, wdiv) * targetTime + clampedHighValue * maxLum * clampedValueCorrection + clampedLowValue * minLum * clampedValueCorrection; - } - else - { - radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetTime; - } + //{ + // int exposureIndex = images.size() - 1; + // double lowValue = images[exposureIndex](y, x)(channel); + // // https://www.desmos.com/calculator/cvu8s3rlvy + // // ____ + // // sigmoid: \________ + // // 0 1 + // double clampedLowValue = sigmoid(0.0f, 1.0f, /*sigWidth=*/0.01f, /*sigMid=*/0.005, lowValue); + //} + + radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetTime; } - } } } diff --git a/src/aliceVision/hdr/rgbCurve.cpp b/src/aliceVision/hdr/rgbCurve.cpp index 80aa4141a7..8d03987515 100644 --- a/src/aliceVision/hdr/rgbCurve.cpp +++ b/src/aliceVision/hdr/rgbCurve.cpp @@ -183,6 +183,17 @@ void 
rgbCurve::inverseAllValues() } } +void rgbCurve::invertAndScaleSecondPart(float scale) +{ + for (auto &curve : _data) + { + for (std::size_t i = curve.size()/2; i < curve.size(); ++i) + { + curve[i] = (1.f - curve[i]) * scale; + } + } +} + void rgbCurve::setAllAbsolute() { for(auto &curve : _data) diff --git a/src/aliceVision/hdr/rgbCurve.hpp b/src/aliceVision/hdr/rgbCurve.hpp index 1d5e1df713..4169e01dab 100644 --- a/src/aliceVision/hdr/rgbCurve.hpp +++ b/src/aliceVision/hdr/rgbCurve.hpp @@ -193,6 +193,8 @@ class rgbCurve */ void inverseAllValues(); + void invertAndScaleSecondPart(float scale); + /** * @brief change all value of the image by their absolute value */ From cb976179401327d3c68e958db06f1c45521efc1c Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Tue, 12 Nov 2019 12:51:08 +0100 Subject: [PATCH 112/174] [hdr] grossberg: use images paths in input (instead of all images) --- src/aliceVision/hdr/GrossbergCalibrate.cpp | 28 +++++++++++++++------- src/aliceVision/hdr/GrossbergCalibrate.hpp | 2 +- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/src/aliceVision/hdr/GrossbergCalibrate.cpp b/src/aliceVision/hdr/GrossbergCalibrate.cpp index 5c821bbfad..2108a5fa11 100644 --- a/src/aliceVision/hdr/GrossbergCalibrate.cpp +++ b/src/aliceVision/hdr/GrossbergCalibrate.cpp @@ -21,15 +21,15 @@ GrossbergCalibrate::GrossbergCalibrate(const unsigned int dimension) _dimension = dimension; } -void GrossbergCalibrate::process(const std::vector< std::vector< image::Image > > &ldrImageGroups, +void GrossbergCalibrate::process(const std::vector>& imagePathsGroups, const std::size_t channelQuantization, const std::vector< std::vector > ×, const int nbPoints, const bool fisheye, rgbCurve &response) { - const int nbGroups = ldrImageGroups.size(); - const int nbImages = ldrImageGroups.front().size(); + const int nbGroups = imagePathsGroups.size(); + const int nbImages = imagePathsGroups.front().size(); const int samplesPerImage = nbPoints / (nbGroups*nbImages); //set 
channels count always RGB @@ -103,7 +103,14 @@ void GrossbergCalibrate::process(const std::vector< std::vector< image::Image > &ldrImagesGroup = ldrImageGroups[g]; + const std::vector &imagePaths = imagePathsGroups[g]; + std::vector> ldrImagesGroup(imagePaths.size()); + + for (int i = 0; i < imagePaths.size(); i++) + { + image::readImage(imagePaths[i], ldrImagesGroup[i], image::EImageColorSpace::SRGB); + } + const std::vector &ldrTimes= times[g]; const std::size_t width = ldrImagesGroup.front().Width(); @@ -158,12 +165,19 @@ void GrossbergCalibrate::process(const std::vector< std::vector< image::Image > &ldrImagesGroup = ldrImageGroups[g]; + const std::vector &imagePaths = imagePathsGroups[g]; + std::vector> ldrImagesGroup(imagePaths.size()); + + for (int i = 0; i < imagePaths.size(); i++) + { + ALICEVISION_LOG_INFO("Load " << imagePaths[i]); + image::readImage(imagePaths[i], ldrImagesGroup[i], image::EImageColorSpace::SRGB); + } + const std::vector &ldrTimes= times[g]; const std::size_t width = ldrImagesGroup.front().Width(); @@ -184,7 +198,6 @@ void GrossbergCalibrate::process(const std::vector< std::vector< image::Image > > &ldrImageGroups, + void process(const std::vector>& imagePathsGroups, const std::size_t channelQuantization, const std::vector< std::vector > ×, const int nbPoints, From 020c427289c5ace3704570c42fc7773c23ecda58 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Tue, 12 Nov 2019 12:51:39 +0100 Subject: [PATCH 113/174] [software] LDRToHDR: re-expose important options --- src/software/convert/main_convertLDRToHDR.cpp | 240 +++++++++++++++--- 1 file changed, 209 insertions(+), 31 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 58bcdf4124..b031b6f16a 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -34,6 +34,69 @@ using namespace aliceVision; namespace po = boost::program_options; +namespace fs = 
boost::filesystem; + + + +enum class ECalibrationMethod +{ + LINEAR, + ROBERTSON, + DEBEVEC, + GROSSBERG +}; + +/** +* @brief convert an enum ECalibrationMethod to its corresponding string +* @param ECalibrationMethod +* @return String +*/ +inline std::string ECalibrationMethod_enumToString(const ECalibrationMethod calibrationMethod) +{ + switch (calibrationMethod) + { + case ECalibrationMethod::LINEAR: return "linear"; + case ECalibrationMethod::ROBERTSON: return "robertson"; + case ECalibrationMethod::DEBEVEC: return "debevec"; + case ECalibrationMethod::GROSSBERG: return "grossberg"; + } + throw std::out_of_range("Invalid method name enum"); +} + +/** +* @brief convert a string calibration method name to its corresponding enum ECalibrationMethod +* @param ECalibrationMethod +* @return String +*/ +inline ECalibrationMethod ECalibrationMethod_stringToEnum(const std::string& calibrationMethodName) +{ + std::string methodName = calibrationMethodName; + std::transform(methodName.begin(), methodName.end(), methodName.begin(), ::tolower); + + if (methodName == "linear") return ECalibrationMethod::LINEAR; + if (methodName == "robertson") return ECalibrationMethod::ROBERTSON; + if (methodName == "debevec") return ECalibrationMethod::DEBEVEC; + if (methodName == "grossberg") return ECalibrationMethod::GROSSBERG; + + throw std::out_of_range("Invalid method name : '" + calibrationMethodName + "'"); +} + +inline std::ostream& operator<<(std::ostream& os, ECalibrationMethod calibrationMethodName) +{ + os << ECalibrationMethod_enumToString(calibrationMethodName); + return os; +} + +inline std::istream& operator>>(std::istream& in, ECalibrationMethod& calibrationMethod) +{ + std::string token; + in >> token; + calibrationMethod = ECalibrationMethod_stringToEnum(token); + return in; +} + + + int main(int argc, char * argv[]) { @@ -41,7 +104,15 @@ int main(int argc, char * argv[]) { std::string sfmInputDataFilename = ""; std::string sfmOutputDataFilename = ""; int groupSize = 3; 
+ ECalibrationMethod calibrationMethod = ECalibrationMethod::LINEAR; float clampedValueCorrection = 1.0f; + bool fisheye = false; + int channelQuantizationPower = 10; + int calibrationNbPoints = 0; + + std::string calibrationWeightFunction = "default"; + hdr::EFunctionType fusionWeightFunction = hdr::EFunctionType::GAUSSIAN; + /***** * DESCRIBE COMMAND LINE PARAMETERS @@ -59,7 +130,21 @@ int main(int argc, char * argv[]) { po::options_description optionalParams("Optional parameters"); optionalParams.add_options() - ("expandDynamicRange,e", po::value(&clampedValueCorrection)->default_value(clampedValueCorrection), "float value between 0 and 1 to correct clamped high values in dynamic range: use 0 for no correction, 0.5 for interior lighting and 1 for outdoor lighting."); + ("calibrationMethod,m", po::value(&calibrationMethod)->default_value(calibrationMethod), + "Name of method used for camera calibration: linear, robertson (slow), debevec, grossberg.") + ("expandDynamicRange,e", po::value(&clampedValueCorrection)->default_value(clampedValueCorrection), + "float value between 0 and 1 to correct clamped high values in dynamic range: use 0 for no correction, 0.5 for interior lighting and 1 for outdoor lighting.") + ("fisheyeLens,f", po::value(&fisheye)->default_value(fisheye), + "Set to 1 if images are taken with a fisheye lens and to 0 if not. 
Default value is set to 1.") + ("channelQuantizationPower", po::value(&channelQuantizationPower)->default_value(channelQuantizationPower), + "Quantization level like 8 bits or 10 bits.") + ("calibrationWeight,w", po::value(&calibrationWeightFunction)->default_value(calibrationWeightFunction), + "Weight function used to calibrate camera response (default depends on the calibration method, gaussian, triangle, plateau).") + ("fusionWeight,W", po::value(&fusionWeightFunction)->default_value(fusionWeightFunction), + "Weight function used to fuse all LDR images together (gaussian, triangle, plateau).") + ("calibrationNbPoints", po::value(&calibrationNbPoints)->default_value(calibrationNbPoints), + "Number of points used to calibrate (Use 0 for automatic selection based on the calibration method).") + ; po::options_description logParams("Log parameters"); logParams.add_options() @@ -200,7 +285,7 @@ int main(int argc, char * argv[]) { /*Read original image*/ image::Image originalImage; - image::readImage(v.second->getImagePath(), originalImage, image::EImageColorSpace::SRGB); + image::readImage(v.second->getImagePath(), originalImage, image::EImageColorSpace::LINEAR); /*Create a rotated image*/ image::Image rotated(w, h); @@ -241,8 +326,6 @@ int main(int argc, char * argv[]) { return (path_a.stem().string() < path_b.stem().string()); }); - - /* Make sure all images have same aperture */ @@ -269,7 +352,8 @@ int main(int argc, char * argv[]) { } std::vector> targetViews; - for (auto & group : groupedViews) { + for (auto & group : groupedViews) + { /*Sort all images by exposure time*/ std::sort(group.begin(), group.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { if (a == nullptr || b == nullptr) return true; @@ -289,11 +373,13 @@ int main(int argc, char * argv[]) { /*Build exposure times table*/ std::vector> groupedExposures; - for (int i = 0; i < groupedViews.size(); i++) { + for (int i = 0; i < groupedViews.size(); i++) + { const std::vector> & 
group = groupedViews[i]; std::vector exposures; - for (int j = 0; j < group.size(); j++) { + for (int j = 0; j < group.size(); j++) + { float etime = group[j]->getMetadataShutter(); exposures.push_back(etime); } @@ -302,42 +388,134 @@ int main(int argc, char * argv[]) { /*Build table of file names*/ std::vector> groupedFilenames; - for (int i = 0; i < groupedViews.size(); i++) { + for (int i = 0; i < groupedViews.size(); i++) + { const std::vector> & group = groupedViews[i]; std::vector filenames; - for (int j = 0; j < group.size(); j++) { - + for (int j = 0; j < group.size(); j++) + { filenames.push_back(group[j]->getImagePath()); } groupedFilenames.push_back(filenames); } - - size_t channelQuantization = std::pow(2, 10); - hdr::rgbCurve response(channelQuantization); + + size_t channelQuantization = std::pow(2, channelQuantizationPower); + // set the correct weight functions corresponding to the string parameter hdr::rgbCurve calibrationWeight(channelQuantization); - hdr::rgbCurve fusionWeight(channelQuantization); + std::transform(calibrationWeightFunction.begin(), calibrationWeightFunction.end(), calibrationWeightFunction.begin(), ::tolower); + if (calibrationWeightFunction == "default") + { + switch (calibrationMethod) + { + case ECalibrationMethod::LINEAR: break; + case ECalibrationMethod::DEBEVEC: calibrationWeight.setTriangular(); break; + case ECalibrationMethod::ROBERTSON: calibrationWeight.setRobertsonWeight(); break; + case ECalibrationMethod::GROSSBERG: break; + } + } + else + { + calibrationWeight.setFunction(hdr::EFunctionType_stringToEnum(calibrationWeightFunction)); + } + + hdr::rgbCurve response(channelQuantization); const float lambda = channelQuantization * 1.f; - const int nbPoints = 10000; - bool fisheye = false; - fusionWeight.setFunction(hdr::EFunctionType::GAUSSIAN); calibrationWeight.setTriangular(); - hdr::DebevecCalibrate calibration; - if (!calibration.process(groupedFilenames, channelQuantization, groupedExposures, nbPoints, fisheye, 
calibrationWeight, lambda, response)) { - ALICEVISION_LOG_ERROR("Calibration failed."); - return EXIT_FAILURE; + // calculate the response function according to the method given in argument or take the response provided by the user + { + switch (calibrationMethod) + { + case ECalibrationMethod::LINEAR: + { + // set the response function to linear + response.setLinear(); + + { + hdr::rgbCurve r = response; + r.exponential(); // TODO + std::string outputFolder = fs::path(sfmOutputDataFilename).parent_path().string(); + std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); + std::string outputResponsePath = (fs::path(outputFolder) / (std::string("response_log_") + methodName + std::string(".csv"))).string(); + std::string outputResponsePathHtml = (fs::path(outputFolder) / (std::string("response_log_") + methodName + std::string(".html"))).string(); + + r.write(outputResponsePath); + r.writeHtml(outputResponsePathHtml, "Camera Response Curve " + methodName); + ALICEVISION_LOG_INFO("Camera response function written as " << outputResponsePath); + } + + } + break; + case ECalibrationMethod::DEBEVEC: + { + ALICEVISION_LOG_INFO("Debevec calibration"); + const float lambda = channelQuantization * 1.f; + if(calibrationNbPoints == 0) + calibrationNbPoints = 10000; + hdr::DebevecCalibrate calibration; + calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, calibrationWeight, lambda, response); + + { + std::string outputFolder = fs::path(sfmOutputDataFilename).parent_path().string(); + std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); + std::string outputResponsePath = (fs::path(outputFolder) / (std::string("response_log_") + methodName + std::string(".csv"))).string(); + std::string outputResponsePathHtml = (fs::path(outputFolder) / (std::string("response_log_") + methodName + std::string(".html"))).string(); + + response.write(outputResponsePath); + 
response.writeHtml(outputResponsePathHtml, "Camera Response Curve " + methodName); + ALICEVISION_LOG_INFO("Camera response function written as " << outputResponsePath); + } + + response.exponential(); + response.scale(); + } + break; + case ECalibrationMethod::ROBERTSON: + { + /* + ALICEVISION_LOG_INFO("Robertson calibration"); + hdr::RobertsonCalibrate calibration(10); + if(calibrationNbPoints == 0) + calibrationNbPoints = 1000000; + calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, calibrationWeight, response); + response.scale(); + */ + } + break; + case ECalibrationMethod::GROSSBERG: + { + ALICEVISION_LOG_INFO("Grossberg calibration"); + if (calibrationNbPoints == 0) + calibrationNbPoints = 1000000; + hdr::GrossbergCalibrate calibration(3); + calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, response); + } + break; + } } ALICEVISION_LOG_INFO("Calibration done."); - response.exponential(); - response.scale(); + { + std::string outputFolder = fs::path(sfmOutputDataFilename).parent_path().string(); + std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); + std::string outputResponsePath = (fs::path(outputFolder) / (std::string("response_") + methodName + std::string(".csv"))).string(); + std::string outputResponsePathHtml = (fs::path(outputFolder) / (std::string("response_") + methodName + std::string(".html"))).string(); + + response.write(outputResponsePath); + response.writeHtml(outputResponsePathHtml, "Camera Response Curve " + methodName); + ALICEVISION_LOG_INFO("Camera response function written as " << outputResponsePath); + } + // HDR Fusion + + hdr::rgbCurve fusionWeight(channelQuantization); + fusionWeight.setFunction(fusionWeightFunction); sfmData::SfMData outputSfm; sfmData::Views & vs = outputSfm.getViews(); @@ -347,31 +525,31 @@ int main(int argc, char * argv[]) { { std::vector> images(groupSize); std::shared_ptr 
targetView = targetViews[g]; - if (targetView == nullptr) { + if (targetView == nullptr) + { ALICEVISION_LOG_ERROR("Null view"); return EXIT_FAILURE; } /* Load all images of the group */ - for (int i = 0; i < groupSize; i++) { + for (int i = 0; i < groupSize; i++) + { ALICEVISION_LOG_INFO("Load " << groupedFilenames[g][i]); - image::readImage(groupedFilenames[g][i], images[i], image::EImageColorSpace::SRGB); + image::readImage(groupedFilenames[g][i], images[i], (calibrationMethod == ECalibrationMethod::LINEAR) ? image::EImageColorSpace::LINEAR : image::EImageColorSpace::SRGB); } - - - /**/ + /* Merge HDR images */ hdr::hdrMerge merge; float targetTime = targetView->getMetadataShutter(); image::Image HDRimage(targetView->getWidth(), targetView->getHeight(), false); merge.process(images, groupedExposures[g], fusionWeight, response, HDRimage, targetTime, false, clampedValueCorrection); - /*Output image file path*/ + /* Output image file path */ std::string hdr_output_path; std::stringstream sstream; sstream << output_path << "/" << "hdr_" << std::setfill('0') << std::setw(4) << g << ".exr"; - /*Write an image with parameters from the target view*/ + /* Write an image with parameters from the target view */ oiio::ParamValueList targetMetadata = image::readImageMetadata(targetView->getImagePath()); image::writeImage(sstream.str(), HDRimage, image::EImageColorSpace::AUTO, targetMetadata); From b4f7dd16d8ca5185f3b26536adabebcedeca2bc3 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Wed, 13 Nov 2019 17:42:25 +0100 Subject: [PATCH 114/174] splitting compositing with warping --- src/software/pipeline/CMakeLists.txt | 15 +- .../pipeline/main_panoramaCompositing.cpp | 1095 +++++++++++++++++ ...Stitching.cpp => main_panoramaWarping.cpp} | 544 +------- 3 files changed, 1159 insertions(+), 495 deletions(-) create mode 100644 src/software/pipeline/main_panoramaCompositing.cpp rename src/software/pipeline/{main_panoramaStitching.cpp => main_panoramaWarping.cpp} (65%) diff --git 
a/src/software/pipeline/CMakeLists.txt b/src/software/pipeline/CMakeLists.txt index 2c26d2583c..323a5d46a3 100644 --- a/src/software/pipeline/CMakeLists.txt +++ b/src/software/pipeline/CMakeLists.txt @@ -100,8 +100,19 @@ if(ALICEVISION_BUILD_SFM) aliceVision_sfmDataIO ${Boost_LIBRARIES} ) - alicevision_add_software(aliceVision_panoramaStitching - SOURCE main_panoramaStitching.cpp + alicevision_add_software(aliceVision_panoramaWarping + SOURCE main_panoramaWarping.cpp + FOLDER ${FOLDER_SOFTWARE_PIPELINE} + LINKS aliceVision_system + aliceVision_image + aliceVision_feature + aliceVision_sfm + aliceVision_sfmData + aliceVision_sfmDataIO + ${Boost_LIBRARIES} + ) + alicevision_add_software(aliceVision_panoramaCompositing + SOURCE main_panoramaCompositing.cpp FOLDER ${FOLDER_SOFTWARE_PIPELINE} LINKS aliceVision_system aliceVision_image diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp new file mode 100644 index 0000000000..7944faba48 --- /dev/null +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -0,0 +1,1095 @@ +/** + * Input and geometry +*/ +#include +#include + +/** + * Image stuff + */ +#include +#include + +/*Logging stuff*/ +#include + +/*Reading command line options*/ +#include +#include + +/*IO*/ +#include +#include +#include +#include + +// These constants define the current software version. +// They must be updated when the command line is changed. 
+#define ALICEVISION_SOFTWARE_VERSION_MAJOR 1 +#define ALICEVISION_SOFTWARE_VERSION_MINOR 0 + +using namespace aliceVision; + +namespace po = boost::program_options; +namespace bpt = boost::property_tree; + +float sigmoid(float x, float sigwidth, float sigMid) +{ + return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); +} + +Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { + + Eigen::VectorXd x; + x.setLinSpaced(kernel_length + 1, -sigma, +sigma); + + Eigen::VectorXd cdf(kernel_length + 1); + for (int i = 0; i < kernel_length + 1; i++) { + cdf(i) = 0.5 * (1 + std::erf(x(i)/sqrt(2.0))); + } + + Eigen::VectorXd k1d(kernel_length); + for (int i = 0; i < kernel_length; i++) { + k1d(i) = cdf(i + 1) - cdf(i); + } + + double sum = k1d.sum(); + k1d = k1d / sum; + + return k1d.cast(); +} + +Eigen::MatrixXf gaussian_kernel(size_t kernel_length, float sigma) { + + Eigen::VectorXf k1d = gaussian_kernel_vector(kernel_length, sigma); + Eigen::MatrixXf K = k1d * k1d.transpose(); + + + double sum = K.sum(); + K = K / sum; + + return K; +} + + + +bool convolve(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::MatrixXf & kernel) { + + if (output.size() != input.size()) { + return false; + } + + if (output.size() != mask.size()) { + return false; + } + + if (kernel.size() % 2 == 0) { + return false; + } + + if (kernel.rows() != kernel.cols()) { + return false; + } + + int radius = kernel.rows() / 2; + + Eigen::MatrixXf kernel_scaled = kernel; + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + float sum = 0.0f; + float sum_mask = 0.0f; + + if (!mask(i, j)) { + output(i, j) = 0.0f; + continue; + } + + for (int k = 0; k < kernel.rows(); k++) { + float ni = i + k - radius; + if (ni < 0 || ni >= output.Height()) { + continue; + } + + for (int l = 0; l < kernel.cols(); l++) { + float nj = j + l - radius; + if (nj < 0 || nj >= output.Width()) { + continue; + } + + if 
(!mask(ni, nj)) { + continue; + } + + float val = kernel(k, l) * input(ni, nj); + sum += val; + sum_mask += kernel(k, l); + } + } + + output(i, j) = sum / sum_mask; + } + } + + return true; +} + +bool convolve(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::MatrixXf & kernel) { + + if (output.size() != input.size()) { + return false; + } + + if (output.size() != mask.size()) { + return false; + } + + if (kernel.size() % 2 == 0) { + return false; + } + + if (kernel.rows() != kernel.cols()) { + return false; + } + + int radius = kernel.rows() / 2; + + Eigen::MatrixXf kernel_scaled = kernel; + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + image::RGBfColor sum(0.0f); + float sum_mask = 0.0f; + + if (!mask(i, j)) { + output(i, j) = sum; + continue; + } + + for (int k = 0; k < kernel.rows(); k++) { + float ni = i + k - radius; + if (ni < 0 || ni >= output.Height()) { + continue; + } + + for (int l = 0; l < kernel.cols(); l++) { + float nj = j + l - radius; + if (nj < 0 || nj >= output.Width()) { + continue; + } + + if (!mask(ni, nj)) { + continue; + } + + sum.r() += kernel(k, l) * input(ni, nj).r(); + sum.g() += kernel(k, l) * input(ni, nj).g(); + sum.b() += kernel(k, l) * input(ni, nj).b(); + sum_mask += kernel(k, l); + } + } + + output(i, j) = sum / sum_mask; + } + } + + return true; +} + +bool computeDistanceMap(image::Image & distance, const image::Image & mask) { + + int m = mask.Height(); + int n = mask.Width(); + + int maxval = m * n; + + distance = image::Image (n, m, false); + for(int x = 0; x < n; ++x) { + + //A corner is when mask becomes 0 + bool b = !mask(0, x); + if (b) { + distance(0, x) = 0; + } + else { + distance(0, x) = maxval * maxval; + } + + for (int y = 1; y < m; y++) { + bool b = !mask(y, x); + if (b) { + distance(y, x) = 0; + } + else { + distance(y, x) = 1 + distance(y - 1, x); + } + } + + for (int y = m - 2; y >= 0; y--) { + if (distance(y + 1, x) < 
distance(y, x)) { + distance(y, x) = 1 + distance(y + 1, x); + } + } + } + + for (int y = 0; y < m; y++) { + int q; + std::map s; + std::map t; + + q = 0; + s[0] = 0; + t[0] = 0; + + std::function f = [distance, y](int x, int i) { + int gi = distance(y, i); + return (x - i)*(x - i) + gi * gi; + }; + + std::function sep = [distance, y](int i, int u) { + int gu = distance(y, u); + int gi = distance(y, i); + + int nom = (u * u) - (i * i) + (gu * gu) - (gi * gi); + int denom = 2 * (u - i); + + return nom / denom; + }; + + for (int u = 1; u < n; u++) { + + while (q >= 0 && (f(t[q], s[q]) > f(t[q], u))) { + q = q - 1; + } + + if (q < 0) { + q = 0; + s[0] = u; + } + else { + int w = 1 + sep(s[q], u); + if (w < n) { + q = q + 1; + s[q] = u; + t[q] = w; + } + } + } + + for (int u = n - 1; u >= 0; u--) { + distance(y, u) = f(u, s[q]); + if (u == t[q]) { + q = q - 1; + } + } + } + + return true; +} + + +namespace SphericalMapping +{ + /** + * Map from equirectangular to spherical coordinates + * @param equirectangular equirectangular coordinates + * @param width number of pixels used to represent longitude + * @param height number of pixels used to represent latitude + * @return spherical coordinates + */ + Vec3 fromEquirectangular(const Vec2 & equirectangular, int width, int height) + { + const double latitude = (equirectangular(1) / double(height)) * M_PI - M_PI_2; + const double longitude = ((equirectangular(0) / double(width)) * 2.0 * M_PI) - M_PI; + + const double Px = cos(latitude) * sin(longitude); + const double Py = sin(latitude); + const double Pz = cos(latitude) * cos(longitude); + + return Vec3(Px, Py, Pz); + } + + /** + * Map from Spherical to equirectangular coordinates + * @param spherical spherical coordinates + * @param width number of pixels used to represent longitude + * @param height number of pixels used to represent latitude + * @return equirectangular coordinates + */ + Vec2 toEquirectangular(const Vec3 & spherical, int width, int height) { + + double 
vertical_angle = asin(spherical(1)); + double horizontal_angle = atan2(spherical(0), spherical(2)); + + double latitude = ((vertical_angle + M_PI_2) / M_PI) * height; + double longitude = ((horizontal_angle + M_PI) / (2.0 * M_PI)) * width; + + return Vec2(longitude, latitude); + } + + /** + * Map from Spherical to equirectangular coordinates in radians + * @param spherical spherical coordinates + * @return equirectangular coordinates + */ + Vec2 toLongitudeLatitude(const Vec3 & spherical) { + + double latitude = asin(spherical(1)); + double longitude = atan2(spherical(0), spherical(2)); + + return Vec2(longitude, latitude); + } +} + +class GaussianPyramidNoMask { +public: + GaussianPyramidNoMask(const size_t width_base, const size_t height_base, const size_t limit_scales = 64) : + _width_base(width_base), + _height_base(height_base) + { + /** + * Compute optimal scale + * The smallest level will be at least of size min_size + */ + size_t min_dim = std::min(_width_base, _height_base); + size_t min_size = 32; + _scales = std::min(limit_scales, static_cast(floor(log2(double(min_dim) / float(min_size))))); + + + /** + * Create pyramid + **/ + size_t new_width = _width_base; + size_t new_height = _height_base; + for (int i = 0; i < _scales; i++) { + + _pyramid_color.push_back(image::Image(new_width, new_height, true, image::RGBfColor(0))); + _filter_buffer.push_back(image::Image(new_width, new_height, true, image::RGBfColor(0))); + new_height /= 2; + new_width /= 2; + } + } + + bool process(const image::Image & input) { + + if (input.Height() != _pyramid_color[0].Height()) return false; + if (input.Width() != _pyramid_color[0].Width()) return false; + + + /** + * Kernel + */ + oiio::ImageBuf K; + oiio::ImageBufAlgo::make_kernel(K, "gaussian", 5, 5); + + /** + * Build pyramid + */ + _pyramid_color[0] = input; + for (int lvl = 0; lvl < _scales - 1; lvl++) { + + const image::Image & source = _pyramid_color[lvl]; + image::Image & dst = _filter_buffer[lvl]; + + 
oiio::ImageSpec spec(source.Width(), source.Height(), 3, oiio::TypeDesc::FLOAT); + + const oiio::ImageBuf inBuf(spec, const_cast(source.data())); + oiio::ImageBuf outBuf(spec, dst.data()); + oiio::ImageBufAlgo::convolve(outBuf, inBuf, K); + + downscale(_pyramid_color[lvl + 1], _filter_buffer[lvl]); + } + + return true; + } + + + bool downscale(image::Image & output, const image::Image & input) { + + for (int i = 0; i < output.Height(); i++) { + int ui = i * 2; + + for (int j = 0; j < output.Width(); j++) { + int uj = j * 2; + + output(i, j) = input(ui, uj); + } + } + + return true; + } + + const size_t getScalesCount() const { + return _scales; + } + + const std::vector> & getPyramidColor() const { + return _pyramid_color; + } + + std::vector> & getPyramidColor() { + return _pyramid_color; + } + +protected: + std::vector> _pyramid_color; + std::vector> _filter_buffer; + size_t _width_base; + size_t _height_base; + size_t _scales; +}; + +class CoordinatesMap { +private: + struct BBox { + int left; + int top; + int width; + int height; + }; + +public: + /** + * Build coordinates map given camera properties + * @param panoramaSize desired output panoramaSize + * @param pose the camera pose wrt an arbitrary reference frame + * @param intrinsics the camera intrinsics + */ + bool build(const std::pair & panoramaSize, const geometry::Pose3 & pose, const aliceVision::camera::IntrinsicBase & intrinsics) { + + BBox coarse_bbox; + if (!computeCoarseBB(coarse_bbox, panoramaSize, pose, intrinsics)) { + return false; + } + + + /* Effectively compute the warping map */ + aliceVision::image::Image buffer_coordinates(coarse_bbox.width, coarse_bbox.height, false); + aliceVision::image::Image buffer_mask(coarse_bbox.width, coarse_bbox.height, true, 0); + + size_t max_x = 0; + size_t max_y = 0; + size_t min_x = panoramaSize.first; + size_t min_y = panoramaSize.second; + +#ifdef _MSC_VER + // TODO + // no support for reduction min in MSVC implementation of openmp +#else + #pragma omp 
parallel for reduction(min: min_x, min_y) reduction(max: max_x, max_y) +#endif + for (size_t y = 0; y < coarse_bbox.height; y++) { + + size_t cy = y + coarse_bbox.top; + + size_t row_max_x = 0; + size_t row_max_y = 0; + size_t row_min_x = panoramaSize.first; + size_t row_min_y = panoramaSize.second; + + + for (size_t x = 0; x < coarse_bbox.width; x++) { + + size_t cx = x + coarse_bbox.left; + + Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(cx, cy), panoramaSize.first, panoramaSize.second); + + /** + * Check that this ray should be visible. + * This test is camera type dependent + */ + Vec3 transformedRay = pose(ray); + if (!intrinsics.isVisibleRay(transformedRay)) { + continue; + } + + /** + * Project this ray to camera pixel coordinates + */ + const Vec2 pix_disto = intrinsics.project(pose, ray, true); + + /** + * Ignore invalid coordinates + */ + if (!intrinsics.isVisible(pix_disto)) { + continue; + } + + + buffer_coordinates(y, x) = pix_disto; + buffer_mask(y, x) = 1; + + row_min_x = std::min(x, row_min_x); + row_min_y = std::min(y, row_min_y); + row_max_x = std::max(x, row_max_x); + row_max_y = std::max(y, row_max_y); + } + + min_x = std::min(row_min_x, min_x); + min_y = std::min(row_min_y, min_y); + max_x = std::max(row_max_x, max_x); + max_y = std::max(row_max_y, max_y); + } + + _offset_x = coarse_bbox.left + min_x; + if (_offset_x > panoramaSize.first) { + /*The coarse bounding box may cross the borders where as the true coordinates may not*/ + int ox = int(_offset_x) - int(panoramaSize.first); + _offset_x = ox; + } + _offset_y = coarse_bbox.top + min_y; + + size_t real_width = max_x - min_x + 1; + size_t real_height = max_y - min_y + 1; + + /* Resize buffers */ + _coordinates = aliceVision::image::Image(real_width, real_height, false); + _mask = aliceVision::image::Image(real_width, real_height, true, 0); + + _coordinates.block(0, 0, real_height, real_width) = buffer_coordinates.block(min_y, min_x, real_height, real_width); + _mask.block(0, 0, 
real_height, real_width) = buffer_mask.block(min_y, min_x, real_height, real_width); + + return true; + } + + bool computeScale(double & result) { + + std::vector scales; + size_t real_height = _coordinates.Height(); + size_t real_width = _coordinates.Width(); + + for (int i = 0; i < real_height - 1; i++) { + for (int j = 0; j < real_width - 1; j++) { + if (!_mask(i, j) || !_mask(i, j + 1) || !_mask(i + 1, j)) { + continue; + } + + double dxx = _coordinates(i, j + 1).x() - _coordinates(i, j).x(); + double dxy = _coordinates(i + 1, j).x() - _coordinates(i, j).x(); + double dyx = _coordinates(i, j + 1).y() - _coordinates(i, j).y(); + double dyy = _coordinates(i + 1, j).y() - _coordinates(i, j).y(); + + double det = std::abs(dxx*dyy - dxy*dyx); + scales.push_back(det); + } + } + + if (scales.size() <= 1) return false; + + std::nth_element(scales.begin(), scales.begin() + scales.size() / 2, scales.end()); + result = sqrt(scales[scales.size() / 2]); + + + return true; + } + + size_t getOffsetX() const { + return _offset_x; + } + + size_t getOffsetY() const { + return _offset_y; + } + + const aliceVision::image::Image & getCoordinates() const { + return _coordinates; + } + + const aliceVision::image::Image & getMask() const { + return _mask; + } + +private: + + bool computeCoarseBB(BBox & coarse_bbox, const std::pair & panoramaSize, const geometry::Pose3 & pose, const aliceVision::camera::IntrinsicBase & intrinsics) { + + coarse_bbox.left = 0; + coarse_bbox.top = 0; + coarse_bbox.width = panoramaSize.first; + coarse_bbox.height = panoramaSize.second; + + int bbox_left, bbox_top; + int bbox_right, bbox_bottom; + int bbox_width, bbox_height; + + /*Estimate distorted maximal distance from optical center*/ + Vec2 pts[] = {{0.0f, 0.0f}, {intrinsics.w(), 0.0f}, {intrinsics.w(), intrinsics.h()}, {0.0f, intrinsics.h()}}; + float max_radius = 0.0; + for (int i = 0; i < 4; i++) { + + Vec2 ptmeter = intrinsics.ima2cam(pts[i]); + float radius = ptmeter.norm(); + max_radius = 
std::max(max_radius, radius); + } + + /* Estimate undistorted maximal distance from optical center */ + float max_radius_distorted = intrinsics.getMaximalDistortion(0.0, max_radius); + + /* + Coarse rectangle bouding box in camera space + We add intermediate points to ensure arclength between 2 points is never more than 180° + */ + Vec2 pts_radius[] = { + {-max_radius_distorted, -max_radius_distorted}, + {0, -max_radius_distorted}, + {max_radius_distorted, -max_radius_distorted}, + {max_radius_distorted, 0}, + {max_radius_distorted, max_radius_distorted}, + {0, max_radius_distorted}, + {-max_radius_distorted, max_radius_distorted}, + {-max_radius_distorted, 0} + }; + + + /* + Transform bounding box into the panorama frame. + Point are on a unit sphere. + */ + Vec3 rotated_pts[8]; + for (int i = 0; i < 8; i++) { + Vec3 pt3d = pts_radius[i].homogeneous().normalized(); + rotated_pts[i] = pose.rotation().transpose() * pt3d; + } + + /* Vertical Default solution : no pole*/ + bbox_top = panoramaSize.second; + bbox_bottom = 0; + + for (int i = 0; i < 8; i++) { + int i2 = i + 1; + if (i2 > 7) i2 = 0; + + Vec3 extremaY = getExtremaY(rotated_pts[i], rotated_pts[i2]); + + Vec2 res; + res = SphericalMapping::toEquirectangular(extremaY, panoramaSize.first, panoramaSize.second); + bbox_top = std::min(int(floor(res(1))), bbox_top); + bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); + + res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + bbox_top = std::min(int(floor(res(1))), bbox_top); + bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); + } + + /* + Check if our region circumscribe a pole of the sphere : + Check that the region projected on the Y=0 plane contains the point (0, 0) + This is a special projection case + */ + bool pole = isPoleInTriangle(rotated_pts[0], rotated_pts[1], rotated_pts[7]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[2], rotated_pts[3]); + pole |= isPoleInTriangle(rotated_pts[3], 
rotated_pts[4], rotated_pts[5]); + pole |= isPoleInTriangle(rotated_pts[7], rotated_pts[5], rotated_pts[6]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[3], rotated_pts[5]); + pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[5], rotated_pts[7]); + + + if (pole) { + Vec3 normal = (rotated_pts[1] - rotated_pts[0]).cross(rotated_pts[3] - rotated_pts[0]); + if (normal(1) > 0) { + //Lower pole + bbox_bottom = panoramaSize.second - 1; + } + else { + //upper pole + bbox_top = 0; + } + } + + bbox_height = bbox_bottom - bbox_top + 1; + + + /*Check if we cross the horizontal loop*/ + bool crossH; + for (int i = 0; i < 8; i++) { + int i2 = i + 1; + if (i2 > 7) i2 = 0; + + bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); + if (i == 0) crossH = cross; + else crossH |= cross; + } + + + if (pole) { + /*Easy : if we cross the pole, the width is full*/ + bbox_left = 0; + bbox_right = panoramaSize.first - 1; + bbox_width = bbox_right - bbox_left + 1; + } + else if (crossH) { + + for (int i = 0; i < 8; i+= 2) { + int i2 = i + 1; + int i3 = i + 2; + if (i3 >= 8) { + i3 = 0; + } + + + } + bbox_left = 0; + bbox_right = panoramaSize.first - 1; + bbox_width = bbox_right - bbox_left + 1; + } + else { + /*horizontal default solution : no border crossing, no pole*/ + bbox_left = panoramaSize.first; + bbox_right = 0; + for (int i = 0; i < 8; i++) { + Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); + bbox_left = std::min(int(floor(res(0))), bbox_left); + bbox_right = std::max(int(ceil(res(0))), bbox_right); + } + bbox_width = bbox_right - bbox_left + 1; + } + + /*Assign solution to result*/ + coarse_bbox.left = bbox_left; + coarse_bbox.top = bbox_top; + coarse_bbox.width = bbox_width; + coarse_bbox.height = bbox_height; + + return true; + } + + Vec3 getExtremaY(const Vec3 & pt1, const Vec3 & pt2) { + Vec3 delta = pt2 - pt1; + double dx = delta(0); + double dy = delta(1); + double dz = delta(2); + double 
sx = pt1(0); + double sy = pt1(1); + double sz = pt1(2); + + double ot_y = -(dx*sx*sy - (dy*sx)*(dy*sx) - (dy*sz)*(dy*sz) + dz*sy*sz)/(dx*dx*sy - dx*dy*sx - dy*dz*sz + dz*dz*sy); + + Vec3 pt_extrema = pt1 + ot_y * delta; + + return pt_extrema.normalized(); + } + + bool crossHorizontalLoop(const Vec3 & pt1, const Vec3 & pt2) { + Vec3 direction = pt2 - pt1; + + /*Vertical line*/ + if (std::abs(direction(0)) < 1e-12) { + return false; + } + + double t = - pt1(0) / direction(0); + Vec3 cross = pt1 + direction * t; + + if (t >= 0.0 && t < 1.0) { + if (cross(2) < 0.0) { + return true; + } + } + + return false; + } + + bool isPoleInTriangle(const Vec3 & pt1, const Vec3 & pt2, const Vec3 & pt3) { + + double a = (pt2.x()*pt3.z() - pt3.x()*pt2.z())/(pt1.x()*pt2.z() - pt1.x()*pt3.z() - pt2.x()*pt1.z() + pt2.x()*pt3.z() + pt3.x()*pt1.z() - pt3.x()*pt2.z()); + double b = (-pt1.x()*pt3.z() + pt3.x()*pt1.z())/(pt1.x()*pt2.z() - pt1.x()*pt3.z() - pt2.x()*pt1.z() + pt2.x()*pt3.z() + pt3.x()*pt1.z() - pt3.x()*pt2.z()); + double c = 1.0 - a - b; + + if (a < 0.0 || a > 1.0) return false; + if (b < 0.0 || b > 1.0) return false; + if (c < 0.0 || c > 1.0) return false; + + return true; + } + +private: + size_t _offset_x = 0; + size_t _offset_y = 0; + + aliceVision::image::Image _coordinates; + aliceVision::image::Image _mask; +}; + + +class Warper { +public: + virtual bool warp(const CoordinatesMap & map, const aliceVision::image::Image & source) { + + /** + * Copy additional info from map + */ + _offset_x = map.getOffsetX(); + _offset_y = map.getOffsetY(); + _mask = map.getMask(); + + const image::Sampler2d sampler; + const aliceVision::image::Image & coordinates = map.getCoordinates(); + + /** + * Create buffer + * No longer need to keep a 2**x size + */ + _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); + + /** + * Simple warp + */ + for (size_t i = 0; i < _color.Height(); i++) { + for (size_t j = 0; j < _color.Width(); j++) { + + bool valid = _mask(i, 
j); + if (!valid) { + continue; + } + + const Eigen::Vector2d & coord = coordinates(i, j); + const image::RGBfColor pixel = sampler(source, coord(1), coord(0)); + + _color(i, j) = pixel; + } + } + + return true; + } + + const aliceVision::image::Image & getColor() const { + return _color; + } + + const aliceVision::image::Image & getMask() const { + return _mask; + } + + + size_t getOffsetX() const { + return _offset_x; + } + + size_t getOffsetY() const { + return _offset_y; + } + +protected: + size_t _offset_x = 0; + size_t _offset_y = 0; + + aliceVision::image::Image _color; + aliceVision::image::Image _mask; +}; + +class GaussianWarper : public Warper { +public: + virtual bool warp(const CoordinatesMap & map, const aliceVision::image::Image & source) { + + /** + * Copy additional info from map + */ + _offset_x = map.getOffsetX(); + _offset_y = map.getOffsetY(); + _mask = map.getMask(); + + const image::Sampler2d sampler; + const aliceVision::image::Image & coordinates = map.getCoordinates(); + + /** + * Create a pyramid for input + */ + GaussianPyramidNoMask pyramid(source.Width(), source.Height()); + pyramid.process(source); + const std::vector> & mlsource = pyramid.getPyramidColor(); + size_t max_level = pyramid.getScalesCount() - 1; + + /** + * Create buffer + */ + _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); + + + /** + * Multi level warp + */ + for (size_t i = 0; i < _color.Height(); i++) { + for (size_t j = 0; j < _color.Width(); j++) { + + bool valid = _mask(i, j); + if (!valid) { + continue; + } + + if (i == _color.Height() - 1 || j == _color.Width() - 1 || !_mask(i + 1, j) || !_mask(i, j + 1)) { + const Eigen::Vector2d & coord = coordinates(i, j); + const image::RGBfColor pixel = sampler(source, coord(1), coord(0)); + _color(i, j) = pixel; + continue; + } + + const Eigen::Vector2d & coord_mm = coordinates(i, j); + const Eigen::Vector2d & coord_mp = coordinates(i, j + 1); + const Eigen::Vector2d & coord_pm = 
coordinates(i + 1, j); + + double dxx = coord_pm(0) - coord_mm(0); + double dxy = coord_mp(0) - coord_mm(0); + double dyx = coord_pm(1) - coord_mm(1); + double dyy = coord_mp(1) - coord_mm(1); + double det = std::abs(dxx*dyy - dxy*dyx); + double scale = sqrt(det); + + + + + double flevel = std::max(0.0, log2(scale)); + size_t blevel = std::min(max_level, size_t(floor(flevel))); + + double dscale, x, y; + dscale = 1.0 / pow(2.0, blevel); + x = coord_mm(0) * dscale; + y = coord_mm(1) * dscale; + /*Fallback to first level if outside*/ + if (x >= mlsource[blevel].Width() - 1 || y >= mlsource[blevel].Height() - 1) { + _color(i, j) = sampler(mlsource[0], coord_mm(1), coord_mm(0)); + continue; + } + + _color(i, j) = sampler(mlsource[blevel], y, x); + } + } + + return true; + } +}; + +bool computeOptimalPanoramaSize(std::pair & optimalSize, const sfmData::SfMData & sfmData) { + + optimalSize.first = 512; + optimalSize.second = 256; + + /** + * Loop over views to estimate best scale + */ + std::vector scales; + for (auto & viewIt: sfmData.getViews()) { + + /** + * Retrieve view + */ + const sfmData::View& view = *viewIt.second.get(); + if (!sfmData.isPoseAndIntrinsicDefined(&view)) { + continue; + } + + /** + * Get intrinsics and extrinsics + */ + const geometry::Pose3 camPose = sfmData.getPose(view).getTransform(); + const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); + + /** + * Compute map + */ + CoordinatesMap map; + if (!map.build(optimalSize, camPose, intrinsic)) { + continue; + } + + double scale; + if (!map.computeScale(scale)) { + continue; + } + + scales.push_back(scale); + } + + + if (scales.size() > 1) { + double median_scale; + std::nth_element(scales.begin(), scales.begin() + scales.size() / 2, scales.end()); + median_scale = scales[scales.size() / 2]; + + double multiplier = pow(2.0, int(floor(log2(median_scale)))); + + optimalSize.first = optimalSize.first * multiplier; + optimalSize.second = optimalSize.second * 
multiplier; + } + + return true; +} + +int main(int argc, char **argv) { + + /** + * Program description + */ + po::options_description allParams ( + "Perform panorama stiching of cameras around a nodal point for 360° panorama creation. \n" + "AliceVision PanoramaCompositing" + ); + + /** + * Description of mandatory parameters + */ + std::string inputDirectory; + std::string outputPanorama; + po::options_description requiredParams("Required parameters"); + requiredParams.add_options() + ("input,i", po::value(&inputDirectory)->required(), "SfMData file.") + ("output,o", po::value(&outputPanorama)->required(), "Path of the output panorama."); + allParams.add(requiredParams); + + /** + * Description of optional parameters + */ + po::options_description optionalParams("Optional parameters"); + allParams.add(optionalParams); + + /** + * Setup log level given command line + */ + std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); + po::options_description logParams("Log parameters"); + logParams.add_options() + ("verboseLevel,v", po::value(&verboseLevel)->default_value(verboseLevel), "verbosity level (fatal, error, warning, info, debug, trace)."); + allParams.add(logParams); + + + /** + * Effectively parse command line given parse options + */ + po::variables_map vm; + try + { + po::store(po::parse_command_line(argc, argv, allParams), vm); + + if(vm.count("help") || (argc == 1)) + { + ALICEVISION_COUT(allParams); + return EXIT_SUCCESS; + } + po::notify(vm); + } + catch(boost::program_options::required_option& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + catch(boost::program_options::error& e) + { + ALICEVISION_CERR("ERROR: " << e.what()); + ALICEVISION_COUT("Usage:\n\n" << allParams); + return EXIT_FAILURE; + } + + ALICEVISION_COUT("Program called with the following parameters:"); + ALICEVISION_COUT(vm); + + + /** + * Set verbose level 
given command line + */ + system::Logger::get()->setLogLevel(verboseLevel); + + + return EXIT_SUCCESS; +} \ No newline at end of file diff --git a/src/software/pipeline/main_panoramaStitching.cpp b/src/software/pipeline/main_panoramaWarping.cpp similarity index 65% rename from src/software/pipeline/main_panoramaStitching.cpp rename to src/software/pipeline/main_panoramaWarping.cpp index eac8f3a7e3..fe1092beff 100644 --- a/src/software/pipeline/main_panoramaStitching.cpp +++ b/src/software/pipeline/main_panoramaWarping.cpp @@ -19,9 +19,9 @@ /*IO*/ #include - #include - +#include +#include // These constants define the current software version. // They must be updated when the command line is changed. @@ -31,6 +31,7 @@ using namespace aliceVision; namespace po = boost::program_options; +namespace bpt = boost::property_tree; float sigmoid(float x, float sigwidth, float sigMid) { @@ -539,19 +540,16 @@ class CoordinatesMap { _offset_x = ox; } _offset_y = coarse_bbox.top + min_y; - _real_width = max_x - min_x + 1; - _real_height = max_y - min_y + 1; - - /* Make sure the buffer is a power of 2 for potential pyramids*/ - size_t rectified_width = pow(2.0, ceil(log2(double(_real_width)))); - size_t rectified_height = pow(2.0, ceil(log2(double(_real_height)))); + + size_t real_width = max_x - min_x + 1; + size_t real_height = max_y - min_y + 1; /* Resize buffers */ - _coordinates = aliceVision::image::Image(rectified_width, rectified_height, false); - _mask = aliceVision::image::Image(rectified_width, rectified_height, true, 0); + _coordinates = aliceVision::image::Image(real_width, real_height, false); + _mask = aliceVision::image::Image(real_width, real_height, true, 0); - _coordinates.block(0, 0, _real_height, _real_width) = buffer_coordinates.block(min_y, min_x, _real_height, _real_width); - _mask.block(0, 0, _real_height, _real_width) = buffer_mask.block(min_y, min_x, _real_height, _real_width); + _coordinates.block(0, 0, real_height, real_width) = 
buffer_coordinates.block(min_y, min_x, real_height, real_width); + _mask.block(0, 0, real_height, real_width) = buffer_mask.block(min_y, min_x, real_height, real_width); return true; } @@ -559,9 +557,11 @@ class CoordinatesMap { bool computeScale(double & result) { std::vector scales; + size_t real_height = _coordinates.Height(); + size_t real_width = _coordinates.Width(); - for (int i = 0; i < _real_height - 1; i++) { - for (int j = 0; j < _real_width - 1; j++) { + for (int i = 0; i < real_height - 1; i++) { + for (int j = 0; j < real_width - 1; j++) { if (!_mask(i, j) || !_mask(i, j + 1) || !_mask(i + 1, j)) { continue; } @@ -593,14 +593,6 @@ class CoordinatesMap { return _offset_y; } - size_t getWidth() const { - return _real_width; - } - - size_t getHeight() const { - return _real_height; - } - const aliceVision::image::Image & getCoordinates() const { return _coordinates; } @@ -815,60 +807,11 @@ class CoordinatesMap { private: size_t _offset_x = 0; size_t _offset_y = 0; - size_t _real_width = 0; - size_t _real_height = 0; aliceVision::image::Image _coordinates; aliceVision::image::Image _mask; }; -class AlphaBuilder { -public: - virtual bool build(const CoordinatesMap & map, const aliceVision::camera::IntrinsicBase & intrinsics) { - - float w = static_cast(intrinsics.w()); - float h = static_cast(intrinsics.h()); - float cx = w / 2.0f; - float cy = h / 2.0f; - - - const aliceVision::image::Image & coordinates = map.getCoordinates(); - const aliceVision::image::Image & mask = map.getMask(); - - _weights = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); - - for (int i = 0; i < _weights.Height(); i++) { - for (int j = 0; j < _weights.Width(); j++) { - - _weights(i, j) = 0.0f; - - bool valid = mask(i, j); - if (!valid) { - continue; - } - - const Vec2 & coords = coordinates(i, j); - - float x = coords(0); - float y = coords(1); - - float wx = 1.0f - std::abs((x - cx) / cx); - float wy = 1.0f - std::abs((y - cy) / cy); - - _weights(i, j) = wx 
* wy; - } - } - - return true; - } - - const aliceVision::image::Image & getWeights() const { - return _weights; - } - -private: - aliceVision::image::Image _weights; -}; class Warper { public: @@ -879,8 +822,6 @@ class Warper { */ _offset_x = map.getOffsetX(); _offset_y = map.getOffsetY(); - _real_width = map.getWidth(); - _real_height = map.getHeight(); _mask = map.getMask(); const image::Sampler2d sampler; @@ -888,10 +829,10 @@ class Warper { /** * Create buffer + * No longer need to keep a 2**x size */ _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); - /** * Simple warp */ @@ -930,39 +871,10 @@ class Warper { return _offset_y; } - size_t getWidth() const { - return _real_width; - } - - size_t getHeight() const { - return _real_height; - } - - void dump(std::ofstream & output) { - output.write((char*)&_offset_x, sizeof(_offset_x)); - output.write((char*)&_offset_y, sizeof(_offset_y)); - output.write((char*)&_real_width, sizeof(_real_width)); - output.write((char*)&_real_height, sizeof(_real_height)); - - char * ptr_color = (char*)(_color.data()); - for (int i = 0; i < _real_height; i++) { - output.write(ptr_color, _real_width * sizeof(decltype(_color)::Scalar)); - ptr_color += _color.rowStride(); - } - - char * ptr_mask = (char*)(_mask.data()); - for (int i = 0; i < _real_height; i++) { - output.write(ptr_mask, _real_width * sizeof(decltype(_mask)::Scalar)); - ptr_mask += _mask.rowStride(); - } - } - protected: size_t _offset_x = 0; size_t _offset_y = 0; - size_t _real_width = 0; - size_t _real_height = 0; - + aliceVision::image::Image _color; aliceVision::image::Image _mask; }; @@ -976,8 +888,6 @@ class GaussianWarper : public Warper { */ _offset_x = map.getOffsetX(); _offset_y = map.getOffsetY(); - _real_width = map.getWidth(); - _real_height = map.getHeight(); _mask = map.getMask(); const image::Sampler2d sampler; @@ -1050,362 +960,6 @@ class GaussianWarper : public Warper { } }; -class Compositer { -public: - Compositer(size_t 
outputWidth, size_t outputHeight) : - _panorama(outputWidth, outputHeight, true, image::RGBfColor(1.0f, 0.0f, 0.0f)), - _mask(outputWidth, outputHeight, true, false) - { - } - - virtual bool append(const Warper & warper) { - - const aliceVision::image::Image & inputMask = warper.getMask(); - const aliceVision::image::Image & color = warper.getColor(); - - for (size_t i = 0; i < warper.getHeight(); i++) { - - size_t pano_i = warper.getOffsetY() + i; - if (pano_i >= _panorama.Height()) { - continue; - } - - for (size_t j = 0; j < warper.getWidth(); j++) { - - if (!inputMask(i, j)) { - continue; - } - - size_t pano_j = warper.getOffsetX() + j; - if (pano_j >= _panorama.Width()) { - continue; - } - - _panorama(pano_i, pano_j) = color(i, j); - _mask(pano_i, pano_j) = 1; - } - } - - return true; - } - - const aliceVision::image::Image & getPanorama() const { - return _panorama; - } - - const aliceVision::image::Image & getPanoramaMask() const { - return _mask; - } - -protected: - aliceVision::image::Image _panorama; - aliceVision::image::Image _mask; -}; - - -class AverageCompositer : public Compositer { -public: - - AverageCompositer(size_t outputWidth, size_t outputHeight) : Compositer(outputWidth, outputHeight) { - - } - - virtual bool append(const Warper & warper) { - - const aliceVision::image::Image & inputMask = warper.getMask(); - const aliceVision::image::Image & color = warper.getColor(); - - for (size_t i = 0; i < warper.getHeight(); i++) { - - size_t pano_i = warper.getOffsetY() + i; - if (pano_i >= _panorama.Height()) { - continue; - } - - for (size_t j = 0; j < warper.getWidth(); j++) { - - if (!inputMask(i, j)) { - continue; - } - - size_t pano_j = warper.getOffsetX() + j; - if (pano_j >= _panorama.Width()) { - continue; - } - - if (!_mask(pano_i, pano_j)) { - _panorama(pano_i, pano_j) = color(i, j); - } - else { - _panorama(pano_i, pano_j).r() = 0.5 * (_panorama(pano_i, pano_j).r() + color(i, j).r()); - _panorama(pano_i, pano_j).g() = 0.5 * 
(_panorama(pano_i, pano_j).g() + color(i, j).g()); - _panorama(pano_i, pano_j).b() = 0.5 * (_panorama(pano_i, pano_j).b() + color(i, j).b()); - - } - - _mask(pano_i, pano_j) = true; - } - } - - return true; - } -}; - -class AlphaCompositer : public Compositer { -public: - - AlphaCompositer(size_t outputWidth, size_t outputHeight) : - Compositer(outputWidth, outputHeight), - _weightmap(outputWidth, outputHeight, true, 0.0f) { - - } - - virtual bool append(const Warper & warper, const AlphaBuilder & alpha) { - - const aliceVision::image::Image & inputMask = warper.getMask(); - const aliceVision::image::Image & color = warper.getColor(); - const aliceVision::image::Image & inputWeights = alpha.getWeights(); - - for (size_t i = 0; i < warper.getHeight(); i++) { - - size_t pano_i = warper.getOffsetY() + i; - if (pano_i >= _panorama.Height()) { - continue; - } - - for (size_t j = 0; j < warper.getWidth(); j++) { - - if (!inputMask(i, j)) { - continue; - } - - size_t pano_j = warper.getOffsetX() + j; - if (pano_j >= _panorama.Width()) { - pano_j = pano_j - _panorama.Width(); - } - - if (!_mask(pano_i, pano_j)) { - _panorama(pano_i, pano_j) = color(i, j); - _weightmap(pano_i, pano_j) = inputWeights(i, j); - } - else { - float wp = _weightmap(pano_i, pano_j); - float wc = inputWeights(i, j); - float wtotal = wp + wc; - wp /= wtotal; - wc /= wtotal; - - _panorama(pano_i, pano_j).r() = wp * _panorama(pano_i, pano_j).r() + wc * color(i, j).r(); - _panorama(pano_i, pano_j).g() = wp * _panorama(pano_i, pano_j).g() + wc * color(i, j).g(); - _panorama(pano_i, pano_j).b() = wp * _panorama(pano_i, pano_j).b() + wc * color(i, j).b(); - - _weightmap(pano_i, pano_j) = wtotal; - } - - _mask(pano_i, pano_j) = true; - } - } - - return true; - } - -protected: - aliceVision::image::Image _weightmap; -}; - -class LaplacianCompositer : public AlphaCompositer { -public: - - LaplacianCompositer(size_t outputWidth, size_t outputHeight) : - AlphaCompositer(outputWidth, outputHeight), - 
_maxweightmap(outputWidth, outputHeight, true, 0.0f) { - - - } - - virtual bool append(const Warper & warper, const AlphaBuilder & alpha) { - - const aliceVision::image::Image & camera_weights = alpha.getWeights(); - const aliceVision::image::Image & camera_mask = warper.getMask(); - const aliceVision::image::Image & camera_color = warper.getColor(); - - size_t ox = warper.getOffsetX(); - size_t oy = warper.getOffsetY(); - size_t pwidth = _panorama.Width(); - size_t pheight = _panorama.Height(); - - /** - * Create a copy of panorama related pixels - */ - aliceVision::image::Image panorama_subcolor(camera_weights.Width(), camera_weights.Height(), true, image::RGBfColor(0.0f, 0.0f, 0.0f)); - aliceVision::image::Image panorama_submask(camera_weights.Width(), camera_weights.Height(), true, false); - - if (ox + warper.getWidth() > pwidth) { - - int left_width = ox + warper.getWidth() - pwidth; - int right_width = warper.getWidth() - left_width; - panorama_subcolor.block(0, 0, warper.getHeight(), right_width) = _panorama.block(oy, ox, warper.getHeight(), right_width); - panorama_subcolor.block(0, right_width, warper.getHeight(), left_width) = _panorama.block(oy, 0, warper.getHeight(), left_width); - panorama_submask.block(0, 0, warper.getHeight(), right_width) = _mask.block(oy, ox, warper.getHeight(), right_width); - panorama_submask.block(0, right_width, warper.getHeight(), left_width) = _mask.block(oy, 0, warper.getHeight(), left_width); - } - else { - panorama_subcolor.block(0, 0, warper.getHeight(), warper.getWidth()) = _panorama.block(oy, ox, warper.getHeight(), warper.getWidth()); - panorama_submask.block(0, 0, warper.getHeight(), warper.getWidth()) = _mask.block(oy, ox, warper.getHeight(), warper.getWidth()); - } - - /** - * Compute optimal scale - * The smallest level will be at least of size min_size - */ - size_t min_dim = std::min(camera_weights.Width(), camera_weights.Height()); - size_t min_size = 4; - size_t limit_scales = 10; - size_t scales = 
std::min(limit_scales, static_cast(floor(log2(double(min_dim) / float(min_size))))); - - /** - * Create buffers - */ - std::vector> camera_weightmaps(scales); - std::vector> camera_colors(scales); - std::vector> camera_differences(scales); - std::vector> panorama_colors(scales); - std::vector> panorama_differences(scales); - std::vector> blended_differences(scales); - std::vector> blended_mask(scales); - - for (int level = 0; level < scales; level++) { - camera_weightmaps[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); - camera_colors[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); - camera_differences[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); - panorama_colors[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); - panorama_differences[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); - blended_differences[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); - blended_mask[level] = aliceVision::image::Image(camera_weights.Width(), camera_weights.Height()); - } - - /* Build max weight map for first level */ - auto & maxweightmap = camera_weightmaps[0]; - - for (int i = 0; i < camera_weights.Height(); i++) { - for (int j = 0; j < camera_weights.Width(); j++) { - - maxweightmap(i, j) = 0.0f; - - if (!camera_mask(i, j)) { - continue; - } - - float cw = camera_weights(i, j); - - int pano_y = oy + i; - int pano_x = ox + j; - if (pano_x >= pwidth) { - pano_x = pano_x - pwidth; - } - - float pw = _weightmap(pano_y, pano_x); - - if (cw > pw) { - maxweightmap(i, j) = 1.0f; - _weightmap(pano_y, pano_x) = cw; - } - } - } - - Eigen::MatrixXf kernel = gaussian_kernel(5, 2.0f); - - /*Create scales*/ - camera_colors[0] = camera_color; - panorama_colors[0] = panorama_subcolor; - for (int level = 1; level < scales; level++) { - convolve(camera_weightmaps[level], 
camera_weightmaps[level - 1], camera_mask, kernel); - convolve(camera_colors[level], camera_colors[level - 1], camera_mask, kernel); - convolve(panorama_colors[level], panorama_colors[level - 1], panorama_submask, kernel); - } - - - /*Compute differences*/ - for (int level = 0; level < scales - 1; level++) { - camera_differences[level] = camera_colors[level] - camera_colors[level + 1]; - panorama_differences[level] = panorama_colors[level] - panorama_colors[level + 1]; - } - camera_differences[scales - 1] = camera_colors[scales - 1]; - panorama_differences[scales - 1] = panorama_colors[scales - 1]; - - /*Blend*/ - for (int level = 0; level < scales; level++) { - const aliceVision::image::Image & imgpano = panorama_differences[level]; - const aliceVision::image::Image & imgcam = camera_differences[level]; - const aliceVision::image::Image & weights = camera_weightmaps[level]; - aliceVision::image::Image & imgblend = blended_differences[level]; - aliceVision::image::Image & maskblend = blended_mask[level]; - - for (int i = 0; i < camera_weights.Height(); i++) { - for (int j = 0; j < camera_weights.Width(); j++) { - if (camera_mask(i, j)) { - if (panorama_submask(i, j)) { - float w = weights(i, j); - float mw = 1.0f - w; - imgblend(i, j).r() = mw * imgpano(i, j).r() + w * imgcam(i, j).r(); - imgblend(i, j).g() = mw * imgpano(i, j).g() + w * imgcam(i, j).g(); - imgblend(i, j).b() = mw * imgpano(i, j).b() + w * imgcam(i, j).b(); - maskblend(i, j) = 1; - } - else { - imgblend(i, j) = imgcam(i, j); - maskblend(i, j) = 1; - } - } - else { - if (panorama_submask(i, j)) { - imgblend(i, j) = imgpano(i, j); - maskblend(i, j) = 1; - } - else { - maskblend(i, j) = 0; - } - } - } - } - } - - /*Rebuild*/ - for (int level = scales - 1; level >= 0; level--) { - - aliceVision::image::Image & imgblend = blended_differences[level]; - aliceVision::image::Image & maskblend = blended_mask[level]; - - for (int i = 0; i < warper.getHeight(); i++) { - for (int j = 0; j < warper.getWidth(); 
j++) { - - int pano_y = oy + i; - int pano_x = ox + j; - if (pano_x >= pwidth) { - pano_x = pano_x - pwidth; - } - - if (maskblend(i, j)) { - if (level == scales - 1) { - _panorama(pano_y, pano_x) = image::RGBfColor(0.0f); - _mask(pano_y, pano_x) = 0; - } - - _panorama(pano_y, pano_x) += imgblend(i, j); - _mask(pano_y, pano_x) = 1; - } - } - } - } - - - return true; - } - -private: - aliceVision::image::Image _maxweightmap; -}; - bool computeOptimalPanoramaSize(std::pair & optimalSize, const sfmData::SfMData & sfmData) { optimalSize.first = 512; @@ -1468,19 +1022,19 @@ int main(int argc, char **argv) { * Program description */ po::options_description allParams ( - "Perform panorama stiching of cameras around a nodal point for 360° panorama creation.\n" - "AliceVision PanoramaStitching" + "Perform panorama stiching of cameras around a nodal point for 360° panorama creation. \n" + "AliceVision PanoramaWarping" ); /** * Description of mandatory parameters */ std::string sfmDataFilename; - std::string outputPanorama; + std::string outputDirectory; po::options_description requiredParams("Required parameters"); requiredParams.add_options() ("input,i", po::value(&sfmDataFilename)->required(), "SfMData file.") - ("output,o", po::value(&outputPanorama)->required(), "Path of the output folder."); + ("output,o", po::value(&outputDirectory)->required(), "Path of the output folder."); allParams.add(requiredParams); /** @@ -1577,10 +1131,7 @@ int main(int argc, char **argv) { ALICEVISION_LOG_INFO("Choosen panorama size : " << panoramaSize.first << "x" << panoramaSize.second); - /** - * Create compositer - */ - AlphaCompositer compositer(size_t(panoramaSize.first), size_t(panoramaSize.second)); + bpt::ptree viewsTree; /** * Preprocessing per view @@ -1596,7 +1147,6 @@ int main(int argc, char **argv) { continue; } - /*if (pos >= 20 && pos < 305)*/ { ALICEVISION_LOG_INFO("Processing view " << view.getViewId()); /** @@ -1627,36 +1177,44 @@ int main(int argc, char **argv) { 
GaussianWarper warper; warper.warp(map, source); + /** - * Alpha mask + * Store result image */ - AlphaBuilder alphabuilder; - alphabuilder.build(map, intrinsic); - - /** - *Composite image into output - */ - ALICEVISION_LOG_INFO("Composite to final panorama\n"); - compositer.append(warper, alphabuilder); + std::stringstream ss; + const aliceVision::image::Image & cam = warper.getColor(); + ss << outputDirectory << "/view_" << pos << ".exr"; + std::string filename_view = ss.str(); - ALICEVISION_LOG_INFO("Save final panoram\n"); + ALICEVISION_LOG_INFO("Store view " << pos << " with path " << filename_view); + image::writeImage(filename_view, cam, image::EImageColorSpace::SRGB); - /*char filename[512]; - const aliceVision::image::Image & panorama = compositer.getPanorama(); - sprintf(filename, "%s_intermediate%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); - const aliceVision::image::Image & cam = warper.getColor(); - sprintf(filename, "%s_view%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, cam, image::EImageColorSpace::SRGB);*/ - } + /** + * Store view info + */ + bpt::ptree viewTree; + viewTree.put("filename", filename_view); + viewTree.put("offsetx", warper.getOffsetX()); + viewTree.put("offsety", warper.getOffsetY()); + viewsTree.push_back(std::make_pair("", viewTree)); + pos++; } - - ALICEVISION_LOG_INFO("Save final panoram\n"); - const aliceVision::image::Image & panorama = compositer.getPanorama(); - image::writeImage(outputPanorama, panorama, image::EImageColorSpace::SRGB); + + /** + * Config output + */ + bpt::ptree configTree; + configTree.put("panoramaWidth", panoramaSize.first); + configTree.put("panoramaHeight", panoramaSize.second); + configTree.add_child("views", viewsTree); + + std::stringstream ss; + ss << outputDirectory << "/config_views.json"; + ALICEVISION_LOG_INFO("Save config with path " << ss.str()); + bpt::write_json(ss, configTree); return EXIT_SUCCESS; } \ 
No newline at end of file From 8788e53436f71f8e4f9e12faeffbae90ca16855e Mon Sep 17 00:00:00 2001 From: fabienservant Date: Thu, 14 Nov 2019 16:16:45 +0100 Subject: [PATCH 115/174] updates to pipeline on compositing --- .../pipeline/main_panoramaCompositing.cpp | 1043 ++++------------- .../pipeline/main_panoramaWarping.cpp | 111 +- 2 files changed, 306 insertions(+), 848 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 7944faba48..02c1456ad3 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -33,10 +33,13 @@ using namespace aliceVision; namespace po = boost::program_options; namespace bpt = boost::property_tree; -float sigmoid(float x, float sigwidth, float sigMid) -{ - return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); -} +typedef struct { + size_t offset_x; + size_t offset_y; + std::string img_path; + std::string mask_path; + std::string weights_path; +} ConfigView; Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { @@ -56,24 +59,12 @@ Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { double sum = k1d.sum(); k1d = k1d / sum; - return k1d.cast(); -} - -Eigen::MatrixXf gaussian_kernel(size_t kernel_length, float sigma) { - - Eigen::VectorXf k1d = gaussian_kernel_vector(kernel_length, sigma); - Eigen::MatrixXf K = k1d * k1d.transpose(); - + std::cout << k1d.transpose() << std::endl; - double sum = K.sum(); - K = K / sum; - - return K; + return k1d.cast(); } - - -bool convolve(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::MatrixXf & kernel) { +bool convolveHorizontal(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::VectorXf & kernel) { if (output.size() != input.size()) { return false; @@ -87,46 +78,38 @@ bool convolve(image::Image & output, const image::Image & 
input, c return false; } - if (kernel.rows() != kernel.cols()) { - return false; - } - - int radius = kernel.rows() / 2; + int radius = kernel.size() / 2; - Eigen::MatrixXf kernel_scaled = kernel; for (int i = 0; i < output.Height(); i++) { for (int j = 0; j < output.Width(); j++) { - float sum = 0.0f; + image::RGBfColor sum = image::RGBfColor(0.0); float sum_mask = 0.0f; if (!mask(i, j)) { - output(i, j) = 0.0f; + output(i, j) = image::RGBfColor(0.0); continue; } - for (int k = 0; k < kernel.rows(); k++) { - float ni = i + k - radius; - if (ni < 0 || ni >= output.Height()) { - continue; - } + - for (int l = 0; l < kernel.cols(); l++) { - float nj = j + l - radius; - if (nj < 0 || nj >= output.Width()) { - continue; - } + for (int k = 0; k < kernel.size(); k++) { - if (!mask(ni, nj)) { - continue; - } + double w = kernel(k); + int col = j + k - radius; - float val = kernel(k, l) * input(ni, nj); - sum += val; - sum_mask += kernel(k, l); + if (col < 0 || col >= input.Width()) { + continue; } - } + + if (!mask(i, col)) { + continue; + } + + sum += w * input(i, col); + sum_mask += w; + } output(i, j) = sum / sum_mask; } @@ -135,7 +118,7 @@ bool convolve(image::Image & output, const image::Image & input, c return true; } -bool convolve(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::MatrixXf & kernel) { +bool convolveVertical(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::VectorXf & kernel) { if (output.size() != input.size()) { return false; @@ -149,140 +132,38 @@ bool convolve(image::Image & output, const image::Image= output.Height()) { - continue; - } - - for (int l = 0; l < kernel.cols(); l++) { - float nj = j + l - radius; - if (nj < 0 || nj >= output.Width()) { - continue; - } + + for (int k = 0; k < kernel.size(); k++) { - if (!mask(ni, nj)) { - continue; - } + double w = kernel(k); + int row = i + k - radius; - sum.r() += kernel(k, l) * input(ni, nj).r(); - sum.g() += 
kernel(k, l) * input(ni, nj).g(); - sum.b() += kernel(k, l) * input(ni, nj).b(); - sum_mask += kernel(k, l); + if (row < 0 || row >= input.Height()) { + continue; } - } - - output(i, j) = sum / sum_mask; - } - } - - return true; -} - -bool computeDistanceMap(image::Image & distance, const image::Image & mask) { - - int m = mask.Height(); - int n = mask.Width(); - - int maxval = m * n; - - distance = image::Image (n, m, false); - for(int x = 0; x < n; ++x) { - - //A corner is when mask becomes 0 - bool b = !mask(0, x); - if (b) { - distance(0, x) = 0; - } - else { - distance(0, x) = maxval * maxval; - } - - for (int y = 1; y < m; y++) { - bool b = !mask(y, x); - if (b) { - distance(y, x) = 0; - } - else { - distance(y, x) = 1 + distance(y - 1, x); - } - } - - for (int y = m - 2; y >= 0; y--) { - if (distance(y + 1, x) < distance(y, x)) { - distance(y, x) = 1 + distance(y + 1, x); - } - } - } - - for (int y = 0; y < m; y++) { - int q; - std::map s; - std::map t; - q = 0; - s[0] = 0; - t[0] = 0; - - std::function f = [distance, y](int x, int i) { - int gi = distance(y, i); - return (x - i)*(x - i) + gi * gi; - }; - - std::function sep = [distance, y](int i, int u) { - int gu = distance(y, u); - int gi = distance(y, i); - - int nom = (u * u) - (i * i) + (gu * gu) - (gi * gi); - int denom = 2 * (u - i); - - return nom / denom; - }; - - for (int u = 1; u < n; u++) { - - while (q >= 0 && (f(t[q], s[q]) > f(t[q], u))) { - q = q - 1; - } - - if (q < 0) { - q = 0; - s[0] = u; - } - else { - int w = 1 + sep(s[q], u); - if (w < n) { - q = q + 1; - s[q] = u; - t[q] = w; + if (!mask(row, j)) { + continue; } - } - } - for (int u = n - 1; u >= 0; u--) { - distance(y, u) = f(u, s[q]); - if (u == t[q]) { - q = q - 1; + sum += w * input(row, j); + sum_mask += w; } + + output(i, j) = sum / sum_mask; } } @@ -290,731 +171,169 @@ bool computeDistanceMap(image::Image & distance, const image::Image(floor(log2(double(min_dim) / float(min_size))))); - - - /** - * Create pyramid - **/ - 
size_t new_width = _width_base; - size_t new_height = _height_base; - for (int i = 0; i < _scales; i++) { - - _pyramid_color.push_back(image::Image(new_width, new_height, true, image::RGBfColor(0))); - _filter_buffer.push_back(image::Image(new_width, new_height, true, image::RGBfColor(0))); - new_height /= 2; - new_width /= 2; - } } - bool process(const image::Image & input) { - - if (input.Height() != _pyramid_color[0].Height()) return false; - if (input.Width() != _pyramid_color[0].Width()) return false; - - - /** - * Kernel - */ - oiio::ImageBuf K; - oiio::ImageBufAlgo::make_kernel(K, "gaussian", 5, 5); - - /** - * Build pyramid - */ - _pyramid_color[0] = input; - for (int lvl = 0; lvl < _scales - 1; lvl++) { - - const image::Image & source = _pyramid_color[lvl]; - image::Image & dst = _filter_buffer[lvl]; - - oiio::ImageSpec spec(source.Width(), source.Height(), 3, oiio::TypeDesc::FLOAT); - - const oiio::ImageBuf inBuf(spec, const_cast(source.data())); - oiio::ImageBuf outBuf(spec, dst.data()); - oiio::ImageBufAlgo::convolve(outBuf, inBuf, K); - - downscale(_pyramid_color[lvl + 1], _filter_buffer[lvl]); - } - - return true; - } + virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, size_t offset_x, size_t offset_y) { + for (size_t i = 0; i < color.Height(); i++) { - bool downscale(image::Image & output, const image::Image & input) { - - for (int i = 0; i < output.Height(); i++) { - int ui = i * 2; + size_t pano_i = offset_y + i; + if (pano_i >= _panorama.Height()) { + continue; + } - for (int j = 0; j < output.Width(); j++) { - int uj = j * 2; + for (size_t j = 0; j < color.Width(); j++) { + + if (!inputMask(i, j)) { + continue; + } + + size_t pano_j = offset_x + j; + if (pano_j >= _panorama.Width()) { + pano_j = pano_j - _panorama.Width(); + } - output(i, j) = input(ui, uj); + _panorama(pano_i, pano_j) = color(i, j); + _mask(pano_i, pano_j) = 1; } } return true; } - const size_t getScalesCount() const { - 
return _scales; + const aliceVision::image::Image & getPanorama() const { + return _panorama; } - const std::vector> & getPyramidColor() const { - return _pyramid_color; - } - - std::vector> & getPyramidColor() { - return _pyramid_color; + const aliceVision::image::Image & getPanoramaMask() const { + return _mask; } protected: - std::vector> _pyramid_color; - std::vector> _filter_buffer; - size_t _width_base; - size_t _height_base; - size_t _scales; + aliceVision::image::Image _panorama; + aliceVision::image::Image _mask; }; -class CoordinatesMap { -private: - struct BBox { - int left; - int top; - int width; - int height; - }; - +class AlphaCompositer : public Compositer { public: - /** - * Build coordinates map given camera properties - * @param panoramaSize desired output panoramaSize - * @param pose the camera pose wrt an arbitrary reference frame - * @param intrinsics the camera intrinsics - */ - bool build(const std::pair & panoramaSize, const geometry::Pose3 & pose, const aliceVision::camera::IntrinsicBase & intrinsics) { - BBox coarse_bbox; - if (!computeCoarseBB(coarse_bbox, panoramaSize, pose, intrinsics)) { - return false; - } + AlphaCompositer(size_t outputWidth, size_t outputHeight) : + Compositer(outputWidth, outputHeight), + _weightmap(outputWidth, outputHeight, true, 0.0f) { + } - /* Effectively compute the warping map */ - aliceVision::image::Image buffer_coordinates(coarse_bbox.width, coarse_bbox.height, false); - aliceVision::image::Image buffer_mask(coarse_bbox.width, coarse_bbox.height, true, 0); - - size_t max_x = 0; - size_t max_y = 0; - size_t min_x = panoramaSize.first; - size_t min_y = panoramaSize.second; - -#ifdef _MSC_VER - // TODO - // no support for reduction min in MSVC implementation of openmp -#else - #pragma omp parallel for reduction(min: min_x, min_y) reduction(max: max_x, max_y) -#endif - for (size_t y = 0; y < coarse_bbox.height; y++) { - - size_t cy = y + coarse_bbox.top; - - size_t row_max_x = 0; - size_t row_max_y = 0; - 
size_t row_min_x = panoramaSize.first; - size_t row_min_y = panoramaSize.second; - - - for (size_t x = 0; x < coarse_bbox.width; x++) { + virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - size_t cx = x + coarse_bbox.left; + for (size_t i = 0; i < color.Height(); i++) { - Vec3 ray = SphericalMapping::fromEquirectangular(Vec2(cx, cy), panoramaSize.first, panoramaSize.second); + size_t pano_i = offset_y + i; + if (pano_i >= _panorama.Height()) { + continue; + } - /** - * Check that this ray should be visible. - * This test is camera type dependent - */ - Vec3 transformedRay = pose(ray); - if (!intrinsics.isVisibleRay(transformedRay)) { + for (size_t j = 0; j < color.Width(); j++) { + + if (!inputMask(i, j)) { continue; } - - /** - * Project this ray to camera pixel coordinates - */ - const Vec2 pix_disto = intrinsics.project(pose, ray, true); - - /** - * Ignore invalid coordinates - */ - if (!intrinsics.isVisible(pix_disto)) { - continue; + + size_t pano_j = offset_x + j; + if (pano_j >= _panorama.Width()) { + pano_j = pano_j - _panorama.Width(); } - - buffer_coordinates(y, x) = pix_disto; - buffer_mask(y, x) = 1; - - row_min_x = std::min(x, row_min_x); - row_min_y = std::min(y, row_min_y); - row_max_x = std::max(x, row_max_x); - row_max_y = std::max(y, row_max_y); - } - - min_x = std::min(row_min_x, min_x); - min_y = std::min(row_min_y, min_y); - max_x = std::max(row_max_x, max_x); - max_y = std::max(row_max_y, max_y); - } - - _offset_x = coarse_bbox.left + min_x; - if (_offset_x > panoramaSize.first) { - /*The coarse bounding box may cross the borders where as the true coordinates may not*/ - int ox = int(_offset_x) - int(panoramaSize.first); - _offset_x = ox; - } - _offset_y = coarse_bbox.top + min_y; - - size_t real_width = max_x - min_x + 1; - size_t real_height = max_y - min_y + 1; - - /* Resize buffers */ - _coordinates = 
aliceVision::image::Image(real_width, real_height, false); - _mask = aliceVision::image::Image(real_width, real_height, true, 0); - - _coordinates.block(0, 0, real_height, real_width) = buffer_coordinates.block(min_y, min_x, real_height, real_width); - _mask.block(0, 0, real_height, real_width) = buffer_mask.block(min_y, min_x, real_height, real_width); - - return true; - } - - bool computeScale(double & result) { - - std::vector scales; - size_t real_height = _coordinates.Height(); - size_t real_width = _coordinates.Width(); - - for (int i = 0; i < real_height - 1; i++) { - for (int j = 0; j < real_width - 1; j++) { - if (!_mask(i, j) || !_mask(i, j + 1) || !_mask(i + 1, j)) { - continue; + if (!_mask(pano_i, pano_j)) { + _panorama(pano_i, pano_j) = color(i, j); + _weightmap(pano_i, pano_j) = inputWeights(i, j); } - - double dxx = _coordinates(i, j + 1).x() - _coordinates(i, j).x(); - double dxy = _coordinates(i + 1, j).x() - _coordinates(i, j).x(); - double dyx = _coordinates(i, j + 1).y() - _coordinates(i, j).y(); - double dyy = _coordinates(i + 1, j).y() - _coordinates(i, j).y(); - - double det = std::abs(dxx*dyy - dxy*dyx); - scales.push_back(det); - } - } - - if (scales.size() <= 1) return false; - - std::nth_element(scales.begin(), scales.begin() + scales.size() / 2, scales.end()); - result = sqrt(scales[scales.size() / 2]); - - - return true; - } - - size_t getOffsetX() const { - return _offset_x; - } - - size_t getOffsetY() const { - return _offset_y; - } - - const aliceVision::image::Image & getCoordinates() const { - return _coordinates; - } - - const aliceVision::image::Image & getMask() const { - return _mask; - } - -private: - - bool computeCoarseBB(BBox & coarse_bbox, const std::pair & panoramaSize, const geometry::Pose3 & pose, const aliceVision::camera::IntrinsicBase & intrinsics) { - - coarse_bbox.left = 0; - coarse_bbox.top = 0; - coarse_bbox.width = panoramaSize.first; - coarse_bbox.height = panoramaSize.second; - - int bbox_left, bbox_top; - 
int bbox_right, bbox_bottom; - int bbox_width, bbox_height; - - /*Estimate distorted maximal distance from optical center*/ - Vec2 pts[] = {{0.0f, 0.0f}, {intrinsics.w(), 0.0f}, {intrinsics.w(), intrinsics.h()}, {0.0f, intrinsics.h()}}; - float max_radius = 0.0; - for (int i = 0; i < 4; i++) { - - Vec2 ptmeter = intrinsics.ima2cam(pts[i]); - float radius = ptmeter.norm(); - max_radius = std::max(max_radius, radius); - } - - /* Estimate undistorted maximal distance from optical center */ - float max_radius_distorted = intrinsics.getMaximalDistortion(0.0, max_radius); - - /* - Coarse rectangle bouding box in camera space - We add intermediate points to ensure arclength between 2 points is never more than 180° - */ - Vec2 pts_radius[] = { - {-max_radius_distorted, -max_radius_distorted}, - {0, -max_radius_distorted}, - {max_radius_distorted, -max_radius_distorted}, - {max_radius_distorted, 0}, - {max_radius_distorted, max_radius_distorted}, - {0, max_radius_distorted}, - {-max_radius_distorted, max_radius_distorted}, - {-max_radius_distorted, 0} - }; - - - /* - Transform bounding box into the panorama frame. - Point are on a unit sphere. 
- */ - Vec3 rotated_pts[8]; - for (int i = 0; i < 8; i++) { - Vec3 pt3d = pts_radius[i].homogeneous().normalized(); - rotated_pts[i] = pose.rotation().transpose() * pt3d; - } - - /* Vertical Default solution : no pole*/ - bbox_top = panoramaSize.second; - bbox_bottom = 0; - - for (int i = 0; i < 8; i++) { - int i2 = i + 1; - if (i2 > 7) i2 = 0; - - Vec3 extremaY = getExtremaY(rotated_pts[i], rotated_pts[i2]); - - Vec2 res; - res = SphericalMapping::toEquirectangular(extremaY, panoramaSize.first, panoramaSize.second); - bbox_top = std::min(int(floor(res(1))), bbox_top); - bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); - - res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); - bbox_top = std::min(int(floor(res(1))), bbox_top); - bbox_bottom = std::max(int(ceil(res(1))), bbox_bottom); - } - - /* - Check if our region circumscribe a pole of the sphere : - Check that the region projected on the Y=0 plane contains the point (0, 0) - This is a special projection case - */ - bool pole = isPoleInTriangle(rotated_pts[0], rotated_pts[1], rotated_pts[7]); - pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[2], rotated_pts[3]); - pole |= isPoleInTriangle(rotated_pts[3], rotated_pts[4], rotated_pts[5]); - pole |= isPoleInTriangle(rotated_pts[7], rotated_pts[5], rotated_pts[6]); - pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[3], rotated_pts[5]); - pole |= isPoleInTriangle(rotated_pts[1], rotated_pts[5], rotated_pts[7]); - - - if (pole) { - Vec3 normal = (rotated_pts[1] - rotated_pts[0]).cross(rotated_pts[3] - rotated_pts[0]); - if (normal(1) > 0) { - //Lower pole - bbox_bottom = panoramaSize.second - 1; - } - else { - //upper pole - bbox_top = 0; - } - } - - bbox_height = bbox_bottom - bbox_top + 1; - - - /*Check if we cross the horizontal loop*/ - bool crossH; - for (int i = 0; i < 8; i++) { - int i2 = i + 1; - if (i2 > 7) i2 = 0; - - bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); - if (i == 
0) crossH = cross; - else crossH |= cross; - } - - - if (pole) { - /*Easy : if we cross the pole, the width is full*/ - bbox_left = 0; - bbox_right = panoramaSize.first - 1; - bbox_width = bbox_right - bbox_left + 1; - } - else if (crossH) { - - for (int i = 0; i < 8; i+= 2) { - int i2 = i + 1; - int i3 = i + 2; - if (i3 >= 8) { - i3 = 0; + else { + float wp = _weightmap(pano_i, pano_j); + float wc = inputWeights(i, j); + float wtotal = wp + wc; + wp /= wtotal; + wc /= wtotal; + + _panorama(pano_i, pano_j).r() = wp * _panorama(pano_i, pano_j).r() + wc * color(i, j).r(); + _panorama(pano_i, pano_j).g() = wp * _panorama(pano_i, pano_j).g() + wc * color(i, j).g(); + _panorama(pano_i, pano_j).b() = wp * _panorama(pano_i, pano_j).b() + wc * color(i, j).b(); + + _weightmap(pano_i, pano_j) = wtotal; } - + _mask(pano_i, pano_j) = true; } - bbox_left = 0; - bbox_right = panoramaSize.first - 1; - bbox_width = bbox_right - bbox_left + 1; - } - else { - /*horizontal default solution : no border crossing, no pole*/ - bbox_left = panoramaSize.first; - bbox_right = 0; - for (int i = 0; i < 8; i++) { - Vec2 res = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); - bbox_left = std::min(int(floor(res(0))), bbox_left); - bbox_right = std::max(int(ceil(res(0))), bbox_right); - } - bbox_width = bbox_right - bbox_left + 1; - } - - /*Assign solution to result*/ - coarse_bbox.left = bbox_left; - coarse_bbox.top = bbox_top; - coarse_bbox.width = bbox_width; - coarse_bbox.height = bbox_height; - - return true; - } - - Vec3 getExtremaY(const Vec3 & pt1, const Vec3 & pt2) { - Vec3 delta = pt2 - pt1; - double dx = delta(0); - double dy = delta(1); - double dz = delta(2); - double sx = pt1(0); - double sy = pt1(1); - double sz = pt1(2); - - double ot_y = -(dx*sx*sy - (dy*sx)*(dy*sx) - (dy*sz)*(dy*sz) + dz*sy*sz)/(dx*dx*sy - dx*dy*sx - dy*dz*sz + dz*dz*sy); - - Vec3 pt_extrema = pt1 + ot_y * delta; - - return pt_extrema.normalized(); - } - - bool 
crossHorizontalLoop(const Vec3 & pt1, const Vec3 & pt2) { - Vec3 direction = pt2 - pt1; - - /*Vertical line*/ - if (std::abs(direction(0)) < 1e-12) { - return false; - } - - double t = - pt1(0) / direction(0); - Vec3 cross = pt1 + direction * t; - - if (t >= 0.0 && t < 1.0) { - if (cross(2) < 0.0) { - return true; - } } - return false; - } - - bool isPoleInTriangle(const Vec3 & pt1, const Vec3 & pt2, const Vec3 & pt3) { - - double a = (pt2.x()*pt3.z() - pt3.x()*pt2.z())/(pt1.x()*pt2.z() - pt1.x()*pt3.z() - pt2.x()*pt1.z() + pt2.x()*pt3.z() + pt3.x()*pt1.z() - pt3.x()*pt2.z()); - double b = (-pt1.x()*pt3.z() + pt3.x()*pt1.z())/(pt1.x()*pt2.z() - pt1.x()*pt3.z() - pt2.x()*pt1.z() + pt2.x()*pt3.z() + pt3.x()*pt1.z() - pt3.x()*pt2.z()); - double c = 1.0 - a - b; - - if (a < 0.0 || a > 1.0) return false; - if (b < 0.0 || b > 1.0) return false; - if (c < 0.0 || c > 1.0) return false; - return true; } -private: - size_t _offset_x = 0; - size_t _offset_y = 0; - - aliceVision::image::Image _coordinates; - aliceVision::image::Image _mask; +protected: + aliceVision::image::Image _weightmap; }; - -class Warper { +class LaplacianCompositer : public AlphaCompositer { public: - virtual bool warp(const CoordinatesMap & map, const aliceVision::image::Image & source) { - - /** - * Copy additional info from map - */ - _offset_x = map.getOffsetX(); - _offset_y = map.getOffsetY(); - _mask = map.getMask(); - const image::Sampler2d sampler; - const aliceVision::image::Image & coordinates = map.getCoordinates(); - - /** - * Create buffer - * No longer need to keep a 2**x size - */ - _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); + LaplacianCompositer(size_t outputWidth, size_t outputHeight) : + AlphaCompositer(outputWidth, outputHeight) { - /** - * Simple warp - */ - for (size_t i = 0; i < _color.Height(); i++) { - for (size_t j = 0; j < _color.Width(); j++) { - - bool valid = _mask(i, j); - if (!valid) { - continue; - } - - const Eigen::Vector2d & coord = 
coordinates(i, j); - const image::RGBfColor pixel = sampler(source, coord(1), coord(0)); - - _color(i, j) = pixel; - } - } - - return true; } - const aliceVision::image::Image & getColor() const { - return _color; - } + virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - const aliceVision::image::Image & getMask() const { - return _mask; - } - - - size_t getOffsetX() const { - return _offset_x; - } - - size_t getOffsetY() const { - return _offset_y; - } + for (size_t i = 0; i < color.Height(); i++) { -protected: - size_t _offset_x = 0; - size_t _offset_y = 0; - - aliceVision::image::Image _color; - aliceVision::image::Image _mask; -}; - -class GaussianWarper : public Warper { -public: - virtual bool warp(const CoordinatesMap & map, const aliceVision::image::Image & source) { - - /** - * Copy additional info from map - */ - _offset_x = map.getOffsetX(); - _offset_y = map.getOffsetY(); - _mask = map.getMask(); - - const image::Sampler2d sampler; - const aliceVision::image::Image & coordinates = map.getCoordinates(); - - /** - * Create a pyramid for input - */ - GaussianPyramidNoMask pyramid(source.Width(), source.Height()); - pyramid.process(source); - const std::vector> & mlsource = pyramid.getPyramidColor(); - size_t max_level = pyramid.getScalesCount() - 1; - - /** - * Create buffer - */ - _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); - - - /** - * Multi level warp - */ - for (size_t i = 0; i < _color.Height(); i++) { - for (size_t j = 0; j < _color.Width(); j++) { - - bool valid = _mask(i, j); - if (!valid) { - continue; - } + size_t pano_i = offset_y + i; + if (pano_i >= _panorama.Height()) { + continue; + } - if (i == _color.Height() - 1 || j == _color.Width() - 1 || !_mask(i + 1, j) || !_mask(i, j + 1)) { - const Eigen::Vector2d & coord = coordinates(i, j); - const image::RGBfColor pixel = 
sampler(source, coord(1), coord(0)); - _color(i, j) = pixel; + for (size_t j = 0; j < color.Width(); j++) { + + if (!inputMask(i, j)) { continue; } - - const Eigen::Vector2d & coord_mm = coordinates(i, j); - const Eigen::Vector2d & coord_mp = coordinates(i, j + 1); - const Eigen::Vector2d & coord_pm = coordinates(i + 1, j); - - double dxx = coord_pm(0) - coord_mm(0); - double dxy = coord_mp(0) - coord_mm(0); - double dyx = coord_pm(1) - coord_mm(1); - double dyy = coord_mp(1) - coord_mm(1); - double det = std::abs(dxx*dyy - dxy*dyx); - double scale = sqrt(det); - - + size_t pano_j = offset_x + j; + if (pano_j >= _panorama.Width()) { + pano_j = pano_j - _panorama.Width(); + } - double flevel = std::max(0.0, log2(scale)); - size_t blevel = std::min(max_level, size_t(floor(flevel))); - - double dscale, x, y; - dscale = 1.0 / pow(2.0, blevel); - x = coord_mm(0) * dscale; - y = coord_mm(1) * dscale; - /*Fallback to first level if outside*/ - if (x >= mlsource[blevel].Width() - 1 || y >= mlsource[blevel].Height() - 1) { - _color(i, j) = sampler(mlsource[0], coord_mm(1), coord_mm(0)); - continue; + if (!_mask(pano_i, pano_j)) { + _panorama(pano_i, pano_j) = color(i, j); + _weightmap(pano_i, pano_j) = inputWeights(i, j); + } + else { + float wp = _weightmap(pano_i, pano_j); + float wc = inputWeights(i, j); + float wtotal = wp + wc; + wp /= wtotal; + wc /= wtotal; + + _panorama(pano_i, pano_j).r() = wp * _panorama(pano_i, pano_j).r() + wc * color(i, j).r(); + _panorama(pano_i, pano_j).g() = wp * _panorama(pano_i, pano_j).g() + wc * color(i, j).g(); + _panorama(pano_i, pano_j).b() = wp * _panorama(pano_i, pano_j).b() + wc * color(i, j).b(); + + _weightmap(pano_i, pano_j) = wtotal; } - _color(i, j) = sampler(mlsource[blevel], y, x); + _mask(pano_i, pano_j) = true; } } return true; } -}; - -bool computeOptimalPanoramaSize(std::pair & optimalSize, const sfmData::SfMData & sfmData) { - - optimalSize.first = 512; - optimalSize.second = 256; - /** - * Loop over views to estimate 
best scale - */ - std::vector scales; - for (auto & viewIt: sfmData.getViews()) { - - /** - * Retrieve view - */ - const sfmData::View& view = *viewIt.second.get(); - if (!sfmData.isPoseAndIntrinsicDefined(&view)) { - continue; - } - - /** - * Get intrinsics and extrinsics - */ - const geometry::Pose3 camPose = sfmData.getPose(view).getTransform(); - const camera::IntrinsicBase & intrinsic = *sfmData.getIntrinsicPtr(view.getIntrinsicId()); - - /** - * Compute map - */ - CoordinatesMap map; - if (!map.build(optimalSize, camPose, intrinsic)) { - continue; - } - - double scale; - if (!map.computeScale(scale)) { - continue; - } - - scales.push_back(scale); - } - - - if (scales.size() > 1) { - double median_scale; - std::nth_element(scales.begin(), scales.begin() + scales.size() / 2, scales.end()); - median_scale = scales[scales.size() / 2]; - - double multiplier = pow(2.0, int(floor(log2(median_scale)))); - - optimalSize.first = optimalSize.first * multiplier; - optimalSize.second = optimalSize.second * multiplier; - } - - return true; -} +protected: + aliceVision::image::Image _weightmap; +}; int main(int argc, char **argv) { @@ -1033,7 +352,7 @@ int main(int argc, char **argv) { std::string outputPanorama; po::options_description requiredParams("Required parameters"); requiredParams.add_options() - ("input,i", po::value(&inputDirectory)->required(), "SfMData file.") + ("input,i", po::value(&inputDirectory)->required(), "Warping directory.") ("output,o", po::value(&outputPanorama)->required(), "Path of the output panorama."); allParams.add(requiredParams); @@ -1090,6 +409,80 @@ int main(int argc, char **argv) { */ system::Logger::get()->setLogLevel(verboseLevel); + + /*Load output from previous warping step*/ + std::stringstream ss; + ss << inputDirectory << "/config_views.json"; + std::ifstream file_config(ss.str()); + if (file_config.is_open() == false) { + ALICEVISION_LOG_INFO("Config file for warping can't be found at " << ss.str()); + return EXIT_FAILURE; + } + + 
bpt::ptree configTree; + bpt::read_json(ss.str(), configTree); + + + std::pair panoramaSize; + panoramaSize.first = configTree.get("panoramaWidth"); + panoramaSize.second = configTree.get("panoramaHeight"); + + ALICEVISION_LOG_INFO("Output panorama size set to " << panoramaSize.first << "x" << panoramaSize.second); + + std::vector configViews; + size_t pos = 0; + for (auto & item : configTree.get_child("views")) { + ConfigView cv; + + if (pos > 22 && pos < 28) { + cv.img_path = item.second.get("filename_view"); + cv.mask_path = item.second.get("filename_mask"); + cv.weights_path = item.second.get("filename_weights"); + cv.offset_x = item.second.get("offsetx"); + cv.offset_y = item.second.get("offsety"); + configViews.push_back(cv); + } + pos++; + } + + + LaplacianCompositer compositer(panoramaSize.first, panoramaSize.second); + for (const ConfigView & cv : configViews) { + + /** + * Load image and convert it to linear colorspace + */ + std::string imagePath = cv.img_path; + ALICEVISION_LOG_INFO("Load image with path " << imagePath); + image::Image source; + image::readImage(imagePath, source, image::EImageColorSpace::NO_CONVERSION); + + /** + * Load mask + */ + std::string maskPath = cv.mask_path; + ALICEVISION_LOG_INFO("Load mask with path " << maskPath); + image::Image mask; + image::readImage(maskPath, mask, image::EImageColorSpace::NO_CONVERSION); + + /** + * Load Weights + */ + std::string weightsPath = cv.weights_path; + ALICEVISION_LOG_INFO("Load weights with path " << weightsPath); + image::Image weights; + image::readImage(weightsPath, weights, image::EImageColorSpace::NO_CONVERSION); + + + compositer.append(source, mask, weights, cv.offset_x, cv.offset_y); + } + + ALICEVISION_LOG_INFO("Write output panorama to file " << outputPanorama); + const aliceVision::image::Image & panorama = compositer.getPanorama(); + image::writeImage(outputPanorama, panorama, image::EImageColorSpace::SRGB); + + + return EXIT_SUCCESS; } \ No newline at end of file diff --git 
a/src/software/pipeline/main_panoramaWarping.cpp b/src/software/pipeline/main_panoramaWarping.cpp index d5ae2ef798..5044ac8712 100644 --- a/src/software/pipeline/main_panoramaWarping.cpp +++ b/src/software/pipeline/main_panoramaWarping.cpp @@ -852,6 +852,53 @@ class CoordinatesMap { aliceVision::image::Image _mask; }; +class AlphaBuilder { +public: + virtual bool build(const CoordinatesMap & map, const aliceVision::camera::IntrinsicBase & intrinsics) { + + float w = static_cast(intrinsics.w()); + float h = static_cast(intrinsics.h()); + float cx = w / 2.0f; + float cy = h / 2.0f; + + + const aliceVision::image::Image & coordinates = map.getCoordinates(); + const aliceVision::image::Image & mask = map.getMask(); + + _weights = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); + + for (int i = 0; i < _weights.Height(); i++) { + for (int j = 0; j < _weights.Width(); j++) { + + _weights(i, j) = 0.0f; + + bool valid = mask(i, j); + if (!valid) { + continue; + } + + const Vec2 & coords = coordinates(i, j); + + float x = coords(0); + float y = coords(1); + + float wx = 1.0f - std::abs((x - cx) / cx); + float wy = 1.0f - std::abs((y - cy) / cy); + + _weights(i, j) = wx * wy; + } + } + + return true; + } + + const aliceVision::image::Image & getWeights() const { + return _weights; + } + +private: + aliceVision::image::Image _weights; +}; class Warper { public: @@ -1209,48 +1256,66 @@ int main(int argc, char **argv) { image::Image source; image::readImage(imagePath, source, image::EImageColorSpace::LINEAR); - - /** * Warp image */ GaussianWarper warper; warper.warp(map, source); + /** + * Alpha mask + */ + AlphaBuilder alphabuilder; + alphabuilder.build(map, intrinsic); + /** - * Store result image + * Combine mask and image */ - std::stringstream ss; const aliceVision::image::Image & cam = warper.getColor(); - ss << outputDirectory << "/view_" << pos << ".exr"; - std::string filename_view = ss.str(); + const aliceVision::image::Image & mask = 
warper.getMask(); + const aliceVision::image::Image & weights = alphabuilder.getWeights(); -<<<<<<< HEAD:src/software/pipeline/main_panoramaStitching.cpp - char filename[512]; - const aliceVision::image::Image & panorama = compositer.getPanorama(); - sprintf(filename, "%s_intermediate%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, panorama, image::EImageColorSpace::SRGB); + /** + * Store result image + */ + bpt::ptree viewTree; + std::string path; - const aliceVision::image::Image & cam = warper.getColor(); - sprintf(filename, "%s_view%d.exr", outputPanorama.c_str(), pos); - image::writeImage(filename, cam, image::EImageColorSpace::SRGB); + { + std::stringstream ss; + ss << outputDirectory << "/view_" << pos << ".exr"; + path = ss.str(); + viewTree.put("filename_view", path); + ALICEVISION_LOG_INFO("Store view " << pos << " with path " << path); + image::writeImage(path, cam, image::EImageColorSpace::NO_CONVERSION); } -======= - ALICEVISION_LOG_INFO("Store view " << pos << " with path " << filename_view); - image::writeImage(filename_view, cam, image::EImageColorSpace::SRGB); + { + std::stringstream ss; + ss << outputDirectory << "/mask_" << pos << ".png"; + path = ss.str(); + viewTree.put("filename_mask", path); + ALICEVISION_LOG_INFO("Store mask " << pos << " with path " << path); + image::writeImage(path, mask, image::EImageColorSpace::NO_CONVERSION); + } + { + std::stringstream ss; + ss << outputDirectory << "/weightmap_" << pos << ".exr"; + path = ss.str(); + viewTree.put("filename_weights", path); + ALICEVISION_LOG_INFO("Store weightmap " << pos << " with path " << path); + image::writeImage(path, weights, image::EImageColorSpace::NO_CONVERSION); + } + /** - * Store view info - */ - bpt::ptree viewTree; - viewTree.put("filename", filename_view); + * Store view info + */ viewTree.put("offsetx", warper.getOffsetX()); viewTree.put("offsety", warper.getOffsetY()); viewsTree.push_back(std::make_pair("", viewTree)); ->>>>>>> 
b4f7dd16d8ca5185f3b26536adabebcedeca2bc3:src/software/pipeline/main_panoramaWarping.cpp pos++; } @@ -1266,7 +1331,7 @@ int main(int argc, char **argv) { std::stringstream ss; ss << outputDirectory << "/config_views.json"; ALICEVISION_LOG_INFO("Save config with path " << ss.str()); - bpt::write_json(ss, configTree); + bpt::write_json(ss.str(), configTree, std::locale(), true); return EXIT_SUCCESS; } \ No newline at end of file From f3d5f8d183d5096b626070d1fbeab834c8431227 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 15 Nov 2019 15:59:10 +0100 Subject: [PATCH 116/174] laplacian pyramid --- .../pipeline/main_panoramaCompositing.cpp | 192 ++++++++++++++---- 1 file changed, 151 insertions(+), 41 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 02c1456ad3..dfbdd554df 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -50,6 +50,7 @@ Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { for (int i = 0; i < kernel_length + 1; i++) { cdf(i) = 0.5 * (1 + std::erf(x(i)/sqrt(2.0))); } + Eigen::VectorXd k1d(kernel_length); for (int i = 0; i < kernel_length; i++) { @@ -59,8 +60,6 @@ Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { double sum = k1d.sum(); k1d = k1d / sum; - std::cout << k1d.transpose() << std::endl; - return k1d.cast(); } @@ -170,6 +169,154 @@ bool convolveVertical(image::Image & output, const image::Imag return true; } +bool downscale(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { + + size_t width = inputColor.Width(); + size_t height = inputColor.Height(); + + if (inputMask.Width() != width || inputMask.Height() != height) { + return false; + } + + size_t output_width = width / 2; + size_t output_height = height / 2; + + 
outputColor = image::Image(output_width, output_height); + outputMask = image::Image(output_width, output_height); + + for (int i = 0; i < output_height; i++) { + for (int j = 0; j < output_width; j++) { + outputMask(i, j) = inputMask(i * 2, j * 2); + outputColor(i, j) = inputColor(i * 2, j * 2); + } + } + + return true; +} + +bool upscale(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { + + size_t width = inputColor.Width(); + size_t height = inputColor.Height(); + + if (inputMask.Width() != width || inputMask.Height() != height) { + return false; + } + + size_t output_width = width * 2; + size_t output_height = height * 2; + + outputColor = image::Image(output_width, output_height); + outputMask = image::Image(output_width, output_height); + + for (int i = 0; i < height; i++) { + + int di = i * 2; + + for (int j = 0; j < width; j++) { + int dj = j * 2; + + if (!inputMask(i, j)) { + outputMask(di, dj) = 0; + } + + outputColor(di, dj) = inputColor(i, j); + outputColor(di, dj + 1) = inputColor(i, j); + outputColor(di + 1, dj) = inputColor(i, j); + outputColor(di + 1, dj + 1) = inputColor(i, j); + outputMask(di, dj) = 1; + } + } + + return true; +} + +size_t countValid(const aliceVision::image::Image & inputMask) { + + size_t count = 0; + + for (int i = 0; i < inputMask.Height(); i++) { + for (int j = 0; j < inputMask.Width(); j++) { + if (inputMask(i, j)) { + count++; + } + } + } + + return count; +} + +class GaussianPyramid { +public: + bool process(const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { + + _pyramid_color.clear(); + _pyramid_mask.clear(); + + if (inputColor.size() != inputMask.size()) { + return false; + } + + /*Make a 2**n size image, easier for multiple consecutive half size*/ + size_t width = inputColor.Width(); + size_t height = inputColor.Height(); + size_t widthPot = pow(2.0, 
std::ceil(std::log2(width))); + size_t heightPot = pow(2.0, std::ceil(std::log2(height))); + image::Image inputColorPot(widthPot, heightPot, true, image::RGBfColor(0.0f, 0.0f, 0.0f)); + image::Image inputMaskPot(widthPot, heightPot, true, 0); + inputColorPot.block(0, 0, height, width) = inputColor; + inputMaskPot.block(0, 0, height, width) = inputMask; + + _pyramid_color.push_back(inputColorPot); + _pyramid_mask.push_back(inputMaskPot); + + + /*Create the gaussian kernel*/ + Eigen::VectorXf kernel = gaussian_kernel_vector(7, 2.0); + + /*Compute initial count of valid pixels*/ + size_t countValidPixels = countValid(inputMask); + + while (true) { + + if (widthPot < 128 || heightPot < 128) break; + + /* Make sure enough pixels are valid*/ + if (countValidPixels < 128*128) break; + + size_t level = _pyramid_color.size() - 1; + + image::Image buffer(widthPot, heightPot); + image::Image smoothed(widthPot, heightPot); + + convolveHorizontal(buffer, _pyramid_color[level], _pyramid_mask[level], kernel); + convolveVertical(smoothed, buffer, _pyramid_mask[level], kernel); + + widthPot = widthPot / 2; + heightPot = heightPot / 2; + image::Image reducedColor; + image::Image reducedMask; + downscale(reducedColor, reducedMask, smoothed, _pyramid_mask[level]); + + countValidPixels = countValid(reducedMask); + _pyramid_color.push_back(reducedColor); + _pyramid_mask.push_back(reducedMask); + } + + + /*const aliceVision::image::Image & panorama = _pyramid_color[_pyramid_color.size() - 1]; + image::writeImage("/home/mmoc/test.exr", panorama, image::EImageColorSpace::SRGB);*/ + + std::cout << "levels : " <<_pyramid_color.size() << std::endl; + + return true; + } + +private: + std::vector> _pyramid_color; + std::vector> _pyramid_mask; +}; + class Compositer { public: @@ -288,45 +435,8 @@ class LaplacianCompositer : public AlphaCompositer { virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & 
inputWeights, size_t offset_x, size_t offset_y) { - for (size_t i = 0; i < color.Height(); i++) { - - size_t pano_i = offset_y + i; - if (pano_i >= _panorama.Height()) { - continue; - } - - for (size_t j = 0; j < color.Width(); j++) { - - if (!inputMask(i, j)) { - continue; - } - - size_t pano_j = offset_x + j; - if (pano_j >= _panorama.Width()) { - pano_j = pano_j - _panorama.Width(); - } - - if (!_mask(pano_i, pano_j)) { - _panorama(pano_i, pano_j) = color(i, j); - _weightmap(pano_i, pano_j) = inputWeights(i, j); - } - else { - float wp = _weightmap(pano_i, pano_j); - float wc = inputWeights(i, j); - float wtotal = wp + wc; - wp /= wtotal; - wc /= wtotal; - - _panorama(pano_i, pano_j).r() = wp * _panorama(pano_i, pano_j).r() + wc * color(i, j).r(); - _panorama(pano_i, pano_j).g() = wp * _panorama(pano_i, pano_j).g() + wc * color(i, j).g(); - _panorama(pano_i, pano_j).b() = wp * _panorama(pano_i, pano_j).b() + wc * color(i, j).b(); - - _weightmap(pano_i, pano_j) = wtotal; - } - - _mask(pano_i, pano_j) = true; - } - } + GaussianPyramid pyr; + pyr.process(color, inputMask); return true; } From 0f3f161eeb753efa9aabee3cc5c61be551c84933 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Mon, 18 Nov 2019 16:23:49 +0100 Subject: [PATCH 117/174] currently working on laplacian pyramid --- .../pipeline/main_panoramaCompositing.cpp | 250 +++++++++++++++++- 1 file changed, 241 insertions(+), 9 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index dfbdd554df..7039256149 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -218,13 +218,125 @@ bool upscale(aliceVision::image::Image & outputColor, aliceVis if (!inputMask(i, j)) { outputMask(di, dj) = 0; + continue; } outputColor(di, dj) = inputColor(i, j); outputColor(di, dj + 1) = inputColor(i, j); outputColor(di + 1, dj) = inputColor(i, j); outputColor(di + 1, dj + 1) = 
inputColor(i, j); - outputMask(di, dj) = 1; + outputMask(di, dj) = 255; + outputMask(di, dj + 1) = 255; + outputMask(di + 1, dj) = 255; + outputMask(di + 1, dj + 1) = 255; + } + } + + return true; +} + +bool difference(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & aColor, const aliceVision::image::Image & aMask, const aliceVision::image::Image & bColor, const aliceVision::image::Image & bMask) { + + size_t width = outputColor.Width(); + size_t height = outputColor.Height(); + + if (outputMask.Width() != width || outputMask.Height() != height) { + return false; + } + + if (aColor.Width() != width || aColor.Height() != height) { + return false; + } + + if (aMask.Width() != width || aMask.Height() != height) { + return false; + } + + if (bColor.Width() != width || bColor.Height() != height) { + return false; + } + + if (bMask.Width() != width || bMask.Height() != height) { + return false; + } + + for (int i = 0; i < height; i++) { + for (int j = 0; j < width; j++) { + + if (aMask(i, j)) { + if (bMask(i, j)) { + outputColor(i, j) = aColor(i, j) - bColor(i, j); + outputMask(i, j) = 1; + } + else { + outputColor(i, j) = aColor(i, j); + outputMask(i, j) = 1; + } + } + else { + if (bMask(i, j)) { + outputMask(i, j) = 0; + } + else { + outputMask(i, j) = 0; + } + } + } + } + + return true; +} + +bool addition(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & aColor, const aliceVision::image::Image & aMask, const aliceVision::image::Image & bColor, const aliceVision::image::Image & bMask) { + + size_t width = outputColor.Width(); + size_t height = outputColor.Height(); + + + if (outputMask.Width() != width || outputMask.Height() != height) { + return false; + } + + if (aColor.Width() != width || aColor.Height() != height) { + return false; + } + + if (aMask.Width() != width || aMask.Height() != height) { + return false; + } + + if 
(bColor.Width() != width || bColor.Height() != height) { + return false; + } + + if (bMask.Width() != width || bMask.Height() != height) { + return false; + } + + for (int i = 0; i < height; i++) { + for (int j = 0; j < width; j++) { + + if (aMask(i, j)) { + if (bMask(i, j)) { + outputColor(i, j).r() = aColor(i, j).r() + bColor(i, j).r(); + outputColor(i, j).g() = aColor(i, j).g() + bColor(i, j).g(); + outputColor(i, j).b() = aColor(i, j).b() + bColor(i, j).b(); + outputMask(i, j) = 1; + } + else { + outputColor(i, j) = aColor(i, j); + outputMask(i, j) = 1; + } + } + else { + if (bMask(i, j)) { + outputColor(i, j) = bColor(i, j); + outputMask(i, j) = 1; + } + else { + outputMask(i, j) = 0; + } + } } } @@ -303,20 +415,126 @@ class GaussianPyramid { _pyramid_mask.push_back(reducedMask); } + return true; + } - /*const aliceVision::image::Image & panorama = _pyramid_color[_pyramid_color.size() - 1]; - image::writeImage("/home/mmoc/test.exr", panorama, image::EImageColorSpace::SRGB);*/ + size_t getPyramidSize() { + return _pyramid_color.size(); + } - std::cout << "levels : " <<_pyramid_color.size() << std::endl; + const std::vector> & getColors() { + return _pyramid_color; + } - return true; + const std::vector> & getMasks() { + return _pyramid_mask; } -private: +protected: std::vector> _pyramid_color; std::vector> _pyramid_mask; }; +class LaplacianPyramid { +public: + bool process(const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { + + GaussianPyramid gp; + if (!gp.process(inputColor, inputMask)) { + return false; + } + + const std::vector> & gp_colors = gp.getColors(); + const std::vector> & gp_masks = gp.getMasks(); + + image::writeImage("/home/mmoc/test2.exr", gp_colors[0], image::EImageColorSpace::SRGB); + + /*Create the gaussian kernel*/ + Eigen::VectorXf kernel = gaussian_kernel_vector(7, 2.0); + + for (int level = 0; level < gp_colors.size() - 1; level++) { + + const image::Image prev_color = gp_colors[level + 1]; + const 
image::Image prev_mask = gp_masks[level + 1]; + const image::Image color = gp_colors[level]; + const image::Image mask = gp_masks[level]; + + image::Image prev_color_upscaled(color.Width(), color.Height()); + image::Image prev_mask_upscaled(mask.Width(), mask.Height()); + + image::Image diff_color(color.Width(), color.Height()); + image::Image buffer(color.Width(), color.Height()); + image::Image smoothed(color.Width(), color.Height()); + image::Image diff_mask(mask.Width(), mask.Height()); + + upscale(prev_color_upscaled, prev_mask_upscaled, prev_color, prev_mask); + + convolveHorizontal(buffer, prev_color_upscaled, prev_mask_upscaled, kernel); + convolveVertical(smoothed, buffer, prev_mask_upscaled, kernel); + + if (!difference(diff_color, diff_mask, color, mask, smoothed, prev_mask_upscaled)) { + return false; + } + + _pyramid_color.push_back(diff_color); + _pyramid_mask.push_back(diff_mask); + } + + _pyramid_color.push_back(gp_colors[gp_colors.size() - 1]); + _pyramid_mask.push_back(gp_masks[gp_masks.size() - 1]); + + + return true; + } + + bool stack() { + + image::Image prev_color = _pyramid_color[_pyramid_color.size() - 1]; + image::Image prev_mask = _pyramid_mask[_pyramid_mask.size() - 1]; + + /*Create the gaussian kernel*/ + Eigen::VectorXf kernel = gaussian_kernel_vector(7, 2.0); + + for (int level = _pyramid_color.size() - 2; level >= 0; level--) { + + const image::Image diff_color = _pyramid_color[level]; + const image::Image diff_mask = _pyramid_mask[level]; + + image::Image prev_color_upscaled(diff_color.Width(), diff_color.Height()); + image::Image buffer(diff_color.Width(), diff_color.Height()); + image::Image smoothed(diff_color.Width(), diff_color.Height()); + image::Image prev_mask_upscaled(diff_mask.Width(), diff_mask.Height()); + + upscale(prev_color_upscaled, prev_mask_upscaled, prev_color, prev_mask); + convolveHorizontal(buffer, prev_color_upscaled, prev_mask_upscaled, kernel); + convolveVertical(smoothed, buffer, prev_mask_upscaled, 
kernel); + + prev_color = image::Image(diff_color.Width(), diff_color.Height()); + prev_mask = image::Image(diff_mask.Width(), diff_mask.Height()); + + std::cout << prev_color.Width() << " "; + std::cout << prev_mask.Width() << " "; + std::cout << smoothed.Width() << " "; + std::cout << prev_mask_upscaled.Width() << " "; + std::cout << diff_color.Width() << " "; + std::cout << diff_mask.Width() << std::endl; + + if (!addition(prev_color, prev_mask, smoothed, prev_mask_upscaled, diff_color, diff_mask)) { + std::cout << "ok" << std::endl; + return false; + } + } + + image::writeImage("/home/mmoc/test.exr", prev_color, image::EImageColorSpace::SRGB); + + + return true; + } + +protected: + std::vector> _pyramid_color; + std::vector> _pyramid_mask; +}; class Compositer { public: @@ -435,13 +653,27 @@ class LaplacianCompositer : public AlphaCompositer { virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - GaussianPyramid pyr; - pyr.process(color, inputMask); + /* if (first) { + prev_color = color; + prev_inputMask = inputMask; + first = false; + return true; + }*/ + + LaplacianPyramid pyr1; + pyr1.process(color, inputMask); + pyr1.stack(); + + /*LaplacianPyramid pyr2; + pyr2.process(color, inputMask);*/ return true; } protected: + bool first = true; + aliceVision::image::Image prev_color; + aliceVision::image::Image prev_inputMask; aliceVision::image::Image _weightmap; }; @@ -544,7 +776,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; - if (pos > 22 && pos < 28) { + if (pos == 24 /*|| pos == 25*//* && pos < 28*/) { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); cv.weights_path = item.second.get("filename_weights"); From 87777fc6c87685e9462898b1d94740154c87cc85 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Tue, 19 Nov 2019 
16:57:11 +0100 Subject: [PATCH 118/174] WIP laplacian --- .../pipeline/main_panoramaCompositing.cpp | 211 ++++++++++++++---- 1 file changed, 171 insertions(+), 40 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 7039256149..e0f28e5737 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -91,8 +91,6 @@ bool convolveHorizontal(image::Image & output, const image::Im continue; } - - for (int k = 0; k < kernel.size(); k++) { double w = kernel(k); @@ -187,7 +185,13 @@ bool downscale(aliceVision::image::Image & outputColor, aliceV for (int i = 0; i < output_height; i++) { for (int j = 0; j < output_width; j++) { outputMask(i, j) = inputMask(i * 2, j * 2); - outputColor(i, j) = inputColor(i * 2, j * 2); + + if (!outputMask(i, j)) { + outputColor(i, j) = image::RGBfColor(0.0f); + } + else { + outputColor(i, j) = inputColor(i * 2, j * 2); + } } } @@ -217,7 +221,16 @@ bool upscale(aliceVision::image::Image & outputColor, aliceVis int dj = j * 2; if (!inputMask(i, j)) { + outputColor(di, dj) = image::RGBfColor(0.0f); + outputColor(di, dj + 1) = image::RGBfColor(0.0f); + outputColor(di + 1, dj) = image::RGBfColor(0.0f); + outputColor(di + 1, dj + 1) = image::RGBfColor(0.0f); + outputMask(di, dj) = 0; + outputMask(di, dj + 1) = 0; + outputMask(di + 1, dj) = 0; + outputMask(di + 1, dj + 1) = 0; + continue; } @@ -225,6 +238,7 @@ bool upscale(aliceVision::image::Image & outputColor, aliceVis outputColor(di, dj + 1) = inputColor(i, j); outputColor(di + 1, dj) = inputColor(i, j); outputColor(di + 1, dj + 1) = inputColor(i, j); + outputMask(di, dj) = 255; outputMask(di, dj + 1) = 255; outputMask(di + 1, dj) = 255; @@ -275,9 +289,11 @@ bool difference(aliceVision::image::Image & outputColor, alice } else { if (bMask(i, j)) { + outputColor(i, j) = image::RGBfColor(0.0f); outputMask(i, j) = 0; } else { + outputColor(i, j) = 
image::RGBfColor(0.0f); outputMask(i, j) = 0; } } @@ -324,8 +340,8 @@ bool addition(aliceVision::image::Image & outputColor, aliceVi outputMask(i, j) = 1; } else { - outputColor(i, j) = aColor(i, j); - outputMask(i, j) = 1; + outputColor(i, j) = image::RGBfColor(0.0f); + outputMask(i, j) = 0; } } else { @@ -334,6 +350,7 @@ bool addition(aliceVision::image::Image & outputColor, aliceVi outputMask(i, j) = 1; } else { + outputColor(i, j) = image::RGBfColor(0.0f); outputMask(i, j) = 0; } } @@ -439,6 +456,9 @@ class LaplacianPyramid { public: bool process(const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { + _original_width = inputColor.Width(); + _original_height = inputColor.Height(); + GaussianPyramid gp; if (!gp.process(inputColor, inputMask)) { return false; @@ -447,8 +467,6 @@ class LaplacianPyramid { const std::vector> & gp_colors = gp.getColors(); const std::vector> & gp_masks = gp.getMasks(); - image::writeImage("/home/mmoc/test2.exr", gp_colors[0], image::EImageColorSpace::SRGB); - /*Create the gaussian kernel*/ Eigen::VectorXf kernel = gaussian_kernel_vector(7, 2.0); @@ -487,7 +505,7 @@ class LaplacianPyramid { return true; } - bool stack() { + bool stack(int max_level) { image::Image prev_color = _pyramid_color[_pyramid_color.size() - 1]; image::Image prev_mask = _pyramid_mask[_pyramid_mask.size() - 1]; @@ -495,45 +513,131 @@ class LaplacianPyramid { /*Create the gaussian kernel*/ Eigen::VectorXf kernel = gaussian_kernel_vector(7, 2.0); - for (int level = _pyramid_color.size() - 2; level >= 0; level--) { + for (int level = _pyramid_color.size() - 2; level >= max_level; level--) { const image::Image diff_color = _pyramid_color[level]; const image::Image diff_mask = _pyramid_mask[level]; image::Image prev_color_upscaled(diff_color.Width(), diff_color.Height()); image::Image buffer(diff_color.Width(), diff_color.Height()); - image::Image smoothed(diff_color.Width(), diff_color.Height()); + image::Image 
smoothed(diff_color.Width(), diff_color.Height(), true, image::RGBfColor(0.0f)); image::Image prev_mask_upscaled(diff_mask.Width(), diff_mask.Height()); - upscale(prev_color_upscaled, prev_mask_upscaled, prev_color, prev_mask); - convolveHorizontal(buffer, prev_color_upscaled, prev_mask_upscaled, kernel); - convolveVertical(smoothed, buffer, prev_mask_upscaled, kernel); + if (!upscale(prev_color_upscaled, prev_mask_upscaled, prev_color, prev_mask)) { + return false; + } - prev_color = image::Image(diff_color.Width(), diff_color.Height()); - prev_mask = image::Image(diff_mask.Width(), diff_mask.Height()); + if (!convolveHorizontal(buffer, prev_color_upscaled, prev_mask_upscaled, kernel)) { + return false; + } + + if (!convolveVertical(smoothed, buffer, prev_mask_upscaled, kernel)) { + return false; + } + + prev_color = image::Image(diff_color.Width(), diff_color.Height(), true, image::RGBfColor(0.0f)); + prev_mask = image::Image(diff_mask.Width(), diff_mask.Height(), true, 0); - std::cout << prev_color.Width() << " "; - std::cout << prev_mask.Width() << " "; - std::cout << smoothed.Width() << " "; - std::cout << prev_mask_upscaled.Width() << " "; - std::cout << diff_color.Width() << " "; - std::cout << diff_mask.Width() << std::endl; if (!addition(prev_color, prev_mask, smoothed, prev_mask_upscaled, diff_color, diff_mask)) { - std::cout << "ok" << std::endl; return false; } + } + + _stack_result = prev_color; + _stack_mask = prev_mask; + + return true; + } + + bool merge(const LaplacianPyramid & other) { + + for (int level = 0; level < _pyramid_color.size(); level++) { + + image::Image & aColor = _pyramid_color[level]; + image::Image & aMask = _pyramid_mask[level]; + const image::Image & bColor = other._pyramid_color[level]; + const image::Image & bMask = other._pyramid_mask[level]; + + for (int i = 0; i < bColor.Height(); i++) { + + for (int j = 0; j < bColor.Width(); j++) { + + if (aMask(i, j)) { + if (bMask(i, j)) { + aColor(i, j).r() = 0.5f * aColor(i, j).r() + 
0.5f * bColor(i, j).r(); + aColor(i, j).g() = 0.5f * aColor(i, j).g() + 0.5f * bColor(i, j).g(); + aColor(i, j).b() = 0.5f * aColor(i, j).b() + 0.5f * bColor(i, j).b(); + } + else { + //do nothing + } + } + else { + if (bMask(i, j)) { + aColor(i, j) = bColor(i, j); + aMask(i, j) = 1; + } + else { + aColor(i, j) = image::RGBfColor(0.0f); + aMask(i, j) = 0; + } + } + } + } } - image::writeImage("/home/mmoc/test.exr", prev_color, image::EImageColorSpace::SRGB); - + return true; + } + + bool reducePyramidDepth(size_t newSize) { + + if (newSize == 0) { + return false; + } + + if (newSize > _pyramid_color.size()) { + return false; + } + + if (newSize == _pyramid_color.size()) { + return true; + } + + if (!stack(newSize - 1)) { + return false; + } + + _pyramid_color[newSize - 1] = _stack_result; + _pyramid_mask[newSize - 1] = _stack_mask; + + _pyramid_color.resize(newSize); + _pyramid_mask.resize(newSize); return true; } + const image::Image & getStackResult() const { + return _stack_result; + } + + const image::Image & getStackMask() const { + return _stack_mask; + } + + size_t getDepth() const { + return _pyramid_color.size(); + } + protected: std::vector> _pyramid_color; std::vector> _pyramid_mask; + + image::Image _stack_result; + image::Image _stack_mask; + + size_t _original_width; + size_t _original_height; }; class Compositer { @@ -653,28 +757,55 @@ class LaplacianCompositer : public AlphaCompositer { virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - /* if (first) { - prev_color = color; - prev_inputMask = inputMask; - first = false; - return true; - }*/ + LaplacianPyramid pyramid_panorama; + pyramid_panorama.process(_panorama, _mask); + + aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBfColor(0.0f)); + aliceVision::image::Image viewmask(_panorama.Width(), _panorama.Height(), true, 0); + 
+ for (int i = 0; i < color.Height(); i++) { + + int di = i + offset_y; + + for (int j = 0; j < color.Width(); j++) { + + int dj = j + offset_x; + + if (dj >= view.Width()) { + dj = dj - view.Width(); + } + + view(di, dj) = color(i, j); + viewmask(di, dj) = inputMask(i, j); + } + } + + LaplacianPyramid pyramid_add; + pyramid_add.process(view, viewmask); + + + if (pyramid_panorama.getDepth() > pyramid_add.getDepth()) { + pyramid_panorama.reducePyramidDepth(pyramid_add.getDepth()); + } + else if (pyramid_panorama.getDepth() < pyramid_add.getDepth()) { + pyramid_add.reducePyramidDepth(pyramid_panorama.getDepth()); + } + + pyramid_panorama.merge(pyramid_add); + pyramid_panorama.stack(0); - LaplacianPyramid pyr1; - pyr1.process(color, inputMask); - pyr1.stack(); + const aliceVision::image::Image & img = pyramid_panorama.getStackResult(); + const aliceVision::image::Image & mask = pyramid_panorama.getStackMask(); - /*LaplacianPyramid pyr2; - pyr2.process(color, inputMask);*/ + _panorama = img.block(0, 0, _panorama.Height(), _panorama.Width()); + _mask = mask.block(0, 0, _panorama.Height(), _panorama.Width()); + + image::writeImage("/home/mmoc/test/out.exr", pyramid_panorama.getStackResult(), image::EImageColorSpace::NO_CONVERSION); return true; } protected: - bool first = true; - aliceVision::image::Image prev_color; - aliceVision::image::Image prev_inputMask; - aliceVision::image::Image _weightmap; }; int main(int argc, char **argv) { @@ -776,7 +907,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; - if (pos == 24 /*|| pos == 25*//* && pos < 28*/) { + if (pos == 24 || pos == 25 || pos == 28) { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); cv.weights_path = item.second.get("filename_weights"); From 6a729e6a9b2fef6867a802a424ed6a48d5afc24d Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 20 Nov 2019 16:33:50 +0100 Subject: [PATCH 119/174] [hdr] fix image size 
of the output hdr fusion --- src/aliceVision/hdr/hdrMerge.cpp | 16 ++++++++++++---- src/software/convert/main_convertLDRToHDR.cpp | 2 +- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index b6c8be7b92..137383c77a 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -10,7 +10,9 @@ #include #include #include + #include +#include namespace aliceVision { @@ -54,13 +56,19 @@ void hdrMerge::process(const std::vector< image::Image > &imag assert(!images.empty()); assert(images.size() == times.size()); - //reset radiance image - radiance.fill(image::RGBfColor(0.f, 0.f, 0.f)); - - //get images width, height + // get images width, height const std::size_t width = images.front().Width(); const std::size_t height = images.front().Height(); + // resize and reset radiance image to 0.0 + radiance.resize(width, height, true, image::RGBfColor(0.f, 0.f, 0.f)); + + ALICEVISION_LOG_TRACE("[hdrMerge] Images to fuse:"); + for(int i = 0; i < images.size(); ++i) + { + ALICEVISION_LOG_TRACE(images[i].Width() << "x" << images[i].Height() << ", time: " << times[i]); + } + const float maxLum = 1000.0f; const float minLum = 0.0001f; diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index b031b6f16a..b51b3f2a64 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -541,7 +541,7 @@ int main(int argc, char * argv[]) { /* Merge HDR images */ hdr::hdrMerge merge; float targetTime = targetView->getMetadataShutter(); - image::Image HDRimage(targetView->getWidth(), targetView->getHeight(), false); + image::Image HDRimage; merge.process(images, groupedExposures[g], fusionWeight, response, HDRimage, targetTime, false, clampedValueCorrection); /* Output image file path */ From 9c1cb2e737a9cc83f25e5d9af935ef762fc022f5 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 
20 Nov 2019 16:36:18 +0100 Subject: [PATCH 120/174] add radial4 for stitching --- src/aliceVision/sfm/BundleAdjustmentCeres.cpp | 6 +- .../sfm/ResidualErrorConstraintFunctor.hpp | 150 ++++++++++++++++++ 2 files changed, 154 insertions(+), 2 deletions(-) diff --git a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp index 7fd77f1f40..fbddfa745d 100644 --- a/src/aliceVision/sfm/BundleAdjustmentCeres.cpp +++ b/src/aliceVision/sfm/BundleAdjustmentCeres.cpp @@ -97,17 +97,19 @@ ceres::CostFunction* createConstraintsCostFunctionFromIntrinsics(const Intrinsic return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_Pinhole(observation_first.homogeneous(), observation_second.homogeneous())); case PINHOLE_CAMERA_RADIAL1: return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_PinholeRadialK1(observation_first.homogeneous(), observation_second.homogeneous())); + case PINHOLE_CAMERA_RADIAL3: + return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_PinholeRadialK3(observation_first.homogeneous(), observation_second.homogeneous())); case PINHOLE_CAMERA_FISHEYE: return new ceres::AutoDiffCostFunction(new ResidualErrorConstraintFunctor_PinholeFisheye(observation_first.homogeneous(), observation_second.homogeneous())); default: throw std::logic_error("Cannot create cost function, unrecognized intrinsic type in BA."); - } + } } void BundleAdjustmentCeres::CeresOptions::setDenseBA() { // default configuration use a DENSE representation - preconditionerType = ceres::JACOBI; + preconditionerType = ceres::JACOBI; linearSolverType = ceres::DENSE_SCHUR; sparseLinearAlgebraLibraryType = ceres::SUITE_SPARSE; // not used but just to avoid a warning in ceres ALICEVISION_LOG_DEBUG("BundleAdjustment[Ceres]: DENSE_SCHUR"); diff --git a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp index c4bde80cfc..e0b080e8cc 100644 --- 
a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp +++ b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp @@ -251,6 +251,156 @@ struct ResidualErrorConstraintFunctor_PinholeRadialK1 Vec3 m_pos_2dpoint_second; // The 2D observation in second view }; +/** + * @brief Ceres functor to use a pair of pinhole on a pure rotation 2D constraint. + * + * Data parameter blocks are the following <2,6,6,6> + * - 2 => dimension of the residuals, + * - 4 => the intrinsic data block for the first view [focal, principal point x, principal point y, K1, K2, K3], + * - 3 => the camera extrinsic data block for the first view + * - 3 => the camera extrinsic data block for the second view + * + */ + + +struct ResidualErrorConstraintFunctor_PinholeRadialK3 +{ + ResidualErrorConstraintFunctor_PinholeRadialK3(const Vec3 & pos_2dpoint_first, const Vec3 & pos_2dpoint_second) + : m_pos_2dpoint_first(pos_2dpoint_first), m_pos_2dpoint_second(pos_2dpoint_second) + { + } + + // Enum to map intrinsics parameters between aliceVision & ceres camera data parameter block. 
+ enum { + OFFSET_FOCAL_LENGTH = 0, + OFFSET_PRINCIPAL_POINT_X = 1, + OFFSET_PRINCIPAL_POINT_Y = 2, + OFFSET_DISTO_K1 = 3, + OFFSET_DISTO_K2 = 4, + OFFSET_DISTO_K3 = 5 + }; + + static double distoFunctor(const std::vector & params, double r2) + { + const double k1 = params[0]; + const double k2 = params[1]; + const double k3 = params[2]; + return r2 * Square(1.+r2*(k1+r2*(k2+r2*k3))); + } + + template + void lift(const T* const cam_K, const Vec3 pt, Eigen::Matrix< T, 3, 1> & out) const + { + const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; + const T& principal_point_x = cam_K[OFFSET_PRINCIPAL_POINT_X]; + const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; + const T& k1 = cam_K[OFFSET_DISTO_K1]; + const T& k2 = cam_K[OFFSET_DISTO_K2]; + const T& k3 = cam_K[OFFSET_DISTO_K3]; + + //Unshift then unscale back to meters + T xd = (pt(0) - principal_point_x) / focal; + T yd = (pt(1) - principal_point_y) / focal; + T distorted_radius = sqrt(xd*xd + yd*yd); + + /*A hack to obtain undistorted point even if using automatic diff*/ + double xd_real, yd_real, k1_real, k2_real, k3_real; + xd_real = getJetValue(xd); + yd_real = getJetValue(yd); + k1_real = getJetValue(k1); + k2_real = getJetValue(k2); + k3_real = getJetValue(k3); + + /*get rescaler*/ + std::vector distortionParams = {k1_real, k2_real, k3_real}; + double distorted_radius_square = xd_real * xd_real + yd_real * yd_real; + double rescaler = ::sqrt(camera::radial_distortion::bisection_Radius_Solve(distortionParams, distorted_radius_square, distoFunctor));; + + if (distorted_radius < 1e-12) { + out(0) = xd; + out(1) = yd; + out(2) = static_cast(1.0); + } + else { + out(0) = (xd / distorted_radius) * static_cast(rescaler); + out(1) = (yd / distorted_radius) * static_cast(rescaler); + out(2) = static_cast(1.0); + } + } + + template + void unlift(const T* const cam_K, const Eigen::Matrix< T, 3, 1> & pt, Eigen::Matrix< T, 3, 1> & out) const + { + const T& focal = cam_K[OFFSET_FOCAL_LENGTH]; + const T& principal_point_x 
= cam_K[OFFSET_PRINCIPAL_POINT_X]; + const T& principal_point_y = cam_K[OFFSET_PRINCIPAL_POINT_Y]; + const T& k1 = cam_K[OFFSET_DISTO_K1]; + const T& k2 = cam_K[OFFSET_DISTO_K2]; + const T& k3 = cam_K[OFFSET_DISTO_K3]; + + //Project on plane + Eigen::Matrix< T, 3, 1> proj_pt = pt / pt(2); + + //Apply distortion + const T r2 = proj_pt(0)*proj_pt(0) + proj_pt(1)*proj_pt(1); + const T r4 = r2 * r2; + const T r6 = r4 * r2; + const T r_coeff = (T(1) + k1*r2 + k2*r4 + k3*r6); + const T x_d = proj_pt(0) * r_coeff; + const T y_d = proj_pt(1) * r_coeff; + + //Scale and shift + out(0) = x_d * focal + principal_point_x; + out(1) = y_d * focal + principal_point_y; + out(2) = static_cast(1.0); + } + + /** + * @param[in] cam_K: Camera intrinsics( focal, principal point [x,y] ) + * @param[in] cam_Rt: Camera parameterized using one block of 6 parameters [R;t]: + * - 3 for rotation(angle axis), 3 for translation + * @param[in] pos_3dpoint + * @param[out] out_residuals + */ + template + bool operator()( + const T* const cam_K, + const T* const cam_R1, + const T* const cam_R2, + T* out_residuals) const + { + Eigen::Matrix oneRo, twoRo, twoRone; + + Eigen::Matrix pt3d_1; + + //From pixel to meters + lift(cam_K, m_pos_2dpoint_first, pt3d_1); + + //Build relative rotation + ceres::AngleAxisToRotationMatrix(cam_R1, oneRo.data()); + ceres::AngleAxisToRotationMatrix(cam_R2, twoRo.data()); + twoRone = twoRo * oneRo.transpose(); + + //Transform point + Eigen::Matrix< T, 3, 1> pt3d_2_est = twoRone * pt3d_1; + + //Project back to image space in pixels + Eigen::Matrix< T, 3, 1> pt2d_2_est; + unlift(cam_K, pt3d_2_est, pt2d_2_est); + + //Compute residual + Eigen::Matrix< T, 3, 1> residual = pt2d_2_est - m_pos_2dpoint_second; + + out_residuals[0] = residual(0); + out_residuals[1] = residual(1); + + return true; + } + + Vec3 m_pos_2dpoint_first; // The 2D observation in first view + Vec3 m_pos_2dpoint_second; // The 2D observation in second view +}; + /** * @brief Ceres functor to use a pair of 
fisheye on a pure rotation 2D constraint. From b65cc308bccfa603c8f280af1d0f8048b0cd0de1 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 20 Nov 2019 16:36:31 +0100 Subject: [PATCH 121/174] correct laplacian --- src/software/pipeline/main_panoramaCompositing.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index e0f28e5737..37e7b79a00 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -235,9 +235,9 @@ bool upscale(aliceVision::image::Image & outputColor, aliceVis } outputColor(di, dj) = inputColor(i, j); - outputColor(di, dj + 1) = inputColor(i, j); - outputColor(di + 1, dj) = inputColor(i, j); - outputColor(di + 1, dj + 1) = inputColor(i, j); + outputColor(di, dj + 1) = image::RGBfColor(0.0f); + outputColor(di + 1, dj) = image::RGBfColor(0.0f); + outputColor(di + 1, dj + 1) = image::RGBfColor(0.0f); outputMask(di, dj) = 255; outputMask(di, dj + 1) = 255; @@ -800,8 +800,6 @@ class LaplacianCompositer : public AlphaCompositer { _panorama = img.block(0, 0, _panorama.Height(), _panorama.Width()); _mask = mask.block(0, 0, _panorama.Height(), _panorama.Width()); - image::writeImage("/home/mmoc/test/out.exr", pyramid_panorama.getStackResult(), image::EImageColorSpace::NO_CONVERSION); - return true; } @@ -907,7 +905,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; - if (pos == 24 || pos == 25 || pos == 28) { + if (pos == 24 || pos == 25 || pos == 26) { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); cv.weights_path = item.second.get("filename_weights"); From 168d6f359b029589c0d982fc82dd46282a935a84 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 22 Nov 2019 14:02:59 +0100 Subject: [PATCH 122/174] some tests --- .vscode/c_cpp_properties.json | 4 +- 
.../pipeline/main_panoramaCompositing.cpp | 266 ++++++++++++++++-- 2 files changed, 248 insertions(+), 22 deletions(-) diff --git a/.vscode/c_cpp_properties.json b/.vscode/c_cpp_properties.json index 836216394a..dc16256c97 100644 --- a/.vscode/c_cpp_properties.json +++ b/.vscode/c_cpp_properties.json @@ -7,9 +7,9 @@ ], "defines": [], "compilerPath": "/usr/bin/clang", - "intelliSenseMode": "clang-x64", "configurationProvider": "vector-of-bool.cmake-tools", - "compileCommands": "${workspaceFolder}/build/compile_commands.json" + "compileCommands": "${workspaceFolder}/build/compile_commands.json", + } ], "version": 4 diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 37e7b79a00..226589f111 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -43,12 +43,16 @@ typedef struct { Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { - Eigen::VectorXd x; - x.setLinSpaced(kernel_length + 1, -sigma, +sigma); + int radius = kernel_length / 2; + + Eigen::VectorXd x(kernel_length + 1); + for (int i = 0; i < kernel_length + 1; i++) { + x(i) = i - radius; + } Eigen::VectorXd cdf(kernel_length + 1); for (int i = 0; i < kernel_length + 1; i++) { - cdf(i) = 0.5 * (1 + std::erf(x(i)/sqrt(2.0))); + cdf(i) = 0.5 * (1.0 + std::erf(x(i)/(sigma * sqrt(2.0)))); } @@ -115,6 +119,45 @@ bool convolveHorizontal(image::Image & output, const image::Im return true; } +bool convolveHorizontal(image::Image & output, const image::Image & input, const Eigen::VectorXf & kernel) { + + if (output.size() != input.size()) { + return false; + } + + if (kernel.size() % 2 == 0) { + return false; + } + + int radius = kernel.size() / 2; + + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + float sum = 0.0f; + float sum_mask = 0.0f; + + for (int k = 0; k < kernel.size(); k++) { + + double w = kernel(k); + 
int col = j + k - radius; + + if (col < 0 || col >= input.Width()) { + continue; + } + + sum += w * input(i, col); + sum_mask += w; + } + + output(i, j) = sum / sum_mask; + } + } + + return true; +} + bool convolveVertical(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::VectorXf & kernel) { if (output.size() != input.size()) { @@ -152,7 +195,41 @@ bool convolveVertical(image::Image & output, const image::Imag continue; } - if (!mask(row, j)) { + sum += w * input(row, j); + sum_mask += w; + } + + output(i, j) = sum / sum_mask; + } + } + + return true; +} + +bool convolveVertical(image::Image & output, const image::Image & input, const Eigen::VectorXf & kernel) { + + if (output.size() != input.size()) { + return false; + } + + if (kernel.size() % 2 == 0) { + return false; + } + + int radius = kernel.size() / 2; + + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + + float sum = 0.0f; + float sum_mask = 0.0f; + + for (int k = 0; k < kernel.size(); k++) { + + double w = kernel(k); + int row = i + k - radius; + + if (row < 0 || row >= input.Height()) { continue; } @@ -198,6 +275,25 @@ bool downscale(aliceVision::image::Image & outputColor, aliceV return true; } +bool downscale(aliceVision::image::Image & outputColor, const aliceVision::image::Image & inputColor) { + + size_t width = inputColor.Width(); + size_t height = inputColor.Height(); + + size_t output_width = width / 2; + size_t output_height = height / 2; + + outputColor = image::Image(output_width, output_height); + + for (int i = 0; i < output_height; i++) { + for (int j = 0; j < output_width; j++) { + outputColor(i, j) = inputColor(i * 2, j * 2); + } + } + + return true; +} + bool upscale(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { size_t width = inputColor.Width(); @@ -234,10 +330,10 @@ bool 
upscale(aliceVision::image::Image & outputColor, aliceVis continue; } - outputColor(di, dj) = inputColor(i, j); - outputColor(di, dj + 1) = image::RGBfColor(0.0f); - outputColor(di + 1, dj) = image::RGBfColor(0.0f); - outputColor(di + 1, dj + 1) = image::RGBfColor(0.0f); + outputColor(di, dj).r() = inputColor(i, j); + outputColor(di, dj + 1) = inputColor(i, j); + outputColor(di + 1, dj) = inputColor(i, j); + outputColor(di + 1, dj + 1) = inputColor(i, j); outputMask(di, dj) = 255; outputMask(di, dj + 1) = 255; @@ -375,6 +471,67 @@ size_t countValid(const aliceVision::image::Image & inputMask) { return count; } +class GaussianPyramidNoMask { +public: + bool process(const aliceVision::image::Image & input) { + + _pyramid.clear(); + + /*Make a 2**n size image, easier for multiple consecutive half size*/ + size_t width = input.Width(); + size_t height = input.Height(); + size_t widthPot = pow(2.0, std::ceil(std::log2(width))); + size_t heightPot = pow(2.0, std::ceil(std::log2(height))); + image::Image inputPot(widthPot, heightPot, true, 0.0f); + inputPot.block(0, 0, height, width) = input; + + _pyramid.push_back(inputPot); + + /*Create the gaussian kernel*/ + Eigen::VectorXf kernel(5); + kernel[0] = 1.0f; + kernel[1] = 4.0f; + kernel[2] = 6.0f; + kernel[3] = 4.0f; + kernel[4] = 1.0f; + + + while (true) { + + if (widthPot < 32 || heightPot < 32) break; + + size_t level = _pyramid.size() - 1; + + image::Image buffer(widthPot, heightPot); + image::Image smoothed(widthPot, heightPot); + + convolveHorizontal(buffer, _pyramid[level], kernel); + convolveVertical(smoothed, buffer, kernel); + + widthPot = widthPot / 2; + heightPot = heightPot / 2; + image::Image reduced; + downscale(reduced, smoothed); + + _pyramid.push_back(reduced); + } + + + return true; + } + + size_t getPyramidSize() { + return _pyramid.size(); + } + + const std::vector> & getLevels() { + return _pyramid; + } + +protected: + std::vector> _pyramid; +}; + class GaussianPyramid { public: bool process(const 
aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { @@ -401,17 +558,23 @@ class GaussianPyramid { /*Create the gaussian kernel*/ - Eigen::VectorXf kernel = gaussian_kernel_vector(7, 2.0); + Eigen::VectorXf kernel(5); + kernel[0] = 1.0f; + kernel[1] = 4.0f; + kernel[2] = 6.0f; + kernel[3] = 4.0f; + kernel[4] = 1.0f; + /*Compute initial count of valid pixels*/ size_t countValidPixels = countValid(inputMask); while (true) { - if (widthPot < 128 || heightPot < 128) break; + if (widthPot < 32 || heightPot < 32) break; /* Make sure enough pixels are valid*/ - if (countValidPixels < 128*128) break; + if (countValidPixels < 4) break; size_t level = _pyramid_color.size() - 1; @@ -432,6 +595,7 @@ class GaussianPyramid { _pyramid_mask.push_back(reducedMask); } + return true; } @@ -468,7 +632,13 @@ class LaplacianPyramid { const std::vector> & gp_masks = gp.getMasks(); /*Create the gaussian kernel*/ - Eigen::VectorXf kernel = gaussian_kernel_vector(7, 2.0); + Eigen::VectorXf kernel(5); + kernel[0] = 1.0f; + kernel[1] = 4.0f; + kernel[2] = 6.0f; + kernel[3] = 4.0f; + kernel[4] = 1.0f; + for (int level = 0; level < gp_colors.size() - 1; level++) { @@ -496,6 +666,8 @@ class LaplacianPyramid { _pyramid_color.push_back(diff_color); _pyramid_mask.push_back(diff_mask); + + } _pyramid_color.push_back(gp_colors[gp_colors.size() - 1]); @@ -511,7 +683,12 @@ class LaplacianPyramid { image::Image prev_mask = _pyramid_mask[_pyramid_mask.size() - 1]; /*Create the gaussian kernel*/ - Eigen::VectorXf kernel = gaussian_kernel_vector(7, 2.0); + Eigen::VectorXf kernel(5); + kernel[0] = 1.0f; + kernel[1] = 4.0f; + kernel[2] = 6.0f; + kernel[3] = 4.0f; + kernel[4] = 1.0f; for (int level = _pyramid_color.size() - 2; level >= max_level; level--) { @@ -542,6 +719,10 @@ class LaplacianPyramid { if (!addition(prev_color, prev_mask, smoothed, prev_mask_upscaled, diff_color, diff_mask)) { return false; } + + char filename[FILENAME_MAX]; + sprintf(filename, 
"/home/mmoc/output_%d.exr", level); + image::writeImage(filename, prev_color, image::EImageColorSpace::NO_CONVERSION); } _stack_result = prev_color; @@ -550,7 +731,15 @@ class LaplacianPyramid { return true; } - bool merge(const LaplacianPyramid & other) { + bool merge(const LaplacianPyramid & other, const image::Image & weights) { + + GaussianPyramidNoMask pyramid_weights; + pyramid_weights.process(weights); + + + const std::vector> & vector_weights = pyramid_weights.getLevels(); + + for (int level = 0; level < _pyramid_color.size(); level++) { @@ -558,6 +747,7 @@ class LaplacianPyramid { image::Image & aMask = _pyramid_mask[level]; const image::Image & bColor = other._pyramid_color[level]; const image::Image & bMask = other._pyramid_mask[level]; + const image::Image & lweights = vector_weights[level]; for (int i = 0; i < bColor.Height(); i++) { @@ -565,9 +755,14 @@ class LaplacianPyramid { if (aMask(i, j)) { if (bMask(i, j)) { - aColor(i, j).r() = 0.5f * aColor(i, j).r() + 0.5f * bColor(i, j).r(); - aColor(i, j).g() = 0.5f * aColor(i, j).g() + 0.5f * bColor(i, j).g(); - aColor(i, j).b() = 0.5f * aColor(i, j).b() + 0.5f * bColor(i, j).b(); + + float w = 0.5f;//lweights(i, j); + float iw = 1.0f - w; + + + aColor(i, j).r() = iw * aColor(i, j).r() + w * bColor(i, j).r(); + aColor(i, j).g() = iw * aColor(i, j).g() + w * bColor(i, j).g(); + aColor(i, j).b() = iw * aColor(i, j).b() + w * bColor(i, j).b(); } else { //do nothing @@ -762,6 +957,7 @@ class LaplacianCompositer : public AlphaCompositer { aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBfColor(0.0f)); aliceVision::image::Image viewmask(_panorama.Width(), _panorama.Height(), true, 0); + aliceVision::image::Image weight(_panorama.Width(), _panorama.Height(), true, 0); for (int i = 0; i < color.Height(); i++) { @@ -777,13 +973,21 @@ class LaplacianCompositer : public AlphaCompositer { view(di, dj) = color(i, j); viewmask(di, dj) = inputMask(i, j); + + if (inputMask(i ,j)) { + 
if (inputWeights(i,j) > _weightmap(di, dj)) { + weight(di, dj) = 1.0; + } + else { + weight(di, dj) = 0.0; + } + } } } LaplacianPyramid pyramid_add; pyramid_add.process(view, viewmask); - if (pyramid_panorama.getDepth() > pyramid_add.getDepth()) { pyramid_panorama.reducePyramidDepth(pyramid_add.getDepth()); } @@ -791,7 +995,8 @@ class LaplacianCompositer : public AlphaCompositer { pyramid_add.reducePyramidDepth(pyramid_panorama.getDepth()); } - pyramid_panorama.merge(pyramid_add); + + pyramid_panorama.merge(pyramid_add, weight); pyramid_panorama.stack(0); const aliceVision::image::Image & img = pyramid_panorama.getStackResult(); @@ -800,6 +1005,27 @@ class LaplacianCompositer : public AlphaCompositer { _panorama = img.block(0, 0, _panorama.Height(), _panorama.Width()); _mask = mask.block(0, 0, _panorama.Height(), _panorama.Width()); + for (int i = 0; i < inputWeights.Height(); i++) { + + int di = i + offset_y; + + for (int j = 0; j < inputWeights.Width(); j++) { + + int dj = j + offset_x; + + if (dj >= _panorama.Width()) { + dj = dj - _panorama.Width(); + } + + if (!inputMask(i, j)) { + continue; + } + + _weightmap(di, dj) = std::max(_weightmap(di, dj), inputWeights(i, j)); + } + } + + return true; } @@ -905,7 +1131,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; - if (pos == 24 || pos == 25 || pos == 26) { + /*if (pos == 24 || pos == 25 || pos == 26)*/ { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); cv.weights_path = item.second.get("filename_weights"); From c2696eafb859467f4ea2af79289b372fae499367 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Mon, 25 Nov 2019 08:55:59 +0100 Subject: [PATCH 123/174] add option for multiband --- .../pipeline/main_panoramaCompositing.cpp | 108 ++++++++++-------- 1 file changed, 60 insertions(+), 48 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp 
b/src/software/pipeline/main_panoramaCompositing.cpp index 226589f111..7184974862 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -42,7 +42,7 @@ typedef struct { } ConfigView; Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { - + int radius = kernel_length / 2; Eigen::VectorXd x(kernel_length + 1); @@ -54,7 +54,7 @@ Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { for (int i = 0; i < kernel_length + 1; i++) { cdf(i) = 0.5 * (1.0 + std::erf(x(i)/(sigma * sqrt(2.0)))); } - + Eigen::VectorXd k1d(kernel_length); for (int i = 0; i < kernel_length; i++) { @@ -91,7 +91,7 @@ bool convolveHorizontal(image::Image & output, const image::Im float sum_mask = 0.0f; if (!mask(i, j)) { - output(i, j) = image::RGBfColor(0.0); + output(i, j) = image::RGBfColor(0.0); continue; } @@ -181,11 +181,11 @@ bool convolveVertical(image::Image & output, const image::Imag float sum_mask = 0.0f; if (!mask(i, j)) { - output(i, j) = image::RGBfColor(0.0); + output(i, j) = image::RGBfColor(0.0); continue; } - + for (int k = 0; k < kernel.size(); k++) { double w = kernel(k); @@ -223,7 +223,7 @@ bool convolveVertical(image::Image & output, const image::Image & float sum = 0.0f; float sum_mask = 0.0f; - + for (int k = 0; k < kernel.size(); k++) { double w = kernel(k); @@ -326,7 +326,7 @@ bool upscale(aliceVision::image::Image & outputColor, aliceVis outputMask(di, dj + 1) = 0; outputMask(di + 1, dj) = 0; outputMask(di + 1, dj + 1) = 0; - + continue; } @@ -372,7 +372,7 @@ bool difference(aliceVision::image::Image & outputColor, alice for (int i = 0; i < height; i++) { for (int j = 0; j < width; j++) { - + if (aMask(i, j)) { if (bMask(i, j)) { outputColor(i, j) = aColor(i, j) - bColor(i, j); @@ -427,7 +427,7 @@ bool addition(aliceVision::image::Image & outputColor, aliceVi for (int i = 0; i < height; i++) { for (int j = 0; j < width; j++) { - + if (aMask(i, j)) { if (bMask(i, 
j)) { outputColor(i, j).r() = aColor(i, j).r() + bColor(i, j).r(); @@ -485,7 +485,7 @@ class GaussianPyramidNoMask { image::Image inputPot(widthPot, heightPot, true, 0.0f); inputPot.block(0, 0, height, width) = input; - _pyramid.push_back(inputPot); + _pyramid.push_back(inputPot); /*Create the gaussian kernel*/ Eigen::VectorXf kernel(5); @@ -512,7 +512,7 @@ class GaussianPyramidNoMask { heightPot = heightPot / 2; image::Image reduced; downscale(reduced, smoothed); - + _pyramid.push_back(reduced); } @@ -555,7 +555,7 @@ class GaussianPyramid { _pyramid_color.push_back(inputColorPot); _pyramid_mask.push_back(inputMaskPot); - + /*Create the gaussian kernel*/ Eigen::VectorXf kernel(5); @@ -589,10 +589,12 @@ class GaussianPyramid { image::Image reducedColor; image::Image reducedMask; downscale(reducedColor, reducedMask, smoothed, _pyramid_mask[level]); - + countValidPixels = countValid(reducedMask); _pyramid_color.push_back(reducedColor); _pyramid_mask.push_back(reducedMask); + + break; } @@ -649,7 +651,7 @@ class LaplacianPyramid { image::Image prev_color_upscaled(color.Width(), color.Height()); image::Image prev_mask_upscaled(mask.Width(), mask.Height()); - + image::Image diff_color(color.Width(), color.Height()); image::Image buffer(color.Width(), color.Height()); image::Image smoothed(color.Width(), color.Height()); @@ -659,7 +661,7 @@ class LaplacianPyramid { convolveHorizontal(buffer, prev_color_upscaled, prev_mask_upscaled, kernel); convolveVertical(smoothed, buffer, prev_mask_upscaled, kernel); - + if (!difference(diff_color, diff_mask, color, mask, smoothed, prev_mask_upscaled)) { return false; } @@ -667,7 +669,7 @@ class LaplacianPyramid { _pyramid_color.push_back(diff_color); _pyramid_mask.push_back(diff_mask); - + } _pyramid_color.push_back(gp_colors[gp_colors.size() - 1]); @@ -690,8 +692,9 @@ class LaplacianPyramid { kernel[3] = 4.0f; kernel[4] = 1.0f; + for (int level = _pyramid_color.size() - 2; level >= max_level; level--) { - + const image::Image 
diff_color = _pyramid_color[level]; const image::Image diff_mask = _pyramid_mask[level]; @@ -720,10 +723,10 @@ class LaplacianPyramid { return false; } - char filename[FILENAME_MAX]; + /*char filename[FILENAME_MAX]; sprintf(filename, "/home/mmoc/output_%d.exr", level); - image::writeImage(filename, prev_color, image::EImageColorSpace::NO_CONVERSION); - } + image::writeImage(filename, prev_color, image::EImageColorSpace::NO_CONVERSION);*/ + } _stack_result = prev_color; _stack_mask = prev_mask; @@ -735,7 +738,7 @@ class LaplacianPyramid { GaussianPyramidNoMask pyramid_weights; pyramid_weights.process(weights); - + const std::vector> & vector_weights = pyramid_weights.getLevels(); @@ -750,15 +753,15 @@ class LaplacianPyramid { const image::Image & lweights = vector_weights[level]; for (int i = 0; i < bColor.Height(); i++) { - + for (int j = 0; j < bColor.Width(); j++) { if (aMask(i, j)) { if (bMask(i, j)) { - float w = 0.5f;//lweights(i, j); + float w = lweights(i, j); float iw = 1.0f - w; - + aColor(i, j).r() = iw * aColor(i, j).r() + w * bColor(i, j).r(); aColor(i, j).g() = iw * aColor(i, j).g() + w * bColor(i, j).g(); @@ -827,7 +830,7 @@ class LaplacianPyramid { protected: std::vector> _pyramid_color; std::vector> _pyramid_mask; - + image::Image _stack_result; image::Image _stack_mask; @@ -838,12 +841,12 @@ class LaplacianPyramid { class Compositer { public: Compositer(size_t outputWidth, size_t outputHeight) : - _panorama(outputWidth, outputHeight, true, image::RGBfColor(1.0f, 0.0f, 0.0f)), + _panorama(outputWidth, outputHeight, true, image::RGBfColor(1.0f, 0.0f, 0.0f)), _mask(outputWidth, outputHeight, true, false) { } - virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, size_t offset_x, size_t offset_y) { + virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { for (size_t i 
= 0; i < color.Height(); i++) { @@ -853,11 +856,11 @@ class Compositer { } for (size_t j = 0; j < color.Width(); j++) { - + if (!inputMask(i, j)) { continue; } - + size_t pano_j = offset_x + j; if (pano_j >= _panorama.Width()) { pano_j = pano_j - _panorama.Width(); @@ -887,10 +890,10 @@ class Compositer { class AlphaCompositer : public Compositer { public: - AlphaCompositer(size_t outputWidth, size_t outputHeight) : + AlphaCompositer(size_t outputWidth, size_t outputHeight) : Compositer(outputWidth, outputHeight), _weightmap(outputWidth, outputHeight, true, 0.0f) { - + } virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { @@ -903,11 +906,11 @@ class AlphaCompositer : public Compositer { } for (size_t j = 0; j < color.Width(); j++) { - + if (!inputMask(i, j)) { continue; } - + size_t pano_j = offset_x + j; if (pano_j >= _panorama.Width()) { pano_j = pano_j - _panorama.Width(); @@ -927,7 +930,7 @@ class AlphaCompositer : public Compositer { _panorama(pano_i, pano_j).r() = wp * _panorama(pano_i, pano_j).r() + wc * color(i, j).r(); _panorama(pano_i, pano_j).g() = wp * _panorama(pano_i, pano_j).g() + wc * color(i, j).g(); _panorama(pano_i, pano_j).b() = wp * _panorama(pano_i, pano_j).b() + wc * color(i, j).b(); - + _weightmap(pano_i, pano_j) = wtotal; } @@ -945,9 +948,9 @@ class AlphaCompositer : public Compositer { class LaplacianCompositer : public AlphaCompositer { public: - LaplacianCompositer(size_t outputWidth, size_t outputHeight) : + LaplacianCompositer(size_t outputWidth, size_t outputHeight) : AlphaCompositer(outputWidth, outputHeight) { - + } virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { @@ -995,11 +998,11 @@ class LaplacianCompositer : public AlphaCompositer { 
pyramid_add.reducePyramidDepth(pyramid_panorama.getDepth()); } - + pyramid_panorama.merge(pyramid_add, weight); pyramid_panorama.stack(0); - const aliceVision::image::Image & img = pyramid_panorama.getStackResult(); + const aliceVision::image::Image & img = pyramid_panorama.getStackResult(); const aliceVision::image::Image & mask = pyramid_panorama.getStackMask(); _panorama = img.block(0, 0, _panorama.Height(), _panorama.Width()); @@ -1020,7 +1023,7 @@ class LaplacianCompositer : public AlphaCompositer { if (!inputMask(i, j)) { continue; } - + _weightmap(di, dj) = std::max(_weightmap(di, dj), inputWeights(i, j)); } } @@ -1056,7 +1059,10 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ + bool multiband = false; po::options_description optionalParams("Optional parameters"); + optionalParams.add_options() + ("multiband,m", po::value(&multiband)->required(), "Use multi-band blending."); allParams.add(optionalParams); /** @@ -1106,7 +1112,7 @@ int main(int argc, char **argv) { */ system::Logger::get()->setLogLevel(verboseLevel); - + /*Load output from previous warping step*/ std::stringstream ss; ss << inputDirectory << "/config_views.json"; @@ -1119,7 +1125,7 @@ int main(int argc, char **argv) { bpt::ptree configTree; bpt::read_json(ss.str(), configTree); - + std::pair panoramaSize; panoramaSize.first = configTree.get("panoramaWidth"); panoramaSize.second = configTree.get("panoramaHeight"); @@ -1131,7 +1137,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; - /*if (pos == 24 || pos == 25 || pos == 26)*/ { + { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); cv.weights_path = item.second.get("filename_weights"); @@ -1142,10 +1148,16 @@ int main(int argc, char **argv) { pos++; } - - LaplacianCompositer compositer(panoramaSize.first, panoramaSize.second); + std::unique_ptr compositer; + if (multiband) { + compositer = std::unique_ptr(new 
LaplacianCompositer(panoramaSize.first, panoramaSize.second)); + } + else { + compositer = std::unique_ptr(new AlphaCompositer(panoramaSize.first, panoramaSize.second)); + } + for (const ConfigView & cv : configViews) { - + /** * Load image and convert it to linear colorspace */ @@ -1171,15 +1183,15 @@ int main(int argc, char **argv) { image::readImage(weightsPath, weights, image::EImageColorSpace::NO_CONVERSION); - compositer.append(source, mask, weights, cv.offset_x, cv.offset_y); + compositer->append(source, mask, weights, cv.offset_x, cv.offset_y); } ALICEVISION_LOG_INFO("Write output panorama to file " << outputPanorama); - const aliceVision::image::Image & panorama = compositer.getPanorama(); + const aliceVision::image::Image & panorama = compositer->getPanorama(); image::writeImage(outputPanorama, panorama, image::EImageColorSpace::SRGB); - + return EXIT_SUCCESS; } \ No newline at end of file From fdce092bba13018f23895afb16ef9eebd9204a37 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Tue, 26 Nov 2019 13:21:44 +0100 Subject: [PATCH 124/174] trials without gaussians --- .../pipeline/main_panoramaCompositing.cpp | 731 +++--------------- 1 file changed, 126 insertions(+), 605 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 7184974862..3bd180225c 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -41,309 +41,6 @@ typedef struct { std::string weights_path; } ConfigView; -Eigen::VectorXf gaussian_kernel_vector(size_t kernel_length, float sigma) { - - int radius = kernel_length / 2; - - Eigen::VectorXd x(kernel_length + 1); - for (int i = 0; i < kernel_length + 1; i++) { - x(i) = i - radius; - } - - Eigen::VectorXd cdf(kernel_length + 1); - for (int i = 0; i < kernel_length + 1; i++) { - cdf(i) = 0.5 * (1.0 + std::erf(x(i)/(sigma * sqrt(2.0)))); - } - - - Eigen::VectorXd k1d(kernel_length); - for (int i 
= 0; i < kernel_length; i++) { - k1d(i) = cdf(i + 1) - cdf(i); - } - - double sum = k1d.sum(); - k1d = k1d / sum; - - return k1d.cast(); -} - -bool convolveHorizontal(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::VectorXf & kernel) { - - if (output.size() != input.size()) { - return false; - } - - if (output.size() != mask.size()) { - return false; - } - - if (kernel.size() % 2 == 0) { - return false; - } - - int radius = kernel.size() / 2; - - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { - - image::RGBfColor sum = image::RGBfColor(0.0); - float sum_mask = 0.0f; - - if (!mask(i, j)) { - output(i, j) = image::RGBfColor(0.0); - continue; - } - - for (int k = 0; k < kernel.size(); k++) { - - double w = kernel(k); - int col = j + k - radius; - - if (col < 0 || col >= input.Width()) { - continue; - } - - if (!mask(i, col)) { - continue; - } - - sum += w * input(i, col); - sum_mask += w; - } - - output(i, j) = sum / sum_mask; - } - } - - return true; -} - -bool convolveHorizontal(image::Image & output, const image::Image & input, const Eigen::VectorXf & kernel) { - - if (output.size() != input.size()) { - return false; - } - - if (kernel.size() % 2 == 0) { - return false; - } - - int radius = kernel.size() / 2; - - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { - - float sum = 0.0f; - float sum_mask = 0.0f; - - for (int k = 0; k < kernel.size(); k++) { - - double w = kernel(k); - int col = j + k - radius; - - if (col < 0 || col >= input.Width()) { - continue; - } - - sum += w * input(i, col); - sum_mask += w; - } - - output(i, j) = sum / sum_mask; - } - } - - return true; -} - -bool convolveVertical(image::Image & output, const image::Image & input, const image::Image & mask, const Eigen::VectorXf & kernel) { - - if (output.size() != input.size()) { - return false; - } - - if (output.size() != mask.size()) { - return false; - } - - if 
(kernel.size() % 2 == 0) { - return false; - } - - int radius = kernel.size() / 2; - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { - - image::RGBfColor sum = image::RGBfColor(0.0); - float sum_mask = 0.0f; - - if (!mask(i, j)) { - output(i, j) = image::RGBfColor(0.0); - continue; - } - - - for (int k = 0; k < kernel.size(); k++) { - - double w = kernel(k); - int row = i + k - radius; - - if (row < 0 || row >= input.Height()) { - continue; - } - - sum += w * input(row, j); - sum_mask += w; - } - - output(i, j) = sum / sum_mask; - } - } - - return true; -} - -bool convolveVertical(image::Image & output, const image::Image & input, const Eigen::VectorXf & kernel) { - - if (output.size() != input.size()) { - return false; - } - - if (kernel.size() % 2 == 0) { - return false; - } - - int radius = kernel.size() / 2; - - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { - - float sum = 0.0f; - float sum_mask = 0.0f; - - for (int k = 0; k < kernel.size(); k++) { - - double w = kernel(k); - int row = i + k - radius; - - if (row < 0 || row >= input.Height()) { - continue; - } - - sum += w * input(row, j); - sum_mask += w; - } - - output(i, j) = sum / sum_mask; - } - } - - return true; -} - -bool downscale(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { - - size_t width = inputColor.Width(); - size_t height = inputColor.Height(); - - if (inputMask.Width() != width || inputMask.Height() != height) { - return false; - } - - size_t output_width = width / 2; - size_t output_height = height / 2; - - outputColor = image::Image(output_width, output_height); - outputMask = image::Image(output_width, output_height); - - for (int i = 0; i < output_height; i++) { - for (int j = 0; j < output_width; j++) { - outputMask(i, j) = inputMask(i * 2, j * 2); - - if (!outputMask(i, j)) { - 
outputColor(i, j) = image::RGBfColor(0.0f); - } - else { - outputColor(i, j) = inputColor(i * 2, j * 2); - } - } - } - - return true; -} - -bool downscale(aliceVision::image::Image & outputColor, const aliceVision::image::Image & inputColor) { - - size_t width = inputColor.Width(); - size_t height = inputColor.Height(); - - size_t output_width = width / 2; - size_t output_height = height / 2; - - outputColor = image::Image(output_width, output_height); - - for (int i = 0; i < output_height; i++) { - for (int j = 0; j < output_width; j++) { - outputColor(i, j) = inputColor(i * 2, j * 2); - } - } - - return true; -} - -bool upscale(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { - - size_t width = inputColor.Width(); - size_t height = inputColor.Height(); - - if (inputMask.Width() != width || inputMask.Height() != height) { - return false; - } - - size_t output_width = width * 2; - size_t output_height = height * 2; - - outputColor = image::Image(output_width, output_height); - outputMask = image::Image(output_width, output_height); - - for (int i = 0; i < height; i++) { - - int di = i * 2; - - for (int j = 0; j < width; j++) { - int dj = j * 2; - - if (!inputMask(i, j)) { - outputColor(di, dj) = image::RGBfColor(0.0f); - outputColor(di, dj + 1) = image::RGBfColor(0.0f); - outputColor(di + 1, dj) = image::RGBfColor(0.0f); - outputColor(di + 1, dj + 1) = image::RGBfColor(0.0f); - - outputMask(di, dj) = 0; - outputMask(di, dj + 1) = 0; - outputMask(di + 1, dj) = 0; - outputMask(di + 1, dj + 1) = 0; - - continue; - } - - outputColor(di, dj).r() = inputColor(i, j); - outputColor(di, dj + 1) = inputColor(i, j); - outputColor(di + 1, dj) = inputColor(i, j); - outputColor(di + 1, dj + 1) = inputColor(i, j); - - outputMask(di, dj) = 255; - outputMask(di, dj + 1) = 255; - outputMask(di + 1, dj) = 255; - outputMask(di + 1, dj + 1) = 255; - } - } - - 
return true; -} bool difference(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & aColor, const aliceVision::image::Image & aMask, const aliceVision::image::Image & bColor, const aliceVision::image::Image & bMask) { @@ -471,347 +168,180 @@ size_t countValid(const aliceVision::image::Image & inputMask) { return count; } -class GaussianPyramidNoMask { -public: - bool process(const aliceVision::image::Image & input) { - - _pyramid.clear(); - - /*Make a 2**n size image, easier for multiple consecutive half size*/ - size_t width = input.Width(); - size_t height = input.Height(); - size_t widthPot = pow(2.0, std::ceil(std::log2(width))); - size_t heightPot = pow(2.0, std::ceil(std::log2(height))); - image::Image inputPot(widthPot, heightPot, true, 0.0f); - inputPot.block(0, 0, height, width) = input; - - _pyramid.push_back(inputPot); - - /*Create the gaussian kernel*/ - Eigen::VectorXf kernel(5); - kernel[0] = 1.0f; - kernel[1] = 4.0f; - kernel[2] = 6.0f; - kernel[3] = 4.0f; - kernel[4] = 1.0f; - - while (true) { - - if (widthPot < 32 || heightPot < 32) break; - - size_t level = _pyramid.size() - 1; - - image::Image buffer(widthPot, heightPot); - image::Image smoothed(widthPot, heightPot); - - convolveHorizontal(buffer, _pyramid[level], kernel); - convolveVertical(smoothed, buffer, kernel); - - widthPot = widthPot / 2; - heightPot = heightPot / 2; - image::Image reduced; - downscale(reduced, smoothed); - - _pyramid.push_back(reduced); - } - - - return true; - } - - size_t getPyramidSize() { - return _pyramid.size(); - } - - const std::vector> & getLevels() { - return _pyramid; - } - -protected: - std::vector> _pyramid; -}; - -class GaussianPyramid { +class LaplacianPyramid { public: - bool process(const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { + bool process(const aliceVision::image::Image & inputColor, const image::Image & weights, size_t max_levels) { - 
_pyramid_color.clear(); - _pyramid_mask.clear(); + _original_width = inputColor.Width(); + _original_height = inputColor.Height(); - if (inputColor.size() != inputMask.size()) { - return false; - } + std::vector> pyramid_gaussian; - /*Make a 2**n size image, easier for multiple consecutive half size*/ - size_t width = inputColor.Width(); - size_t height = inputColor.Height(); - size_t widthPot = pow(2.0, std::ceil(std::log2(width))); - size_t heightPot = pow(2.0, std::ceil(std::log2(height))); - image::Image inputColorPot(widthPot, heightPot, true, image::RGBfColor(0.0f, 0.0f, 0.0f)); - image::Image inputMaskPot(widthPot, heightPot, true, 0); - inputColorPot.block(0, 0, height, width) = inputColor; - inputMaskPot.block(0, 0, height, width) = inputMask; + pyramid_gaussian.push_back(inputColor); + _pyramid_weights.push_back(weights); - _pyramid_color.push_back(inputColorPot); - _pyramid_mask.push_back(inputMaskPot); + /*Build pyramid using 2x2 kernel **/ + for (int level = 0; level < max_levels; level++) { + const image::Image & color = pyramid_gaussian[level]; + const image::Image & weight = _pyramid_weights[level]; - /*Create the gaussian kernel*/ - Eigen::VectorXf kernel(5); - kernel[0] = 1.0f; - kernel[1] = 4.0f; - kernel[2] = 6.0f; - kernel[3] = 4.0f; - kernel[4] = 1.0f; + image::Image nextColor(color.Width() / 2, color.Height() / 2, true, image::RGBfColor(0.0f)); + image::Image nextWeight(weight.Width() / 2, weight.Height() / 2, true, 0.0f); + for (int i = 0; i < nextColor.Height(); i++) { - /*Compute initial count of valid pixels*/ - size_t countValidPixels = countValid(inputMask); + int di = i * 2; - while (true) { + for (int j = 0; j < nextColor.Width(); j++) { + int dj = j * 2; - if (widthPot < 32 || heightPot < 32) break; + nextColor(i, j).r() = 0.25f * (weight(di, dj) * color(di, dj).r() + weight(di, dj + 1) * color(di, dj + 1).r() + weight(di + 1, dj) * color(di + 1, dj).r() + weight(di + 1, dj + 1) * color(di + 1, dj + 1).r()); + nextColor(i, j).g() = 
0.25f * (weight(di, dj) * color(di, dj).g() + weight(di, dj + 1) * color(di, dj + 1).g() + weight(di + 1, dj) * color(di + 1, dj).g() + weight(di + 1, dj + 1) * color(di + 1, dj + 1).g()); + nextColor(i, j).b() = 0.25f * (weight(di, dj) * color(di, dj).b() + weight(di, dj + 1) * color(di, dj + 1).b() + weight(di + 1, dj) * color(di + 1, dj).b() + weight(di + 1, dj + 1) * color(di + 1, dj + 1).b()); + nextWeight(i, j) = 0.25f * (weight(di, dj) + weight(di, dj + 1) + weight(di + 1, dj) + weight(di + 1, dj + 1)); + } + } - /* Make sure enough pixels are valid*/ - if (countValidPixels < 4) break; + pyramid_gaussian.push_back(nextColor); + _pyramid_weights.push_back(nextWeight); + } - size_t level = _pyramid_color.size() - 1; + /*Compute laplacian*/ + for (int level = 0; level < pyramid_gaussian.size() - 1; level++) { - image::Image buffer(widthPot, heightPot); - image::Image smoothed(widthPot, heightPot); + const image::Image & color = pyramid_gaussian[level]; + const image::Image & next_color = pyramid_gaussian[level + 1]; - convolveHorizontal(buffer, _pyramid_color[level], _pyramid_mask[level], kernel); - convolveVertical(smoothed, buffer, _pyramid_mask[level], kernel); + image::Image upscaled(color.Width(), color.Height(), true, image::RGBfColor(0.0f)); + image::Image difference(color.Width(), color.Height(), true, image::RGBfColor(0.0f)); - widthPot = widthPot / 2; - heightPot = heightPot / 2; - image::Image reducedColor; - image::Image reducedMask; - downscale(reducedColor, reducedMask, smoothed, _pyramid_mask[level]); + for (int i = 0; i < next_color.Height(); i++) { + int di = i * 2; + for (int j = 0; j < next_color.Width(); j++) { + int dj = j * 2; + upscaled(di, dj) = next_color(i, j); + upscaled(di, dj + 1) = next_color(i, j); + upscaled(di + 1, dj) = next_color(i, j); + upscaled(di + 1, dj + 1) = next_color(i, j); + } + } - countValidPixels = countValid(reducedMask); - _pyramid_color.push_back(reducedColor); - _pyramid_mask.push_back(reducedMask); + for (int 
i = 0; i < color.Height(); i++) { + for (int j = 0; j < color.Width(); j++) { + difference(i, j) = color(i, j) - upscaled(i, j); + } + } - break; + _pyramid_color.push_back(difference); } + _pyramid_color.push_back(pyramid_gaussian[pyramid_gaussian.size() - 1]); + return true; } - size_t getPyramidSize() { - return _pyramid_color.size(); - } + bool stack() { - const std::vector> & getColors() { - return _pyramid_color; - } + image::Image prev = _pyramid_color[_pyramid_color.size() - 1]; - const std::vector> & getMasks() { - return _pyramid_mask; - } + for (int level = _pyramid_color.size() - 2; level >= 0; level--) { -protected: - std::vector> _pyramid_color; - std::vector> _pyramid_mask; -}; - -class LaplacianPyramid { -public: - bool process(const aliceVision::image::Image & inputColor, const aliceVision::image::Image & inputMask) { - - _original_width = inputColor.Width(); - _original_height = inputColor.Height(); - - GaussianPyramid gp; - if (!gp.process(inputColor, inputMask)) { - return false; - } - - const std::vector> & gp_colors = gp.getColors(); - const std::vector> & gp_masks = gp.getMasks(); - - /*Create the gaussian kernel*/ - Eigen::VectorXf kernel(5); - kernel[0] = 1.0f; - kernel[1] = 4.0f; - kernel[2] = 6.0f; - kernel[3] = 4.0f; - kernel[4] = 1.0f; + + image::Image current = _pyramid_color[level]; + image::Image rescaled(prev.Width() * 2, prev.Height() * 2, true, image::RGBfColor(0.0f)); - for (int level = 0; level < gp_colors.size() - 1; level++) { - const image::Image prev_color = gp_colors[level + 1]; - const image::Image prev_mask = gp_masks[level + 1]; - const image::Image color = gp_colors[level]; - const image::Image mask = gp_masks[level]; + for (int i = 0; i < prev.Height(); i++) { - image::Image prev_color_upscaled(color.Width(), color.Height()); - image::Image prev_mask_upscaled(mask.Width(), mask.Height()); + int di = i * 2; - image::Image diff_color(color.Width(), color.Height()); - image::Image buffer(color.Width(), color.Height()); - 
image::Image smoothed(color.Width(), color.Height()); - image::Image diff_mask(mask.Width(), mask.Height()); + for (int j = 0; j < prev.Width(); j++) { - upscale(prev_color_upscaled, prev_mask_upscaled, prev_color, prev_mask); + int dj = j * 2; - convolveHorizontal(buffer, prev_color_upscaled, prev_mask_upscaled, kernel); - convolveVertical(smoothed, buffer, prev_mask_upscaled, kernel); - - if (!difference(diff_color, diff_mask, color, mask, smoothed, prev_mask_upscaled)) { - return false; + rescaled(di, dj) = prev(i, j); + rescaled(di, dj + 1) = prev(i, j); + rescaled(di + 1, dj) = prev(i, j); + rescaled(di + 1, dj + 1) = prev(i, j); + } } - _pyramid_color.push_back(diff_color); - _pyramid_mask.push_back(diff_mask); - + for (int i = 0; i < rescaled.Height(); i++) { + for (int j = 0; j < rescaled.Width(); j++) { + rescaled(i, j).r() = rescaled(i, j).r() + current(i, j).r(); + rescaled(i, j).g() = rescaled(i, j).g() + current(i, j).g(); + rescaled(i, j).b() = rescaled(i, j).b() + current(i, j).b(); + } + } + prev = rescaled; } - _pyramid_color.push_back(gp_colors[gp_colors.size() - 1]); - _pyramid_mask.push_back(gp_masks[gp_masks.size() - 1]); + _stack_result = prev; return true; } - bool stack(int max_level) { - - image::Image prev_color = _pyramid_color[_pyramid_color.size() - 1]; - image::Image prev_mask = _pyramid_mask[_pyramid_mask.size() - 1]; + bool merge(const LaplacianPyramid & other) { - /*Create the gaussian kernel*/ - Eigen::VectorXf kernel(5); - kernel[0] = 1.0f; - kernel[1] = 4.0f; - kernel[2] = 6.0f; - kernel[3] = 4.0f; - kernel[4] = 1.0f; - - - for (int level = _pyramid_color.size() - 2; level >= max_level; level--) { - - const image::Image diff_color = _pyramid_color[level]; - const image::Image diff_mask = _pyramid_mask[level]; - - image::Image prev_color_upscaled(diff_color.Width(), diff_color.Height()); - image::Image buffer(diff_color.Width(), diff_color.Height()); - image::Image smoothed(diff_color.Width(), diff_color.Height(), true, 
image::RGBfColor(0.0f)); - image::Image prev_mask_upscaled(diff_mask.Width(), diff_mask.Height()); - - if (!upscale(prev_color_upscaled, prev_mask_upscaled, prev_color, prev_mask)) { - return false; - } - - if (!convolveHorizontal(buffer, prev_color_upscaled, prev_mask_upscaled, kernel)) { - return false; - } - - if (!convolveVertical(smoothed, buffer, prev_mask_upscaled, kernel)) { - return false; - } - - prev_color = image::Image(diff_color.Width(), diff_color.Height(), true, image::RGBfColor(0.0f)); - prev_mask = image::Image(diff_mask.Width(), diff_mask.Height(), true, 0); - - - if (!addition(prev_color, prev_mask, smoothed, prev_mask_upscaled, diff_color, diff_mask)) { - return false; - } - - /*char filename[FILENAME_MAX]; - sprintf(filename, "/home/mmoc/output_%d.exr", level); - image::writeImage(filename, prev_color, image::EImageColorSpace::NO_CONVERSION);*/ + if (_pyramid_color.size() == 0) { + _pyramid_color = other._pyramid_color; + _pyramid_weights = other._pyramid_weights; + return true; } - _stack_result = prev_color; - _stack_mask = prev_mask; - - return true; - } - - bool merge(const LaplacianPyramid & other, const image::Image & weights) { - - GaussianPyramidNoMask pyramid_weights; - pyramid_weights.process(weights); - - - const std::vector> & vector_weights = pyramid_weights.getLevels(); - - - - for (int level = 0; level < _pyramid_color.size(); level++) { - - image::Image & aColor = _pyramid_color[level]; - image::Image & aMask = _pyramid_mask[level]; - const image::Image & bColor = other._pyramid_color[level]; - const image::Image & bMask = other._pyramid_mask[level]; - const image::Image & lweights = vector_weights[level]; - - for (int i = 0; i < bColor.Height(); i++) { - - for (int j = 0; j < bColor.Width(); j++) { - - if (aMask(i, j)) { - if (bMask(i, j)) { + for (int level = 0; level < _pyramid_color.size() - 1; level++) { + image::Image & color = _pyramid_color[level]; + image::Image & weight = _pyramid_weights[level]; - float w = 
lweights(i, j); - float iw = 1.0f - w; + const image::Image & ocolor = other._pyramid_color[level]; + const image::Image & oweight = other._pyramid_weights[level]; + for (int i = 0; i < color.Height(); i++) { + for (int j = 0; j < color.Width(); j++) { - aColor(i, j).r() = iw * aColor(i, j).r() + w * bColor(i, j).r(); - aColor(i, j).g() = iw * aColor(i, j).g() + w * bColor(i, j).g(); - aColor(i, j).b() = iw * aColor(i, j).b() + w * bColor(i, j).b(); - } - else { - //do nothing - } - } - else { - if (bMask(i, j)) { - aColor(i, j) = bColor(i, j); - aMask(i, j) = 1; - } - else { - aColor(i, j) = image::RGBfColor(0.0f); - aMask(i, j) = 0; - } + if (weight(i, j) < oweight(i, j)) { + color(i, j) = ocolor(i, j); } } } } - return true; - } + size_t max_level = _pyramid_color.size() - 1; + image::Image & color = _pyramid_color[max_level]; + image::Image & weight = _pyramid_weights[max_level]; + const image::Image & ocolor = other._pyramid_color[max_level]; + const image::Image & oweight = other._pyramid_weights[max_level]; - bool reducePyramidDepth(size_t newSize) { + for (int i = 0; i < color.Height(); i++) { + for (int j = 0; j < color.Width(); j++) { - if (newSize == 0) { - return false; - } + float w = weight(i, j); + float ow = oweight(i, j); + float sum = w + ow; - if (newSize > _pyramid_color.size()) { - return false; - } + if (sum > 1e-8) { + w = w / sum; + ow = ow / sum; - if (newSize == _pyramid_color.size()) { - return true; + color(i, j).r() = w * color(i, j).r() + ow * ocolor(i, j).r(); + color(i, j).g() = w * color(i, j).g() + ow * ocolor(i, j).g(); + color(i, j).b() = w * color(i, j).b() + ow * ocolor(i, j).b(); + } + } } - if (!stack(newSize - 1)) { - return false; + for (int level = 0; level < _pyramid_color.size(); level++) { + char filename[512]; + sprintf(filename, "/home/mmoc/color_%d.exr", level); + image::writeImage(filename, _pyramid_color[level], image::EImageColorSpace::NO_CONVERSION); } - _pyramid_color[newSize - 1] = _stack_result; - 
_pyramid_mask[newSize - 1] = _stack_mask; - - _pyramid_color.resize(newSize); - _pyramid_mask.resize(newSize); - return true; } @@ -829,7 +359,7 @@ class LaplacianPyramid { protected: std::vector> _pyramid_color; - std::vector> _pyramid_mask; + std::vector> _pyramid_weights; image::Image _stack_result; image::Image _stack_mask; @@ -955,11 +485,7 @@ class LaplacianCompositer : public AlphaCompositer { virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - LaplacianPyramid pyramid_panorama; - pyramid_panorama.process(_panorama, _mask); - aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBfColor(0.0f)); - aliceVision::image::Image viewmask(_panorama.Width(), _panorama.Height(), true, 0); aliceVision::image::Image weight(_panorama.Width(), _panorama.Height(), true, 0); for (int i = 0; i < color.Height(); i++) { @@ -974,39 +500,32 @@ class LaplacianCompositer : public AlphaCompositer { dj = dj - view.Width(); } - view(di, dj) = color(i, j); - viewmask(di, dj) = inputMask(i, j); - + /* Create binary mask */ if (inputMask(i ,j)) { + view(di, dj) = color(i, j); + if (inputWeights(i,j) > _weightmap(di, dj)) { - weight(di, dj) = 1.0; + weight(di, dj) = 1.0f; } else { - weight(di, dj) = 0.0; + weight(di, dj) = 0.0f; } } } } - LaplacianPyramid pyramid_add; - pyramid_add.process(view, viewmask); - - if (pyramid_panorama.getDepth() > pyramid_add.getDepth()) { - pyramid_panorama.reducePyramidDepth(pyramid_add.getDepth()); - } - else if (pyramid_panorama.getDepth() < pyramid_add.getDepth()) { - pyramid_add.reducePyramidDepth(pyramid_panorama.getDepth()); - } - + LaplacianPyramid pyramid; + pyramid.process(view, weight, 4); + _pyramid_panorama.merge(pyramid); + _pyramid_panorama.stack(); - pyramid_panorama.merge(pyramid_add, weight); - pyramid_panorama.stack(0); - const aliceVision::image::Image & img = 
pyramid_panorama.getStackResult(); - const aliceVision::image::Image & mask = pyramid_panorama.getStackMask(); + + const aliceVision::image::Image & img = _pyramid_panorama.getStackResult(); + //const aliceVision::image::Image & mask = _pyramid_panorama.getStackMask(); _panorama = img.block(0, 0, _panorama.Height(), _panorama.Width()); - _mask = mask.block(0, 0, _panorama.Height(), _panorama.Width()); + //_mask = mask.block(0, 0, _panorama.Height(), _panorama.Width());*/ for (int i = 0; i < inputWeights.Height(); i++) { @@ -1033,6 +552,7 @@ class LaplacianCompositer : public AlphaCompositer { } protected: + LaplacianPyramid _pyramid_panorama; }; int main(int argc, char **argv) { @@ -1137,6 +657,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; + if (pos == 24 || pos == 25) { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); From 6faa7b22883a1fe6723442e04599f390fd71609d Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Tue, 26 Nov 2019 21:03:01 +0100 Subject: [PATCH 125/174] [hdr] wip changing highlight correction --- src/aliceVision/hdr/hdrMerge.cpp | 43 +++++++++++++++++++------------- src/aliceVision/hdr/rgbCurve.cpp | 12 +++++++++ src/aliceVision/hdr/rgbCurve.hpp | 2 ++ 3 files changed, 40 insertions(+), 17 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index 137383c77a..8a827fef25 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -70,10 +70,9 @@ void hdrMerge::process(const std::vector< image::Image > &imag } const float maxLum = 1000.0f; - const float minLum = 0.0001f; rgbCurve weightShortestExposure = weight; - weightShortestExposure.invertAndScaleSecondPart(1.0f + clampedValueCorrection * maxLum); + weightShortestExposure.freezeSecondPartValues(); // invertAndScaleSecondPart(clampedValueCorrection * maxLum); #pragma omp parallel for for(int y = 0; y < height; ++y) @@ -83,6 
+82,8 @@ void hdrMerge::process(const std::vector< image::Image > &imag //for each pixels image::RGBfColor &radianceColor = radiance(y, x); + double isPixelClamped = 0.0; + for(std::size_t channel = 0; channel < 3; ++channel) { double wsum = 0.0; @@ -90,40 +91,39 @@ void hdrMerge::process(const std::vector< image::Image > &imag { int exposureIndex = 0; - // float highValue = images[exposureIndex](y, x)(channel); - // // https://www.desmos.com/calculator/xamvguu8zw - // // ____ - // // sigmoid inv: _______/ - // // 0 1 - // float clampedHighValue = sigmoidInv(0.0f, 1.0f, /*sigWidth=*/0.2f, /*sigMid=*/0.9f, highValue); - ////////// - - // for each images + + // for each image const double value = images[exposureIndex](y, x)(channel); const double time = times[exposureIndex]; + + // https://www.desmos.com/calculator/xamvguu8zw + // ____ + // sigmoid inv: _______/ + // 0 1 + const float isChannelClamped = sigmoidInv(0.0f, 1.0f, /*sigWidth=*/0.2f, /*sigMid=*/0.9f, value); + isPixelClamped += isChannelClamped; + // - // / - // weightShortestExposure: ____/ + // weightShortestExposure: _______ // _______/ // 0 1 - double w = std::max(0.f, weightShortestExposure(value, channel)); // - weight(0.05, 0) + double w = std::max(0.f, weightShortestExposure(value, channel)); const double r = response(value, channel); wsum += w * r / time; wdiv += w; - } for(std::size_t i = 1; i < images.size(); ++i) { - // for each images + // for each image const double value = images[i](y, x)(channel); const double time = times[i]; // // weight: ____ // _______/ \________ // 0 1 - double w = std::max(0.f, weight(value, channel)); // - weight(0.05, 0) + double w = std::max(0.f, weight(value, channel)); const double r = response(value, channel); wsum += w * r / time; @@ -141,6 +141,15 @@ void hdrMerge::process(const std::vector< image::Image > &imag radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetTime; } + double clampedValueLuminanceCompensation = isPixelClamped / 3.0; + double 
clampedValueLuminanceCompensationInv = (1.0 - clampedValueLuminanceCompensation); + assert(clampedValueLuminanceCompensation <= 1.0); + + for(std::size_t channel = 0; channel < 3; ++channel) + { + radianceColor(channel) = clampedValueLuminanceCompensationInv * radianceColor(channel) + + clampedValueLuminanceCompensation * clampedValueCorrection * maxLum; + } } } } diff --git a/src/aliceVision/hdr/rgbCurve.cpp b/src/aliceVision/hdr/rgbCurve.cpp index 8d03987515..55b89411a7 100644 --- a/src/aliceVision/hdr/rgbCurve.cpp +++ b/src/aliceVision/hdr/rgbCurve.cpp @@ -183,6 +183,18 @@ void rgbCurve::inverseAllValues() } } +void rgbCurve::freezeSecondPartValues() +{ + for (auto &curve : _data) + { + std::size_t midIndex = (curve.size() / 2); + for (std::size_t i = midIndex + 1; i < curve.size(); ++i) + { + curve[i] = curve[midIndex]; + } + } +} + void rgbCurve::invertAndScaleSecondPart(float scale) { for (auto &curve : _data) diff --git a/src/aliceVision/hdr/rgbCurve.hpp b/src/aliceVision/hdr/rgbCurve.hpp index 4169e01dab..0db12e7f77 100644 --- a/src/aliceVision/hdr/rgbCurve.hpp +++ b/src/aliceVision/hdr/rgbCurve.hpp @@ -193,6 +193,8 @@ class rgbCurve */ void inverseAllValues(); + void freezeSecondPartValues(); + void invertAndScaleSecondPart(float scale); /** From bbcd5e067591991c91d0ffe295650f280a02483e Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 28 Nov 2019 17:23:50 +0100 Subject: [PATCH 126/174] [hdr] use general camera exposure instead of shutter speed --- src/aliceVision/hdr/hdrMerge.cpp | 4 +- src/aliceVision/hdr/hdrMerge.hpp | 4 +- src/aliceVision/sfmData/SfMData.hpp | 24 +++++------ src/aliceVision/sfmData/View.hpp | 41 ++++++++----------- src/samples/texturing/main_evcorrection.cpp | 20 ++++----- src/software/convert/main_convertLDRToHDR.cpp | 35 +++++++++------- .../pipeline/main_prepareDenseScene.cpp | 14 +++---- 7 files changed, 71 insertions(+), 71 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index 
8a827fef25..16f18c00c8 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -47,7 +47,7 @@ void hdrMerge::process(const std::vector< image::Image > &imag const rgbCurve &weight, const rgbCurve &response, image::Image &radiance, - float targetTime, + float targetCameraExposure, bool robCalibrate, float clampedValueCorrection) { @@ -139,7 +139,7 @@ void hdrMerge::process(const std::vector< image::Image > &imag // double clampedLowValue = sigmoid(0.0f, 1.0f, /*sigWidth=*/0.01f, /*sigMid=*/0.005, lowValue); //} - radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetTime; + radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetCameraExposure; } double clampedValueLuminanceCompensation = isPixelClamped / 3.0; double clampedValueLuminanceCompensationInv = (1.0 - clampedValueLuminanceCompensation); diff --git a/src/aliceVision/hdr/hdrMerge.hpp b/src/aliceVision/hdr/hdrMerge.hpp index 95c4f29fd1..5ef4ee4ec6 100644 --- a/src/aliceVision/hdr/hdrMerge.hpp +++ b/src/aliceVision/hdr/hdrMerge.hpp @@ -21,7 +21,7 @@ class hdrMerge { * @param images * @param radiance * @param times - * @param targetTime + * @param targetCameraExposure * @param response */ void process(const std::vector< image::Image > &images, @@ -29,7 +29,7 @@ class hdrMerge { const rgbCurve &weight, const rgbCurve &response, image::Image &radiance, - float targetTime, + float targetCameraExposure, bool robCalibrate = false, float clampedValueCorrection = 1.f); diff --git a/src/aliceVision/sfmData/SfMData.hpp b/src/aliceVision/sfmData/SfMData.hpp index 9a3ef37700..972c13c9d6 100644 --- a/src/aliceVision/sfmData/SfMData.hpp +++ b/src/aliceVision/sfmData/SfMData.hpp @@ -324,29 +324,29 @@ class SfMData } /** - * @brief Get the median Exposure Value (Ev) of + * @brief Get the median Camera Exposure Setting * @return */ - float getMedianEv() const + float getMedianCameraExposureSetting() const { - std::vector evList; - evList.reserve(views.size()); + std::vector 
cameraExposureList; + cameraExposureList.reserve(views.size()); for(const auto& view : views) { - float ev = view.second->getEv(); - if(ev != -1.0f) + float ce = view.second->getCameraExposureSetting(); + if(ce != -1.0f) { - auto find = std::find(std::begin(evList), std::end(evList), ev); - if(find == std::end(evList)) - evList.emplace_back(ev); + auto find = std::find(std::begin(cameraExposureList), std::end(cameraExposureList), ce); + if(find == std::end(cameraExposureList)) + cameraExposureList.emplace_back(ce); } } - std::nth_element(evList.begin(), evList.begin() + evList.size()/2, evList.end()); - float evMedian = evList[evList.size()/2]; + std::nth_element(cameraExposureList.begin(), cameraExposureList.begin() + cameraExposureList.size()/2, cameraExposureList.end()); + float ceMedian = cameraExposureList[cameraExposureList.size()/2]; - return evMedian; + return ceMedian; } /** diff --git a/src/aliceVision/sfmData/View.hpp b/src/aliceVision/sfmData/View.hpp index 892f83f818..86c5595e86 100644 --- a/src/aliceVision/sfmData/View.hpp +++ b/src/aliceVision/sfmData/View.hpp @@ -191,34 +191,29 @@ class View return (!isPartOfRig() || _isIndependantPose); } - float getEv() const + /** + * + */ + float getCameraExposureSetting() const { - const float shutter = getMetadataShutter(); - const float aperture = getMetadataAperture(); - const float iso = static_cast(getMetadataISO()); + const float shutter = getMetadataShutter(); + const float aperture = getMetadataAperture(); + if (shutter < 0 || aperture < 0) + return -1.f; - if(shutter < 0 || aperture < 0 || iso < 0) - return -1; + const float iso = getMetadataISO(); + const float isoRatio = (iso < 0.f) ? 1.0 : (iso / 100.f); - // WIKIPEDIA : + log2f(iso/100.f) - float ev = log2f(std::pow(aperture, 2.0f) / shutter) - log2f(iso/100.f); - return ev; + // The value of the constant must be within the range 10.6 to 13.4, according to the standard. 
+ const float K = 12.07488f; + float cameraExposure = (shutter * isoRatio) / (aperture * aperture * K); + return cameraExposure; } - /** - * @brief Get the value of the gap bewteen the view's exposition and a reference exposition - * @param [refEv] the median exposition of all views - * @return the exposure compensation - */ - float getEvCompensation(float refEv) const - { - const float ev = getEv(); - if(ev == -1) - return 1.0f; - - return std::pow(2.0f, ev - refEv); - } - + float getEv() const + { + return std::log2(1.f/getCameraExposureSetting()); + } /** * @brief Return true if the given metadata name exists diff --git a/src/samples/texturing/main_evcorrection.cpp b/src/samples/texturing/main_evcorrection.cpp index 949af45003..b693fdb5e2 100644 --- a/src/samples/texturing/main_evcorrection.cpp +++ b/src/samples/texturing/main_evcorrection.cpp @@ -108,13 +108,13 @@ int main(int argc, char **argv) } - float evMedian = sfm_data.getMedianEv(); - ALICEVISION_LOG_INFO(" EV Median :" << evMedian); + float cameraExposureMedian = sfm_data.getMedianCameraExposureSetting(); + ALICEVISION_LOG_INFO(" EV Median :" << cameraExposureMedian); for(int i = 0; i < sfm_data.views.size(); ++i) { - sfmData::View view = *(sfm_data.views[i]); - float evComp = view.getEvCompensation(evMedian); + sfmData::View view = *(sfm_data.views[i]); + float evComp = cameraExposureMedian / view.getCameraExposureSetting(); image::Image img; image::readImage(view.getImagePath(), img, image::EImageColorSpace::LINEAR); @@ -123,9 +123,9 @@ int main(int argc, char **argv) img(pix) *= evComp; ALICEVISION_LOG_INFO(fs::path(view.getImagePath()).stem()); - ALICEVISION_LOG_INFO(" EV :" << view.getEv()); - ALICEVISION_LOG_INFO(" EV Comp:" << view.getEvCompensation(evMedian)); - + ALICEVISION_LOG_INFO(" EV: " << view.getEv()); + ALICEVISION_LOG_INFO(" EV Compensation: " << evComp); + std::string outputPath = outputFilePath + fs::path(view.getImagePath()).stem().string() + ".EXR"; oiio::ParamValueList metadata = 
image::getMetadataFromMap(view.getMetadata()); image::writeImage(outputPath, img, image::EImageColorSpace::LINEAR, metadata); @@ -167,8 +167,8 @@ int main(int argc, char **argv) // calculate median EV if necessary std::sort(evList.begin(), evList.end()); - evMedian = evList[evList.size()/2]; - ALICEVISION_LOG_INFO("Median EV: " << evMedian); + cameraExposureMedian = evList[evList.size()/2]; + ALICEVISION_LOG_INFO("Median EV: " << cameraExposureMedian); // write corrected images, not over exposed images for(int i = 0; i < imgList.size(); ++i) @@ -176,7 +176,7 @@ int main(int argc, char **argv) Image& img = imgList[i]; ALICEVISION_LOG_INFO(img.getName()); - float evComp = std::pow(2.0f, img.getEv() - evMedian); + float evComp = std::pow(2.0f, img.getEv() - cameraExposureMedian); ALICEVISION_LOG_INFO(" EV Compensation: " << evComp); for(int pix = 0; pix < img.width() * img.height(); ++pix) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index b51b3f2a64..4abba975af 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -326,20 +326,25 @@ int main(int argc, char * argv[]) { return (path_a.stem().string() < path_b.stem().string()); }); - /* - Make sure all images have same aperture - */ - float aperture = viewsOrderedByName[0]->getMetadataAperture(); - for (auto & view : viewsOrderedByName) { - - if (view->getMetadataAperture() != aperture) { - ALICEVISION_LOG_ERROR("Different apertures amongst the dataset"); - return EXIT_FAILURE; + { + // Put a warning, if the aperture changes. + std::set apertures; + for (auto & view : viewsOrderedByName) + { + apertures.insert(view->getMetadataAperture()); + } + if(apertures.size() != 1) + { + ALICEVISION_LOG_WARNING("Different apertures amongst the dataset. 
For correct HDR, you should only change the shutter speed (and eventually the ISO)."); + ALICEVISION_LOG_WARNING("Used apertures:"); + for (auto a : apertures) + { + ALICEVISION_LOG_WARNING(" * " << a); + } } } - - /*Make groups*/ + // Make groups std::vector>> groupedViews; std::vector> group; for (auto & view : viewsOrderedByName) { @@ -357,7 +362,7 @@ int main(int argc, char * argv[]) { /*Sort all images by exposure time*/ std::sort(group.begin(), group.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { if (a == nullptr || b == nullptr) return true; - return (a->getMetadataShutter() < b->getMetadataShutter()); + return (a->getCameraExposureSetting() < b->getCameraExposureSetting()); }); /*Target views are the middle exposed views*/ @@ -380,7 +385,7 @@ int main(int argc, char * argv[]) { for (int j = 0; j < group.size(); j++) { - float etime = group[j]->getMetadataShutter(); + float etime = group[j]->getCameraExposureSetting(); exposures.push_back(etime); } groupedExposures.push_back(exposures); @@ -540,9 +545,9 @@ int main(int argc, char * argv[]) { /* Merge HDR images */ hdr::hdrMerge merge; - float targetTime = targetView->getMetadataShutter(); + float targetCameraExposure = targetView->getCameraExposureSetting(); image::Image HDRimage; - merge.process(images, groupedExposures[g], fusionWeight, response, HDRimage, targetTime, false, clampedValueCorrection); + merge.process(images, groupedExposures[g], fusionWeight, response, HDRimage, targetCameraExposure, false, clampedValueCorrection); /* Output image file path */ std::string hdr_output_path; diff --git a/src/software/pipeline/main_prepareDenseScene.cpp b/src/software/pipeline/main_prepareDenseScene.cpp index 6b194f81dc..d0c38459bf 100644 --- a/src/software/pipeline/main_prepareDenseScene.cpp +++ b/src/software/pipeline/main_prepareDenseScene.cpp @@ -78,8 +78,8 @@ bool prepareDenseScene(const SfMData& sfmData, boost::progress_display progressBar(viewIds.size(), std::cout, "Exporting 
Scene Undistorted Images\n"); // for exposure correction - const float medianEv = sfmData.getMedianEv(); - ALICEVISION_LOG_INFO("median Ev : " << medianEv); + const float medianCameraExposure = sfmData.getMedianCameraExposureSetting(); + ALICEVISION_LOG_INFO("Median Camera Exposure: " << medianCameraExposure << ", Median EV: " << std::log2(1.0f/medianCameraExposure)); #pragma omp parallel for num_threads(3) for(int i = 0; i < viewIds.size(); ++i) @@ -195,16 +195,16 @@ bool prepareDenseScene(const SfMData& sfmData, readImage(srcImage, image, image::EImageColorSpace::LINEAR); // add exposure values to images metadata - float exposure = view->getEv(); - float exposureCompensation = view->getEvCompensation(medianEv); - metadata.push_back(oiio::ParamValue("AliceVision:EV", exposure)); + float cameraExposure = view->getCameraExposureSetting(); + float ev = std::log2(1.0 / cameraExposure); + float exposureCompensation = medianCameraExposure / cameraExposure; + metadata.push_back(oiio::ParamValue("AliceVision:EV", ev)); metadata.push_back(oiio::ParamValue("AliceVision:EVComp", exposureCompensation)); //exposure correction if(evCorrection) { - ALICEVISION_LOG_INFO("image " + std::to_string(viewId) + " Ev : " + std::to_string(exposure)); - ALICEVISION_LOG_INFO("image " + std::to_string(viewId) + " Ev compensation : " + std::to_string(exposureCompensation)); + ALICEVISION_LOG_INFO("View: " << viewId << ", Ev: " << ev << ", Ev compensation: " << exposureCompensation); for(int pix = 0; pix < image.Width() * image.Height(); ++pix) image(pix) = image(pix) * exposureCompensation; From 21a4f03df08433670dab6779f751de2da5f12234 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 28 Nov 2019 17:24:27 +0100 Subject: [PATCH 127/174] [software] LDRToHDR formatting --- src/software/convert/main_convertLDRToHDR.cpp | 163 +++++++++--------- 1 file changed, 78 insertions(+), 85 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp 
b/src/software/convert/main_convertLDRToHDR.cpp index 4abba975af..2ce2585f52 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -96,10 +96,8 @@ inline std::istream& operator>>(std::istream& in, ECalibrationMethod& calibratio } - - -int main(int argc, char * argv[]) { - +int main(int argc, char * argv[]) +{ std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); std::string sfmInputDataFilename = ""; std::string sfmOutputDataFilename = ""; @@ -113,10 +111,7 @@ int main(int argc, char * argv[]) { std::string calibrationWeightFunction = "default"; hdr::EFunctionType fusionWeightFunction = hdr::EFunctionType::GAUSSIAN; - - /***** - * DESCRIBE COMMAND LINE PARAMETERS - */ + // Command line parameters po::options_description allParams( "Parse external information about cameras used in a panorama.\n" "AliceVision PanoramaExternalInfo"); @@ -153,9 +148,6 @@ int main(int argc, char * argv[]) { allParams.add(requiredParams).add(optionalParams).add(logParams); - /** - * READ COMMAND LINE - */ po::variables_map vm; try { @@ -184,50 +176,49 @@ int main(int argc, char * argv[]) { ALICEVISION_COUT("Program called with the following parameters:"); ALICEVISION_COUT(vm); - /** - * set verbose level - **/ system::Logger::get()->setLogLevel(verboseLevel); - if (groupSize < 0) { + if (groupSize < 0) + { ALICEVISION_LOG_ERROR("Invalid number of brackets"); return EXIT_FAILURE; } - /*Analyze path*/ + // Analyze path boost::filesystem::path path(sfmOutputDataFilename); - std::string output_path = path.parent_path().string(); + std::string outputPath = path.parent_path().string(); - /** - * Read sfm data - */ + // Read sfm data sfmData::SfMData sfmData; if(!sfmDataIO::Load(sfmData, sfmInputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS | sfmDataIO::INTRINSICS))) { ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmInputDataFilename << "' cannot be read."); return 
EXIT_FAILURE; } - size_t countImages = sfmData.getViews().size(); - if (countImages == 0) { + if (countImages == 0) + { ALICEVISION_LOG_ERROR("The input SfMData contains no input !"); return EXIT_FAILURE; } - if (countImages % groupSize != 0) { + if (countImages % groupSize != 0) + { ALICEVISION_LOG_ERROR("The input SfMData file is not compatible with this bracket size"); return EXIT_FAILURE; } size_t countGroups = countImages / groupSize; - /*Make sure there is only one kind of image in dataset*/ - if (sfmData.getIntrinsics().size() > 2) { + // Make sure there is only one kind of image in dataset + if (sfmData.getIntrinsics().size() > 2) + { ALICEVISION_LOG_ERROR("Multiple intrinsics : Different kind of images in dataset"); return EXIT_FAILURE; } - /*If two intrinsics, may be some images are simply rotated*/ - if (sfmData.getIntrinsics().size() == 2) { + // If two intrinsics, may be some images are simply rotated + if (sfmData.getIntrinsics().size() == 2) + { const sfmData::Intrinsics & intrinsics = sfmData.getIntrinsics(); unsigned int w = intrinsics.begin()->second->w(); @@ -235,7 +226,8 @@ int main(int argc, char * argv[]) { unsigned int rw = intrinsics.rbegin()->second->w(); unsigned int rh = intrinsics.rbegin()->second->h(); - if (w != rh || h != rw) { + if (w != rh || h != rw) + { ALICEVISION_LOG_ERROR("Multiple intrinsics : Different kind of images in dataset"); return EXIT_FAILURE; } @@ -246,25 +238,32 @@ int main(int argc, char * argv[]) { size_t first = 0; size_t second = 0; sfmData::Views & views = sfmData.getViews(); - for (const auto & v : views) { - if (v.second->getIntrinsicId() == firstId) { + for (const auto & v : views) + { + if (v.second->getIntrinsicId() == firstId) + { first++; } - else { + else + { second++; } } - /* Set all view with the majority intrinsics */ - if (first > second) { - for (const auto & v : views) { + // Set all view with the majority intrinsics + if (first > second) + { + for (const auto & v : views) + { 
v.second->setIntrinsicId(firstId); } sfmData.getIntrinsics().erase(secondId); } - else { - for (const auto & v : views) { + else + { + for (const auto & v : views) + { v.second->setIntrinsicId(secondId); } @@ -272,49 +271,53 @@ int main(int argc, char * argv[]) { } } - /* Rotate needed images */ + // Rotate needed images { const sfmData::Intrinsics & intrinsics = sfmData.getIntrinsics(); unsigned int w = intrinsics.begin()->second->w(); unsigned int h = intrinsics.begin()->second->h(); sfmData::Views & views = sfmData.getViews(); - for (auto & v : views) { - if (v.second->getWidth() == h && v.second->getHeight() == w) { + for (auto & v : views) + { + if (v.second->getWidth() == h && v.second->getHeight() == w) + { ALICEVISION_LOG_INFO("Create intermediate rotated image !"); - /*Read original image*/ + // Read original image image::Image originalImage; image::readImage(v.second->getImagePath(), originalImage, image::EImageColorSpace::LINEAR); - /*Create a rotated image*/ + // Create a rotated image image::Image rotated(w, h); - for (int k = 0; k < h; k++) { - for (int l = 0; l < w; l++) { - rotated(k, l) = originalImage(l, rotated.Height() - 1 - k); + for (int y = 0; y < h; ++y) + { + for (int x = 0; x < w; x++) + { + rotated(y, x) = originalImage(x, rotated.Height() - 1 - y); } } - boost::filesystem::path old_path(v.second->getImagePath()); - std::string old_filename = old_path.stem().string(); + boost::filesystem::path origImgPath(v.second->getImagePath()); + std::string origFilename = origImgPath.stem().string(); - /*Save this image*/ - std::stringstream sstream; - sstream << output_path << "/" << old_filename << ".exr"; + // Save this image + std::string rotatedImagePath = (fs::path(outputPath) / (origFilename + ".exr")).string(); oiio::ParamValueList metadata = image::readImageMetadata(v.second->getImagePath()); - image::writeImage(sstream.str(), rotated, image::EImageColorSpace::AUTO, metadata); + image::writeImage(rotatedImagePath, rotated, 
image::EImageColorSpace::AUTO, metadata); - /*Update view for this modification*/ + // Update view for this modification v.second->setWidth(w); v.second->setHeight(h); - v.second->setImagePath(sstream.str()); + v.second->setImagePath(rotatedImagePath); } } } - /*Order views by their image names (without path and extension to make sure we handle rotated images) */ + // Order views by their image names (without path and extension to make sure we handle rotated images) std::vector> viewsOrderedByName; - for (auto & viewIt: sfmData.getViews()) { + for (auto & viewIt: sfmData.getViews()) + { viewsOrderedByName.push_back(viewIt.second); } std::sort(viewsOrderedByName.begin(), viewsOrderedByName.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { @@ -359,16 +362,16 @@ int main(int argc, char * argv[]) { std::vector> targetViews; for (auto & group : groupedViews) { - /*Sort all images by exposure time*/ + // Sort all images by exposure time std::sort(group.begin(), group.end(), [](const std::shared_ptr & a, const std::shared_ptr & b) -> bool { if (a == nullptr || b == nullptr) return true; return (a->getCameraExposureSetting() < b->getCameraExposureSetting()); }); - /*Target views are the middle exposed views*/ + // Target views are the middle exposed views int middleIndex = group.size() / 2; - /*If odd size, choose the more exposed view*/ + // If odd size, choose the more exposed view if (group.size() % 2 && group.size() > 1) { middleIndex++; } @@ -376,7 +379,7 @@ int main(int argc, char * argv[]) { targetViews.push_back(group[middleIndex]); } - /*Build exposure times table*/ + // Build exposure times table std::vector> groupedExposures; for (int i = 0; i < groupedViews.size(); i++) { @@ -391,7 +394,7 @@ int main(int argc, char * argv[]) { groupedExposures.push_back(exposures); } - /*Build table of file names*/ + // Build table of file names std::vector> groupedFilenames; for (int i = 0; i < groupedViews.size(); i++) { @@ -444,16 +447,14 @@ int 
main(int argc, char * argv[]) { { hdr::rgbCurve r = response; r.exponential(); // TODO - std::string outputFolder = fs::path(sfmOutputDataFilename).parent_path().string(); std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); - std::string outputResponsePath = (fs::path(outputFolder) / (std::string("response_log_") + methodName + std::string(".csv"))).string(); - std::string outputResponsePathHtml = (fs::path(outputFolder) / (std::string("response_log_") + methodName + std::string(".html"))).string(); + std::string outputResponsePath = (fs::path(outputPath) / (std::string("response_log_") + methodName + std::string(".csv"))).string(); + std::string outputResponsePathHtml = (fs::path(outputPath) / (std::string("response_log_") + methodName + std::string(".html"))).string(); r.write(outputResponsePath); r.writeHtml(outputResponsePathHtml, "Camera Response Curve " + methodName); ALICEVISION_LOG_INFO("Camera response function written as " << outputResponsePath); } - } break; case ECalibrationMethod::DEBEVEC: @@ -466,10 +467,9 @@ int main(int argc, char * argv[]) { calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, calibrationWeight, lambda, response); { - std::string outputFolder = fs::path(sfmOutputDataFilename).parent_path().string(); std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); - std::string outputResponsePath = (fs::path(outputFolder) / (std::string("response_log_") + methodName + std::string(".csv"))).string(); - std::string outputResponsePathHtml = (fs::path(outputFolder) / (std::string("response_log_") + methodName + std::string(".html"))).string(); + std::string outputResponsePath = (fs::path(outputPath) / (std::string("response_log_") + methodName + std::string(".csv"))).string(); + std::string outputResponsePathHtml = (fs::path(outputPath) / (std::string("response_log_") + methodName + std::string(".html"))).string(); 
response.write(outputResponsePath); response.writeHtml(outputResponsePathHtml, "Camera Response Curve " + methodName); @@ -507,10 +507,9 @@ int main(int argc, char * argv[]) { ALICEVISION_LOG_INFO("Calibration done."); { - std::string outputFolder = fs::path(sfmOutputDataFilename).parent_path().string(); std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); - std::string outputResponsePath = (fs::path(outputFolder) / (std::string("response_") + methodName + std::string(".csv"))).string(); - std::string outputResponsePathHtml = (fs::path(outputFolder) / (std::string("response_") + methodName + std::string(".html"))).string(); + std::string outputResponsePath = (fs::path(outputPath) / (std::string("response_") + methodName + std::string(".csv"))).string(); + std::string outputResponsePathHtml = (fs::path(outputPath) / (std::string("response_") + methodName + std::string(".html"))).string(); response.write(outputResponsePath); response.writeHtml(outputResponsePathHtml, "Camera Response Curve " + methodName); @@ -530,42 +529,36 @@ int main(int argc, char * argv[]) { { std::vector> images(groupSize); std::shared_ptr targetView = targetViews[g]; - if (targetView == nullptr) - { - ALICEVISION_LOG_ERROR("Null view"); - return EXIT_FAILURE; - } - /* Load all images of the group */ + // Load all images of the group for (int i = 0; i < groupSize; i++) { ALICEVISION_LOG_INFO("Load " << groupedFilenames[g][i]); image::readImage(groupedFilenames[g][i], images[i], (calibrationMethod == ECalibrationMethod::LINEAR) ? 
image::EImageColorSpace::LINEAR : image::EImageColorSpace::SRGB); } - /* Merge HDR images */ + // Merge HDR images hdr::hdrMerge merge; float targetCameraExposure = targetView->getCameraExposureSetting(); image::Image HDRimage; merge.process(images, groupedExposures[g], fusionWeight, response, HDRimage, targetCameraExposure, false, clampedValueCorrection); - /* Output image file path */ - std::string hdr_output_path; + // Output image file path std::stringstream sstream; - sstream << output_path << "/" << "hdr_" << std::setfill('0') << std::setw(4) << g << ".exr"; + sstream << "hdr_" << std::setfill('0') << std::setw(4) << g << ".exr"; + + std::string hdrImagePath = (fs::path(outputPath) / sstream.str()).string(); - /* Write an image with parameters from the target view */ + // Write an image with parameters from the target view oiio::ParamValueList targetMetadata = image::readImageMetadata(targetView->getImagePath()); - image::writeImage(sstream.str(), HDRimage, image::EImageColorSpace::AUTO, targetMetadata); + image::writeImage(hdrImagePath, HDRimage, image::EImageColorSpace::AUTO, targetMetadata); targetViews[g]->setImagePath(sstream.str()); vs[targetViews[g]->getViewId()] = targetViews[g]; } - /* - Save output sfmdata - */ - if (!sfmDataIO::Save(outputSfm, sfmOutputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS|sfmDataIO::INTRINSICS))) + // Export output sfmData + if (!sfmDataIO::Save(outputSfm, sfmOutputDataFilename, sfmDataIO::ESfMData::ALL)) { ALICEVISION_LOG_ERROR("Can not save output sfm file at " << sfmOutputDataFilename); return EXIT_FAILURE; From 9ee8b301162f3a90bbf5085f471d6ebedf0af1a2 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 29 Nov 2019 19:27:41 +0100 Subject: [PATCH 128/174] [hdr] highlight correction is now a post-processing step * keep a standard merge fusion and use another function for highlight post-processing * change default plateau weight from 12 to 8 * change camera exposure compensation * fnumber metadata: fallback (based 
on aperture) if fnumber is missing * highlight target value for clamped pixels is now in LUX --- src/aliceVision/hdr/RobertsonCalibrate.cpp | 2 +- src/aliceVision/hdr/hdrMerge.cpp | 120 ++++++++++++------ src/aliceVision/hdr/hdrMerge.hpp | 56 ++------ src/aliceVision/hdr/rgbCurve.cpp | 25 ++-- src/aliceVision/hdr/rgbCurve.hpp | 5 +- src/aliceVision/image/Image.hpp | 4 + src/aliceVision/sfmData/View.hpp | 24 ++-- src/software/convert/main_convertLDRToHDR.cpp | 67 ++++++---- 8 files changed, 167 insertions(+), 136 deletions(-) diff --git a/src/aliceVision/hdr/RobertsonCalibrate.cpp b/src/aliceVision/hdr/RobertsonCalibrate.cpp index f582c76848..a7cc36127a 100644 --- a/src/aliceVision/hdr/RobertsonCalibrate.cpp +++ b/src/aliceVision/hdr/RobertsonCalibrate.cpp @@ -124,7 +124,7 @@ void RobertsonCalibrate::process(const std::vector< std::vector< image::Image > &images, - const std::vector ×, - const rgbCurve &weight, - const rgbCurve &response, - image::Image &radiance, - float targetCameraExposure, - bool robCalibrate, - float clampedValueCorrection) + const std::vector ×, + const rgbCurve &weight, + const rgbCurve &response, + image::Image &radiance, + float targetCameraExposure) { //checks assert(!response.isEmpty()); @@ -69,10 +67,8 @@ void hdrMerge::process(const std::vector< image::Image > &imag ALICEVISION_LOG_TRACE(images[i].Width() << "x" << images[i].Height() << ", time: " << times[i]); } - const float maxLum = 1000.0f; - rgbCurve weightShortestExposure = weight; - weightShortestExposure.freezeSecondPartValues(); // invertAndScaleSecondPart(clampedValueCorrection * maxLum); + weightShortestExposure.freezeSecondPartValues(); #pragma omp parallel for for(int y = 0; y < height; ++y) @@ -82,8 +78,6 @@ void hdrMerge::process(const std::vector< image::Image > &imag //for each pixels image::RGBfColor &radianceColor = radiance(y, x); - double isPixelClamped = 0.0; - for(std::size_t channel = 0; channel < 3; ++channel) { double wsum = 0.0; @@ -96,13 +90,6 @@ void 
hdrMerge::process(const std::vector< image::Image > &imag const double value = images[exposureIndex](y, x)(channel); const double time = times[exposureIndex]; - // https://www.desmos.com/calculator/xamvguu8zw - // ____ - // sigmoid inv: _______/ - // 0 1 - const float isChannelClamped = sigmoidInv(0.0f, 1.0f, /*sigWidth=*/0.2f, /*sigMid=*/0.9f, value); - isPixelClamped += isChannelClamped; - // // weightShortestExposure: _______ // _______/ @@ -129,31 +116,90 @@ void hdrMerge::process(const std::vector< image::Image > &imag wsum += w * r / time; wdiv += w; } - //{ - // int exposureIndex = images.size() - 1; - // double lowValue = images[exposureIndex](y, x)(channel); - // // https://www.desmos.com/calculator/cvu8s3rlvy - // // ____ - // // sigmoid: \________ - // // 0 1 - // double clampedLowValue = sigmoid(0.0f, 1.0f, /*sigWidth=*/0.01f, /*sigMid=*/0.005, lowValue); - //} radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetCameraExposure; } - double clampedValueLuminanceCompensation = isPixelClamped / 3.0; - double clampedValueLuminanceCompensationInv = (1.0 - clampedValueLuminanceCompensation); - assert(clampedValueLuminanceCompensation <= 1.0); - - for(std::size_t channel = 0; channel < 3; ++channel) - { - radianceColor(channel) = clampedValueLuminanceCompensationInv * radianceColor(channel) + - clampedValueLuminanceCompensation * clampedValueCorrection * maxLum; - } } } } +void hdrMerge::postProcessHighlight(const std::vector< image::Image > &images, + const std::vector ×, + const rgbCurve &weight, + const rgbCurve &response, + image::Image &radiance, + float targetCameraExposure, + float highlightCorrectionFactor, + float highlightTargetLux) +{ + //checks + assert(!response.isEmpty()); + assert(!images.empty()); + assert(images.size() == times.size()); + + if (highlightCorrectionFactor == 0.0f) + return; + + const image::Image& inputImage = images.front(); + // Target Camera Exposure = 1 for EV-0 (iso=100, shutter=1, fnumber=1) => 2.5 lux + float 
highlightTarget = highlightTargetLux * targetCameraExposure * 2.5; + + // get images width, height + const std::size_t width = inputImage.Width(); + const std::size_t height = inputImage.Height(); + + image::Image isPixelClamped(width, height); + +#pragma omp parallel for + for (int y = 0; y < height; ++y) + { + for (int x = 0; x < width; ++x) + { + //for each pixels + image::RGBfColor &radianceColor = radiance(y, x); + + float& isClamped = isPixelClamped(y, x); + isClamped = 0.0f; + + for (std::size_t channel = 0; channel < 3; ++channel) + { + const float value = inputImage(y, x)(channel); + + // https://www.desmos.com/calculator/vpvzmidy1a + // ____ + // sigmoid inv: _______/ + // 0 1 + const float isChannelClamped = sigmoidInv(0.0f, 1.0f, /*sigWidth=*/0.08f, /*sigMid=*/0.95f, value); + isClamped += isChannelClamped; + } + isPixelClamped(y, x) /= 3.0; + } + } + + image::Image isPixelClamped_g(width, height); + image::ImageGaussianFilter(isPixelClamped, 1.0f, isPixelClamped_g, 3, 3); + +#pragma omp parallel for + for (int y = 0; y < height; ++y) + { + for (int x = 0; x < width; ++x) + { + image::RGBfColor& radianceColor = radiance(y, x); + + double clampingCompensation = highlightCorrectionFactor * (isPixelClamped_g(y, x) / 3.0); + double clampingCompensationInv = (1.0 - clampingCompensation); + assert(clampingCompensation <= 1.0); + + for (std::size_t channel = 0; channel < 3; ++channel) + { + if(highlightTarget > radianceColor(channel)) + { + radianceColor(channel) = clampingCompensation * highlightTarget + clampingCompensationInv * radianceColor(channel); + } + } + } + } +} } // namespace hdr } // namespace aliceVision diff --git a/src/aliceVision/hdr/hdrMerge.hpp b/src/aliceVision/hdr/hdrMerge.hpp index 5ef4ee4ec6..e30be56c2a 100644 --- a/src/aliceVision/hdr/hdrMerge.hpp +++ b/src/aliceVision/hdr/hdrMerge.hpp @@ -29,53 +29,17 @@ class hdrMerge { const rgbCurve &weight, const rgbCurve &response, image::Image &radiance, - float targetCameraExposure, - bool 
robCalibrate = false, - float clampedValueCorrection = 1.f); - - /** - * @brief This function obtains the "average scene luminance" EV value - * from an image file. - * - * "average scene luminance" is the L (aka B) value mentioned in [1] - * We return the log2f value to get an EV value. - * We are using K=12.07488f and the exif-implied value of N=1/3.125 (see [1]). - * K=12.07488f is the 1.0592f * 11.4f value in pfscalibration's - * pfshdrcalibrate.cpp file. - * Based on [3] we can say that the value can also be 12.5 or even 14. - * Another reference for APEX is [4] where N is 0.3, closer to the APEX - * specification of 2^(-7/4)=0.2973. - * - * [1] http://en.wikipedia.org/wiki/APEX_system - * [2] http://en.wikipedia.org/wiki/Exposure_value - * [3] http://en.wikipedia.org/wiki/Light_meter - * [4] http://doug.kerr.home.att.net/pumpkin/#APEX - * - * This function tries first to obtain the shutter speed from either of - * two exif tags (there is no standard between camera manifacturers): - * ExposureTime or ShutterSpeedValue. - * Same thing for f-number: it can be found in FNumber or in ApertureValue. - * - * F-number and shutter speed are mandatory in exif data for EV - * calculation, iso is not. 
- * - * Function from Luminance HDR - * - * @param shutter - * @param iso - * @param aperture - * @return "average scene luminance" - */ - static float getExposure(float shutter, float iso, float aperture) - { - //reflected-light meter calibration constant - const float K = 12.07488f; - return std::log2((shutter * iso) / (aperture * aperture * K)); + float targetCameraExposure); + + void postProcessHighlight(const std::vector< image::Image > &images, + const std::vector ×, + const rgbCurve &weight, + const rgbCurve &response, + image::Image &radiance, + float clampedValueCorrection, + float targetCameraExposure, + float highlightMaxLumimance); - //EV = log2 (pow2(fstop) / shutter time) - //LV = LV = EV + log2 (ISO / 100) (LV light Value as exposure) - //return std::log2( ((aperture * aperture)/shutter) * (iso / 100) ); - } }; } // namespace hdr diff --git a/src/aliceVision/hdr/rgbCurve.cpp b/src/aliceVision/hdr/rgbCurve.cpp index 55b89411a7..dda69b1730 100644 --- a/src/aliceVision/hdr/rgbCurve.cpp +++ b/src/aliceVision/hdr/rgbCurve.cpp @@ -104,23 +104,13 @@ void rgbCurve::setEmor() } } -//void rgbCurve::setGaussian(double size) -//{ -// const float coefficient = 1.f / (static_cast(getSize() - 1) / 4.0f); -// for(std::size_t i = 0; i < getSize(); ++i) -// { -// float factor = i / size * coefficient - 2.0f / size; -// setAllChannels(i, std::exp( -factor * factor )); -// } -//} - void rgbCurve::setGaussian(double mu, double sigma) { + // https://www.desmos.com/calculator/s3q3ow1mpy for(std::size_t i = 0; i < getSize(); ++i) { float factor = i / (static_cast(getSize() - 1)) - mu; setAllChannels(i, std::exp( -factor * factor / (2.0 * sigma * sigma))); - // setAllChannels(i, std::max(0.0, std::exp( -factor * factor / (2.0 * sigma * sigma)) - 0.005)); } } @@ -135,26 +125,27 @@ void rgbCurve::setRobertsonWeight() void rgbCurve::setTriangular() { - const float coefficient = 1.f / static_cast(getSize() - 1); + const float coefficient = 2.f / static_cast(getSize() - 1); 
for(std::size_t i = 0; i < getSize(); ++i) { float value = i * coefficient; - if (value > 0.5f) + if (value > 1.0f) { value = 1.0f - value; } - setAllChannels(i, 2.f * value); + setAllChannels(i, value); } } -void rgbCurve::setPlateau() +void rgbCurve::setPlateau(float weight) { + // https://www.desmos.com/calculator/mouwyuvjvw + const float coefficient = 1.f / static_cast(getSize() - 1); for(std::size_t i = 0; i < getSize(); ++i) { - setAllChannels(i, 1.0f - std::pow((2.0f * i * coefficient - 1.0f), 12.0f)); - // setAllChannels(i, 1.0f - std::pow((2.0f * i * coefficient - 1.0f), 4.0f)); + setAllChannels(i, 1.0f - std::pow((2.0f * i * coefficient - 1.0f), weight)); } } diff --git a/src/aliceVision/hdr/rgbCurve.hpp b/src/aliceVision/hdr/rgbCurve.hpp index 0db12e7f77..4d40f6a02c 100644 --- a/src/aliceVision/hdr/rgbCurve.hpp +++ b/src/aliceVision/hdr/rgbCurve.hpp @@ -164,7 +164,6 @@ class rgbCurve /** *@brief Set curves to gaussian */ -// void setGaussian(double mu = 0.5, double sigma = 1.0 / (4.0 * sqrt(2.0))); void setGaussian(double mu = 0.5, double sigma = 1.0 / (5.0 * sqrt(2.0))); /** @@ -178,9 +177,9 @@ class rgbCurve void setTriangular(); /** - *@brief Set curves to plateau + * @brief Set curves to plateau */ - void setPlateau(); + void setPlateau(float weight = 8.0f); /** *@brief Set curves to log10 diff --git a/src/aliceVision/image/Image.hpp b/src/aliceVision/image/Image.hpp index fafb7ce2b8..7d69d0db63 100644 --- a/src/aliceVision/image/Image.hpp +++ b/src/aliceVision/image/Image.hpp @@ -182,6 +182,10 @@ namespace aliceVision { return ( *this ); } + inline Base& GetMat() + { + return (*this); + } //-- accessors/getters methods //------------------------------ diff --git a/src/aliceVision/sfmData/View.hpp b/src/aliceVision/sfmData/View.hpp index 86c5595e86..716e554167 100644 --- a/src/aliceVision/sfmData/View.hpp +++ b/src/aliceVision/sfmData/View.hpp @@ -197,16 +197,14 @@ class View float getCameraExposureSetting() const { const float shutter = 
getMetadataShutter(); - const float aperture = getMetadataAperture(); - if (shutter < 0 || aperture < 0) + const float fnumber = getMetadataFNumber(); + if (shutter < 0 || fnumber < 0) return -1.f; const float iso = getMetadataISO(); const float isoRatio = (iso < 0.f) ? 1.0 : (iso / 100.f); - // The value of the constant must be within the range 10.6 to 13.4, according to the standard. - const float K = 12.07488f; - float cameraExposure = (shutter * isoRatio) / (aperture * aperture * K); + float cameraExposure = (shutter * isoRatio) / (fnumber * fnumber); return cameraExposure; } @@ -355,13 +353,21 @@ class View } /** - * @brief Get the corresponding "FNumber" (aperture) metadata value - * @return the metadata value float or -1 if no corresponding value - */ - float getMetadataAperture() const + * @brief Get the corresponding "FNumber" (relative aperture) metadata value + * @return the metadata value float or -1 if no corresponding value + */ + float getMetadataFNumber() const { if(hasDigitMetadata("FNumber")) + { return std::stof(getMetadata("FNumber")); + } + if (hasDigitMetadata("ApertureValue")) + { + const float aperture = std::stof(getMetadata("ApertureValue")); + // fnumber = 2^(aperture/2) + return std::pow(2.0f, aperture / 2.0f); + } return -1; } diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 2ce2585f52..dac09bbddc 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -103,7 +103,8 @@ int main(int argc, char * argv[]) std::string sfmOutputDataFilename = ""; int groupSize = 3; ECalibrationMethod calibrationMethod = ECalibrationMethod::LINEAR; - float clampedValueCorrection = 1.0f; + float highlightCorrectionFactor = 1.0f; + float highlightTargetLux = 100000.0f; bool fisheye = false; int channelQuantizationPower = 10; int calibrationNbPoints = 0; @@ -127,8 +128,10 @@ int main(int argc, char * argv[]) optionalParams.add_options() 
("calibrationMethod,m", po::value(&calibrationMethod)->default_value(calibrationMethod), "Name of method used for camera calibration: linear, robertson (slow), debevec, grossberg.") - ("expandDynamicRange,e", po::value(&clampedValueCorrection)->default_value(clampedValueCorrection), - "float value between 0 and 1 to correct clamped high values in dynamic range: use 0 for no correction, 0.5 for interior lighting and 1 for outdoor lighting.") + ("highlightsMaxLuminance", po::value(&highlightTargetLux)->default_value(highlightTargetLux), + "Highlights maximum luminance.") + ("expandDynamicRange"/*"highlightCorrectionFactor"*/, po::value(&highlightCorrectionFactor)->default_value(highlightCorrectionFactor), + "float value between 0 and 1 to correct clamped highlights in dynamic range: use 0 for no correction, 1 for full correction to maxLuminance.") ("fisheyeLens,f", po::value(&fisheye)->default_value(fisheye), "Set to 1 if images are taken with a fisheye lens and to 0 if not. Default value is set to 1.") ("channelQuantizationPower", po::value(&channelQuantizationPower)->default_value(channelQuantizationPower), @@ -190,7 +193,8 @@ int main(int argc, char * argv[]) // Read sfm data sfmData::SfMData sfmData; - if(!sfmDataIO::Load(sfmData, sfmInputDataFilename, sfmDataIO::ESfMData(sfmDataIO::VIEWS | sfmDataIO::INTRINSICS))) { + if(!sfmDataIO::Load(sfmData, sfmInputDataFilename, sfmDataIO::ESfMData::ALL)) + { ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmInputDataFilename << "' cannot be read."); return EXIT_FAILURE; } @@ -330,19 +334,19 @@ int main(int argc, char * argv[]) }); { - // Put a warning, if the aperture changes. - std::set apertures; + // Print a warning if the aperture changes. 
+ std::set fnumbers; for (auto & view : viewsOrderedByName) { - apertures.insert(view->getMetadataAperture()); + fnumbers.insert(view->getMetadataFNumber()); } - if(apertures.size() != 1) + if(fnumbers.size() != 1) { ALICEVISION_LOG_WARNING("Different apertures amongst the dataset. For correct HDR, you should only change the shutter speed (and eventually the ISO)."); - ALICEVISION_LOG_WARNING("Used apertures:"); - for (auto a : apertures) + ALICEVISION_LOG_WARNING("Used f-numbers:"); + for (auto f : fnumbers) { - ALICEVISION_LOG_WARNING(" * " << a); + ALICEVISION_LOG_WARNING(" * " << f); } } } @@ -350,10 +354,11 @@ int main(int argc, char * argv[]) // Make groups std::vector>> groupedViews; std::vector> group; - for (auto & view : viewsOrderedByName) { - + for (auto & view : viewsOrderedByName) + { group.push_back(view); - if (group.size() == groupSize) { + if (group.size() == groupSize) + { groupedViews.push_back(group); group.clear(); } @@ -372,14 +377,15 @@ int main(int argc, char * argv[]) int middleIndex = group.size() / 2; // If odd size, choose the more exposed view - if (group.size() % 2 && group.size() > 1) { + if (group.size() % 2 && group.size() > 1) + { middleIndex++; } targetViews.push_back(group[middleIndex]); } - // Build exposure times table + // Build camera exposure table std::vector> groupedExposures; for (int i = 0; i < groupedViews.size(); i++) { @@ -433,7 +439,6 @@ int main(int argc, char * argv[]) hdr::rgbCurve response(channelQuantization); const float lambda = channelQuantization * 1.f; - calibrationWeight.setTriangular(); // calculate the response function according to the method given in argument or take the response provided by the user { @@ -461,7 +466,7 @@ int main(int argc, char * argv[]) { ALICEVISION_LOG_INFO("Debevec calibration"); const float lambda = channelQuantization * 1.f; - if(calibrationNbPoints == 0) + if(calibrationNbPoints < 0) calibrationNbPoints = 10000; hdr::DebevecCalibrate calibration; 
calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, calibrationWeight, lambda, response); @@ -485,7 +490,7 @@ int main(int argc, char * argv[]) /* ALICEVISION_LOG_INFO("Robertson calibration"); hdr::RobertsonCalibrate calibration(10); - if(calibrationNbPoints == 0) + if(calibrationNbPoints < 0) calibrationNbPoints = 1000000; calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, calibrationWeight, response); response.scale(); @@ -495,7 +500,7 @@ int main(int argc, char * argv[]) case ECalibrationMethod::GROSSBERG: { ALICEVISION_LOG_INFO("Grossberg calibration"); - if (calibrationNbPoints == 0) + if (calibrationNbPoints < 0) calibrationNbPoints = 1000000; hdr::GrossbergCalibrate calibration(3); calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, response); @@ -541,19 +546,35 @@ int main(int argc, char * argv[]) hdr::hdrMerge merge; float targetCameraExposure = targetView->getCameraExposureSetting(); image::Image HDRimage; - merge.process(images, groupedExposures[g], fusionWeight, response, HDRimage, targetCameraExposure, false, clampedValueCorrection); + merge.process(images, groupedExposures[g], fusionWeight, response, HDRimage, targetCameraExposure); + + // TO REMOVE: Temporary export for debug + { + // Output image file path + std::stringstream sstream; + sstream << "hdr_" << std::setfill('0') << std::setw(4) << g << "_beforeCorrection.exr"; + std::string hdrImagePath = (fs::path(outputPath) / sstream.str()).string(); + + // Write an image with parameters from the target view + oiio::ParamValueList targetMetadata = image::readImageMetadata(targetView->getImagePath()); + image::writeImage(hdrImagePath, HDRimage, image::EImageColorSpace::AUTO, targetMetadata); + } + + if(highlightCorrectionFactor > 0.0) + { + merge.postProcessHighlight(images, groupedExposures[g], fusionWeight, response, HDRimage, 
targetCameraExposure, highlightCorrectionFactor, highlightTargetLux); + } // Output image file path std::stringstream sstream; sstream << "hdr_" << std::setfill('0') << std::setw(4) << g << ".exr"; - std::string hdrImagePath = (fs::path(outputPath) / sstream.str()).string(); // Write an image with parameters from the target view oiio::ParamValueList targetMetadata = image::readImageMetadata(targetView->getImagePath()); image::writeImage(hdrImagePath, HDRimage, image::EImageColorSpace::AUTO, targetMetadata); - targetViews[g]->setImagePath(sstream.str()); + targetViews[g]->setImagePath(hdrImagePath); vs[targetViews[g]->getViewId()] = targetViews[g]; } From 368ab284f69c68bfeab90702a56e989c2f93bdac Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 29 Nov 2019 20:24:50 +0100 Subject: [PATCH 129/174] [hdr] formatting --- src/aliceVision/hdr/DebevecCalibrate.cpp | 30 ++++++++---------------- 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/src/aliceVision/hdr/DebevecCalibrate.cpp b/src/aliceVision/hdr/DebevecCalibrate.cpp index 7ce26e31d4..5371688a90 100644 --- a/src/aliceVision/hdr/DebevecCalibrate.cpp +++ b/src/aliceVision/hdr/DebevecCalibrate.cpp @@ -31,19 +31,17 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im const int nbImages = imagePathsGroups.front().size(); const int samplesPerImage = nbPoints / (nbGroups*nbImages); - /* - Always 3 channels for the input images ... 
- */ + // Always 3 channels for the input images static const std::size_t channelsCount = 3; - //initialize response + // Initialize response response = rgbCurve(channelQuantization); - /*Store intermediate data for all three channels*/ + // Store intermediate data for all three channels Vec b_array[channelsCount]; std::vector tripletList_array[channelsCount]; - /* Initialize intermediate buffers */ + // Initialize intermediate buffers for(unsigned int channel=0; channel < channelsCount; ++channel) { Vec & b = b_array[channel]; @@ -67,10 +65,7 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im const std::size_t width = ldrImagesGroup.front().Width(); const std::size_t height = ldrImagesGroup.front().Height(); - /* - Include the data-fitting equations. - If images are fisheye, we take only pixels inside a disk with a radius of image's minimum side - */ + // If images are fisheye, we take only pixels inside a disk with a radius of image's minimum side if(fisheye) { const std::size_t minSize = std::min(width, height) * 0.97; @@ -95,13 +90,11 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im { std::size_t dist2 = pow(center(0)-x, 2) + pow(center(1)-y, 2); - /*This looks stupid ...*/ if(dist2 > maxDist2) { continue; } - for (int channel = 0; channel < channelsCount; channel++) { float sample = clamp(image(y, x)(channel), 0.f, 1.f); @@ -155,8 +148,7 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im } count += 1; - /* include the smoothness equations */ - + // include the smoothness equations for(std::size_t k = 0; k> & im A.setFromTriplets(tripletList_array[channel].begin(), tripletList_array[channel].end()); b_array[channel].conservativeResize(count); - /* - solve the system using SVD decomposition - */ + // solve the system using SVD decomposition A.makeCompressed(); Eigen::SparseQR> solver; solver.compute(A); - /*Check solver failure*/ + // Check solver failure if (solver.info() != Eigen::Success) { return 
false; @@ -191,7 +181,7 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im Vec x = solver.solve(b_array[channel]); - /*Check solver failure*/ + // Check solver failure if(solver.info() != Eigen::Success) { return false; @@ -199,7 +189,7 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im double relative_error = (A*x - b_array[channel]).norm() / b_array[channel].norm(); - /* Save result to response curve*/ + // Copy the result to the response curve for(std::size_t k = 0; k < channelQuantization; ++k) { response.setValue(k, channel, x(k)); From 4e7ca4628f470eb9af30fc42899844a1de931509 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Mon, 2 Dec 2019 11:06:34 +0100 Subject: [PATCH 130/174] [software] ldr2hdr: minor fix on calibrationNbPoints==0 --- src/software/convert/main_convertLDRToHDR.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index dac09bbddc..713561fa2d 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -466,7 +466,7 @@ int main(int argc, char * argv[]) { ALICEVISION_LOG_INFO("Debevec calibration"); const float lambda = channelQuantization * 1.f; - if(calibrationNbPoints < 0) + if(calibrationNbPoints <= 0) calibrationNbPoints = 10000; hdr::DebevecCalibrate calibration; calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, calibrationWeight, lambda, response); @@ -490,7 +490,7 @@ int main(int argc, char * argv[]) /* ALICEVISION_LOG_INFO("Robertson calibration"); hdr::RobertsonCalibrate calibration(10); - if(calibrationNbPoints < 0) + if(calibrationNbPoints <= 0) calibrationNbPoints = 1000000; calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, calibrationWeight, response); response.scale(); @@ -500,7 +500,7 @@ int main(int argc, 
char * argv[]) case ECalibrationMethod::GROSSBERG: { ALICEVISION_LOG_INFO("Grossberg calibration"); - if (calibrationNbPoints < 0) + if (calibrationNbPoints <= 0) calibrationNbPoints = 1000000; hdr::GrossbergCalibrate calibration(3); calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, response); From d0f4d6558826408a24bb84b333324e5499281d23 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Mon, 2 Dec 2019 11:07:17 +0100 Subject: [PATCH 131/174] [hdr] fix identation --- src/aliceVision/hdr/GrossbergCalibrate.cpp | 64 +++++++++++----------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/src/aliceVision/hdr/GrossbergCalibrate.cpp b/src/aliceVision/hdr/GrossbergCalibrate.cpp index 2108a5fa11..59d0977332 100644 --- a/src/aliceVision/hdr/GrossbergCalibrate.cpp +++ b/src/aliceVision/hdr/GrossbergCalibrate.cpp @@ -49,46 +49,46 @@ void GrossbergCalibrate::process(const std::vector>& im const double *h = getEmorInvCurve(i+1); if(emorSize == channelQuantization) { - hCurves[i].assign(h, h + emorSize); + hCurves[i].assign(h, h + emorSize); } else if(emorSize > channelQuantization) { - std::vector emorH; - emorH.assign(h, h + emorSize); - std::vector h0 = std::vector(emorH.begin(), emorH.end()); + std::vector emorH; + emorH.assign(h, h + emorSize); + std::vector h0 = std::vector(emorH.begin(), emorH.end()); - std::size_t step = emorSize/channelQuantization; - for(std::size_t k = 0; k emorH; - emorH.assign(h, h + emorSize); - std::vector h0 = std::vector(emorH.begin(), emorH.end()); + std::vector emorH; + emorH.assign(h, h + emorSize); + std::vector h0 = std::vector(emorH.begin(), emorH.end()); - std::size_t step = channelQuantization/emorSize; - hCurves[i].resize(channelQuantization, 0.0); - for(std::size_t k = 0; k(hCurves[i].data(), channelQuantization); From 8303d14aae4cdda1274dcc8a4b87420ad081d0b9 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Tue, 3 Dec 2019 14:43:21 +0100 Subject: [PATCH 132/174] 
sync with paris --- .../pipeline/main_panoramaCompositing.cpp | 608 +++++++++--------- 1 file changed, 295 insertions(+), 313 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 3bd180225c..3fdf96eee6 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -42,451 +42,429 @@ typedef struct { } ConfigView; -bool difference(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & aColor, const aliceVision::image::Image & aMask, const aliceVision::image::Image & bColor, const aliceVision::image::Image & bMask) { - size_t width = outputColor.Width(); - size_t height = outputColor.Height(); - - if (outputMask.Width() != width || outputMask.Height() != height) { - return false; - } - - if (aColor.Width() != width || aColor.Height() != height) { - return false; +class Compositer { +public: + Compositer(size_t outputWidth, size_t outputHeight) : + _panorama(outputWidth, outputHeight, true, image::RGBfColor(1.0f, 0.0f, 0.0f)), + _mask(outputWidth, outputHeight, true, false) + { } - if (aMask.Width() != width || aMask.Height() != height) { - return false; - } + virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - if (bColor.Width() != width || bColor.Height() != height) { - return false; - } + for (size_t i = 0; i < color.Height(); i++) { - if (bMask.Width() != width || bMask.Height() != height) { - return false; - } + size_t pano_i = offset_y + i; + if (pano_i >= _panorama.Height()) { + continue; + } - for (int i = 0; i < height; i++) { - for (int j = 0; j < width; j++) { + for (size_t j = 0; j < color.Width(); j++) { - if (aMask(i, j)) { - if (bMask(i, j)) { - outputColor(i, j) = aColor(i, j) - bColor(i, j); - outputMask(i, j) = 
1; - } - else { - outputColor(i, j) = aColor(i, j); - outputMask(i, j) = 1; - } - } - else { - if (bMask(i, j)) { - outputColor(i, j) = image::RGBfColor(0.0f); - outputMask(i, j) = 0; + if (!inputMask(i, j)) { + continue; } - else { - outputColor(i, j) = image::RGBfColor(0.0f); - outputMask(i, j) = 0; + + size_t pano_j = offset_x + j; + if (pano_j >= _panorama.Width()) { + pano_j = pano_j - _panorama.Width(); } + + _panorama(pano_i, pano_j) = color(i, j); + _mask(pano_i, pano_j) = 1; } } + + return true; } - return true; -} + const aliceVision::image::Image & getPanorama() const { + return _panorama; + } -bool addition(aliceVision::image::Image & outputColor, aliceVision::image::Image & outputMask, const aliceVision::image::Image & aColor, const aliceVision::image::Image & aMask, const aliceVision::image::Image & bColor, const aliceVision::image::Image & bMask) { + const aliceVision::image::Image & getPanoramaMask() const { + return _mask; + } - size_t width = outputColor.Width(); - size_t height = outputColor.Height(); +protected: + aliceVision::image::Image _panorama; + aliceVision::image::Image _mask; +}; +class AlphaCompositer : public Compositer { +public: - if (outputMask.Width() != width || outputMask.Height() != height) { - return false; - } + AlphaCompositer(size_t outputWidth, size_t outputHeight) : + Compositer(outputWidth, outputHeight), + _weightmap(outputWidth, outputHeight, true, 0.0f) { - if (aColor.Width() != width || aColor.Height() != height) { - return false; } - if (aMask.Width() != width || aMask.Height() != height) { - return false; - } + virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - if (bColor.Width() != width || bColor.Height() != height) { - return false; - } + for (size_t i = 0; i < color.Height(); i++) { - if (bMask.Width() != width || bMask.Height() != height) { - return false; - } + size_t 
pano_i = offset_y + i; + if (pano_i >= _panorama.Height()) { + continue; + } - for (int i = 0; i < height; i++) { - for (int j = 0; j < width; j++) { + for (size_t j = 0; j < color.Width(); j++) { - if (aMask(i, j)) { - if (bMask(i, j)) { - outputColor(i, j).r() = aColor(i, j).r() + bColor(i, j).r(); - outputColor(i, j).g() = aColor(i, j).g() + bColor(i, j).g(); - outputColor(i, j).b() = aColor(i, j).b() + bColor(i, j).b(); - outputMask(i, j) = 1; + if (!inputMask(i, j)) { + continue; } - else { - outputColor(i, j) = image::RGBfColor(0.0f); - outputMask(i, j) = 0; + + size_t pano_j = offset_x + j; + if (pano_j >= _panorama.Width()) { + pano_j = pano_j - _panorama.Width(); } - } - else { - if (bMask(i, j)) { - outputColor(i, j) = bColor(i, j); - outputMask(i, j) = 1; + + if (!_mask(pano_i, pano_j)) { + _panorama(pano_i, pano_j) = color(i, j); + _weightmap(pano_i, pano_j) = inputWeights(i, j); } else { - outputColor(i, j) = image::RGBfColor(0.0f); - outputMask(i, j) = 0; + float wp = _weightmap(pano_i, pano_j); + float wc = inputWeights(i, j); + float wtotal = wp + wc; + wp /= wtotal; + wc /= wtotal; + + _panorama(pano_i, pano_j).r() = wp * _panorama(pano_i, pano_j).r() + wc * color(i, j).r(); + _panorama(pano_i, pano_j).g() = wp * _panorama(pano_i, pano_j).g() + wc * color(i, j).g(); + _panorama(pano_i, pano_j).b() = wp * _panorama(pano_i, pano_j).b() + wc * color(i, j).b(); + + _weightmap(pano_i, pano_j) = wtotal; } + + _mask(pano_i, pano_j) = true; } } - } - return true; -} + return true; + } -size_t countValid(const aliceVision::image::Image & inputMask) { +protected: + aliceVision::image::Image _weightmap; +}; - size_t count = 0; +bool convolveHorizontal(image::Image & output, const image::Image & input, const Eigen::VectorXf & kernel) { - for (int i = 0; i < inputMask.Height(); i++) { - for (int j = 0; j < inputMask.Width(); j++) { - if (inputMask(i, j)) { - count++; - } - } + if (output.size() != input.size()) { + return false; } - return count; -} + if 
(kernel.size() % 2 == 0) { + return false; + } + int radius = kernel.size() / 2; -class LaplacianPyramid { -public: - bool process(const aliceVision::image::Image & inputColor, const image::Image & weights, size_t max_levels) { + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { - _original_width = inputColor.Width(); - _original_height = inputColor.Height(); + image::RGBAfColor sum = image::RGBAfColor(0.0); - std::vector> pyramid_gaussian; + if (input(i, j).a() < 1e-5) { + output(i,j) = sum; + continue; + } - pyramid_gaussian.push_back(inputColor); - _pyramid_weights.push_back(weights); + for (int k = 0; k < kernel.size(); k++) { - /*Build pyramid using 2x2 kernel **/ - for (int level = 0; level < max_levels; level++) { + double w = kernel(k); + int col = j + k - radius; - const image::Image & color = pyramid_gaussian[level]; - const image::Image & weight = _pyramid_weights[level]; + if (col < 0 || col >= input.Width()) { + continue; + } - image::Image nextColor(color.Width() / 2, color.Height() / 2, true, image::RGBfColor(0.0f)); - image::Image nextWeight(weight.Width() / 2, weight.Height() / 2, true, 0.0f); + sum += w * input(i, col); + } - for (int i = 0; i < nextColor.Height(); i++) { + output(i, j) = sum; + } + } - int di = i * 2; + return true; +} - for (int j = 0; j < nextColor.Width(); j++) { - int dj = j * 2; +bool convolveVertical(image::Image & output, const image::Image & input, const Eigen::VectorXf & kernel) { - nextColor(i, j).r() = 0.25f * (weight(di, dj) * color(di, dj).r() + weight(di, dj + 1) * color(di, dj + 1).r() + weight(di + 1, dj) * color(di + 1, dj).r() + weight(di + 1, dj + 1) * color(di + 1, dj + 1).r()); - nextColor(i, j).g() = 0.25f * (weight(di, dj) * color(di, dj).g() + weight(di, dj + 1) * color(di, dj + 1).g() + weight(di + 1, dj) * color(di + 1, dj).g() + weight(di + 1, dj + 1) * color(di + 1, dj + 1).g()); - nextColor(i, j).b() = 0.25f * (weight(di, dj) * color(di, dj).b() + weight(di, dj + 
1) * color(di, dj + 1).b() + weight(di + 1, dj) * color(di + 1, dj).b() + weight(di + 1, dj + 1) * color(di + 1, dj + 1).b()); - nextWeight(i, j) = 0.25f * (weight(di, dj) + weight(di, dj + 1) + weight(di + 1, dj) + weight(di + 1, dj + 1)); - } - } + if (output.size() != input.size()) { + return false; + } - pyramid_gaussian.push_back(nextColor); - _pyramid_weights.push_back(nextWeight); - } + if (kernel.size() % 2 == 0) { + return false; + } - /*Compute laplacian*/ - for (int level = 0; level < pyramid_gaussian.size() - 1; level++) { + int radius = kernel.size() / 2; - const image::Image & color = pyramid_gaussian[level]; - const image::Image & next_color = pyramid_gaussian[level + 1]; + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { - image::Image upscaled(color.Width(), color.Height(), true, image::RGBfColor(0.0f)); - image::Image difference(color.Width(), color.Height(), true, image::RGBfColor(0.0f)); + image::RGBAfColor sum = image::RGBAfColor(0.0); - for (int i = 0; i < next_color.Height(); i++) { - int di = i * 2; - for (int j = 0; j < next_color.Width(); j++) { - int dj = j * 2; - upscaled(di, dj) = next_color(i, j); - upscaled(di, dj + 1) = next_color(i, j); - upscaled(di + 1, dj) = next_color(i, j); - upscaled(di + 1, dj + 1) = next_color(i, j); - } + if (input(i, j).a() < 1e-5) { + output(i,j) = sum; + continue; } - for (int i = 0; i < color.Height(); i++) { - for (int j = 0; j < color.Width(); j++) { - difference(i, j) = color(i, j) - upscaled(i, j); + for (int k = 0; k < kernel.size(); k++) { + + double w = kernel(k); + int row = i + k - radius; + + if (row < 0 || row >= input.Height()) { + continue; } + + sum += w * input(row, j); } - _pyramid_color.push_back(difference); + output(i, j) = sum; } - - _pyramid_color.push_back(pyramid_gaussian[pyramid_gaussian.size() - 1]); - - - return true; } - bool stack() { + return true; +} - image::Image prev = _pyramid_color[_pyramid_color.size() - 1]; +bool 
downscale(aliceVision::image::Image & outputColor, const aliceVision::image::Image & inputColor) { - for (int level = _pyramid_color.size() - 2; level >= 0; level--) { + size_t width = inputColor.Width(); + size_t height = inputColor.Height(); - + size_t output_width = width / 2; + size_t output_height = height / 2; - image::Image current = _pyramid_color[level]; - image::Image rescaled(prev.Width() * 2, prev.Height() * 2, true, image::RGBfColor(0.0f)); + for (int i = 0; i < output_height; i++) { + for (int j = 0; j < output_width; j++) { + outputColor(i, j) = inputColor(i * 2, j * 2); + } + } + return true; +} - for (int i = 0; i < prev.Height(); i++) { +bool upscale(aliceVision::image::Image & outputColor, const aliceVision::image::Image & inputColor) { - int di = i * 2; + size_t width = inputColor.Width(); + size_t height = inputColor.Height(); - for (int j = 0; j < prev.Width(); j++) { + size_t output_width = width * 2; + size_t output_height = height * 2; - int dj = j * 2; + for (int i = 0; i < height; i++) { - rescaled(di, dj) = prev(i, j); - rescaled(di, dj + 1) = prev(i, j); - rescaled(di + 1, dj) = prev(i, j); - rescaled(di + 1, dj + 1) = prev(i, j); - } - } + int di = i * 2; - for (int i = 0; i < rescaled.Height(); i++) { - for (int j = 0; j < rescaled.Width(); j++) { - rescaled(i, j).r() = rescaled(i, j).r() + current(i, j).r(); - rescaled(i, j).g() = rescaled(i, j).g() + current(i, j).g(); - rescaled(i, j).b() = rescaled(i, j).b() + current(i, j).b(); - } - } + for (int j = 0; j < width; j++) { + int dj = j * 2; - prev = rescaled; + outputColor(di, dj) = image::RGBAfColor(0.0); + outputColor(di, dj + 1) = image::RGBAfColor(0.0); + outputColor(di + 1, dj) = image::RGBAfColor(0.0); + outputColor(di + 1, dj + 1) = inputColor(i, j); } + } + return true; +} - _stack_result = prev; +bool divideByAlpha(aliceVision::image::Image & inputOutputColor) { - return true; - } + size_t width = inputOutputColor.Width(); + size_t height = inputOutputColor.Height(); - 
bool merge(const LaplacianPyramid & other) { - if (_pyramid_color.size() == 0) { - _pyramid_color = other._pyramid_color; - _pyramid_weights = other._pyramid_weights; - return true; - } + for (int i = 0; i < height; i++) { - for (int level = 0; level < _pyramid_color.size() - 1; level++) { - image::Image & color = _pyramid_color[level]; - image::Image & weight = _pyramid_weights[level]; + for (int j = 0; j < width; j++) { + + inputOutputColor(i, j).r() = inputOutputColor(i, j).r() / (inputOutputColor(i, j).a() + 1e-5); + inputOutputColor(i, j).g() = inputOutputColor(i, j).g() / (inputOutputColor(i, j).a() + 1e-5); + inputOutputColor(i, j).b() = inputOutputColor(i, j).b() / (inputOutputColor(i, j).a() + 1e-5); + } + } - const image::Image & ocolor = other._pyramid_color[level]; - const image::Image & oweight = other._pyramid_weights[level]; + return true; +} - for (int i = 0; i < color.Height(); i++) { - for (int j = 0; j < color.Width(); j++) { +bool substract(aliceVision::image::Image & AminusB, const aliceVision::image::Image & A, const aliceVision::image::Image & B) { - if (weight(i, j) < oweight(i, j)) { - color(i, j) = ocolor(i, j); - } - } - } - } + size_t width = AminusB.Width(); + size_t height = AminusB.Height(); - size_t max_level = _pyramid_color.size() - 1; - image::Image & color = _pyramid_color[max_level]; - image::Image & weight = _pyramid_weights[max_level]; - const image::Image & ocolor = other._pyramid_color[max_level]; - const image::Image & oweight = other._pyramid_weights[max_level]; + if (AminusB.size() != A.size()) { + return false; + } - for (int i = 0; i < color.Height(); i++) { - for (int j = 0; j < color.Width(); j++) { + if (AminusB.size() != B.size()) { + return false; + } - float w = weight(i, j); - float ow = oweight(i, j); - float sum = w + ow; + for (int i = 0; i < height; i++) { - if (sum > 1e-8) { - w = w / sum; - ow = ow / sum; + for (int j = 0; j < width; j++) { - color(i, j).r() = w * color(i, j).r() + ow * ocolor(i, j).r(); - 
color(i, j).g() = w * color(i, j).g() + ow * ocolor(i, j).g(); - color(i, j).b() = w * color(i, j).b() + ow * ocolor(i, j).b(); - } - } + AminusB(i, j).r() = A(i, j).r() - B(i, j).r(); + AminusB(i, j).g() = A(i, j).g() - B(i, j).g(); + AminusB(i, j).b() = A(i, j).b() - B(i, j).b(); + AminusB(i, j).a() = A(i, j).a() - B(i, j).a(); } + } - for (int level = 0; level < _pyramid_color.size(); level++) { - char filename[512]; - sprintf(filename, "/home/mmoc/color_%d.exr", level); - image::writeImage(filename, _pyramid_color[level], image::EImageColorSpace::NO_CONVERSION); - } + return true; +} - return true; - } +bool addition(aliceVision::image::Image & AplusB, const aliceVision::image::Image & A, const aliceVision::image::Image & B) { - const image::Image & getStackResult() const { - return _stack_result; - } + size_t width = AplusB.Width(); + size_t height = AplusB.Height(); - const image::Image & getStackMask() const { - return _stack_mask; + if (AplusB.size() != A.size()) { + return false; } - size_t getDepth() const { - return _pyramid_color.size(); + if (AplusB.size() != B.size()) { + return false; } -protected: - std::vector> _pyramid_color; - std::vector> _pyramid_weights; - - image::Image _stack_result; - image::Image _stack_mask; + for (int i = 0; i < height; i++) { - size_t _original_width; - size_t _original_height; -}; + for (int j = 0; j < width; j++) { -class Compositer { -public: - Compositer(size_t outputWidth, size_t outputHeight) : - _panorama(outputWidth, outputHeight, true, image::RGBfColor(1.0f, 0.0f, 0.0f)), - _mask(outputWidth, outputHeight, true, false) - { + AplusB(i, j).r() = A(i, j).r() + B(i, j).r(); + AplusB(i, j).g() = A(i, j).g() + B(i, j).g(); + AplusB(i, j).b() = A(i, j).b() + B(i, j).b(); + AplusB(i, j).a() = A(i, j).a() + B(i, j).a(); + } } - virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - - 
for (size_t i = 0; i < color.Height(); i++) { - - size_t pano_i = offset_y + i; - if (pano_i >= _panorama.Height()) { - continue; - } + return true; +} - for (size_t j = 0; j < color.Width(); j++) { +class LaplacianPyramid { +public: + LaplacianPyramid(size_t base_width, size_t base_height, size_t max_levels) { - if (!inputMask(i, j)) { - continue; - } + size_t width = base_width; + size_t height = base_height; - size_t pano_j = offset_x + j; - if (pano_j >= _panorama.Width()) { - pano_j = pano_j - _panorama.Width(); - } + for (int lvl = 0; lvl < max_levels; lvl++) { - _panorama(pano_i, pano_j) = color(i, j); - _mask(pano_i, pano_j) = 1; - } + _levels.push_back(aliceVision::image::Image(base_width, base_height, true, image::RGBAfColor(0.0f))); + + base_height /= 2; + base_width /= 2; } - - return true; } - const aliceVision::image::Image & getPanorama() const { - return _panorama; - } + bool apply(const aliceVision::image::Image & source) { - const aliceVision::image::Image & getPanoramaMask() const { - return _mask; - } + /*Create the gaussian kernel*/ + Eigen::VectorXf kernel(5); + kernel[0] = 1.0f; + kernel[1] = 4.0f; + kernel[2] = 6.0f; + kernel[3] = 4.0f; + kernel[4] = 1.0f; + kernel.normalize(); -protected: - aliceVision::image::Image _panorama; - aliceVision::image::Image _mask; -}; + _levels[0] = source; -class AlphaCompositer : public Compositer { -public: + for (int l = 1; l < _levels.size(); l++) { - AlphaCompositer(size_t outputWidth, size_t outputHeight) : - Compositer(outputWidth, outputHeight), - _weightmap(outputWidth, outputHeight, true, 0.0f) { + aliceVision::image::Image buf(_levels[l - 1].Width(), _levels[l - 1].Height()); + aliceVision::image::Image buf2(_levels[l - 1].Width(), _levels[l - 1].Height()); + convolveHorizontal(buf, _levels[l - 1], kernel); + convolveVertical(buf2, buf, kernel); + downscale(_levels[l], buf2); + //divideByAlpha(_levels[l]); + } - } - virtual bool append(const aliceVision::image::Image & color, const 
aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - for (size_t i = 0; i < color.Height(); i++) { + for (int l = 0; l < _levels.size() - 1; l++) { + aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); + aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); + aliceVision::image::Image buf3(_levels[l].Width(), _levels[l].Height()); - size_t pano_i = offset_y + i; - if (pano_i >= _panorama.Height()) { - continue; - } + upscale(buf, _levels[l + 1]); + convolveHorizontal(buf2, buf, kernel * 4.0f); + convolveVertical(buf3, buf2, kernel * 4.0f); + //divideByAlpha(buf3); - for (size_t j = 0; j < color.Width(); j++) { + substract(_levels[l], _levels[l], buf3); + } - if (!inputMask(i, j)) { - continue; - } + return true; + } - size_t pano_j = offset_x + j; - if (pano_j >= _panorama.Width()) { - pano_j = pano_j - _panorama.Width(); - } + bool rebuild() { - if (!_mask(pano_i, pano_j)) { - _panorama(pano_i, pano_j) = color(i, j); - _weightmap(pano_i, pano_j) = inputWeights(i, j); - } - else { - float wp = _weightmap(pano_i, pano_j); - float wc = inputWeights(i, j); - float wtotal = wp + wc; - wp /= wtotal; - wc /= wtotal; + /*Create the gaussian kernel*/ + Eigen::VectorXf kernel(5); + kernel[0] = 1.0f; + kernel[1] = 4.0f; + kernel[2] = 6.0f; + kernel[3] = 4.0f; + kernel[4] = 1.0f; + kernel.normalize(); + kernel *= 4.0f; - _panorama(pano_i, pano_j).r() = wp * _panorama(pano_i, pano_j).r() + wc * color(i, j).r(); - _panorama(pano_i, pano_j).g() = wp * _panorama(pano_i, pano_j).g() + wc * color(i, j).g(); - _panorama(pano_i, pano_j).b() = wp * _panorama(pano_i, pano_j).b() + wc * color(i, j).b(); + for (int l = _levels.size() - 2; l >= 0; l--) { - _weightmap(pano_i, pano_j) = wtotal; - } + aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); + aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); + aliceVision::image::Image 
buf3(_levels[l].Width(), _levels[l].Height()); - _mask(pano_i, pano_j) = true; + upscale(buf, _levels[l + 1]); + convolveHorizontal(buf2, buf, kernel); + convolveVertical(buf3, buf2, kernel); + addition(_levels[l], _levels[l], buf3); + } + + for (int l = 0; l < _levels.size(); l++) { + + char filename[FILENAME_MAX]; + + image::Image tmp(_levels[l].Width(), _levels[l].Height(), true, image::RGBfColor(0.0)); + sprintf(filename, "/home/mmoc/test%d.exr", l); + for (int i = 0; i < _levels[l].Height(); i++) { + for (int j = 0; j < _levels[l].Width(); j++) { + if (_levels[0](i, j).a() > 1e-3) { + tmp(i, j).r() = (_levels[l](i, j).r()); + tmp(i, j).g() = (_levels[l](i, j).g()); + tmp(i, j).b() = (_levels[l](i, j).b()); + } + } } + image::writeImage(filename, tmp, image::EImageColorSpace::NO_CONVERSION); } return true; } -protected: - aliceVision::image::Image _weightmap; +private: + std::vector> _levels; }; class LaplacianCompositer : public AlphaCompositer { public: LaplacianCompositer(size_t outputWidth, size_t outputHeight) : - AlphaCompositer(outputWidth, outputHeight) { + AlphaCompositer(outputWidth, outputHeight), + _pyramid_panorama(outputWidth, outputHeight, 4) { } virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBfColor(0.0f)); - aliceVision::image::Image weight(_panorama.Width(), _panorama.Height(), true, 0); + aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBAfColor(0.0f)); for (int i = 0; i < color.Height(); i++) { @@ -502,29 +480,33 @@ class LaplacianCompositer : public AlphaCompositer { /* Create binary mask */ if (inputMask(i ,j)) { - view(di, dj) = color(i, j); + view(di, dj).r() = color(i, j).r(); + view(di, dj).g() = color(i, j).g(); + view(di, dj).b() = color(i, j).b(); if (inputWeights(i,j) > 
_weightmap(di, dj)) { - weight(di, dj) = 1.0f; + view(di, dj).a() = 1.0f; } else { - weight(di, dj) = 0.0f; + view(di, dj).a() = 0.0f; } } } } - LaplacianPyramid pyramid; - pyramid.process(view, weight, 4); - _pyramid_panorama.merge(pyramid); - _pyramid_panorama.stack(); + LaplacianPyramid pyramid(_panorama.Width(), _panorama.Height(), 4); + pyramid.apply(view); + pyramid.rebuild(); + + /*_pyramid_panorama.merge(pyramid); + _pyramid_panorama.stack();*/ - const aliceVision::image::Image & img = _pyramid_panorama.getStackResult(); + //const aliceVision::image::Image & img = _pyramid_panorama.getStackResult(); //const aliceVision::image::Image & mask = _pyramid_panorama.getStackMask(); - _panorama = img.block(0, 0, _panorama.Height(), _panorama.Width()); + //_panorama = img.block(0, 0, _panorama.Height(), _panorama.Width()); //_mask = mask.block(0, 0, _panorama.Height(), _panorama.Width());*/ for (int i = 0; i < inputWeights.Height(); i++) { From 4dddb5f25cffc8fec733142e41dae75664221cb8 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Tue, 3 Dec 2019 16:14:58 +0100 Subject: [PATCH 133/174] sync --- .../pipeline/main_panoramaCompositing.cpp | 106 ++++++++++++------ 1 file changed, 74 insertions(+), 32 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 3fdf96eee6..e4a512435a 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -165,7 +165,7 @@ bool convolveHorizontal(image::Image & output, const image::I for (int i = 0; i < output.Height(); i++) { for (int j = 0; j < output.Width(); j++) { - image::RGBAfColor sum = image::RGBAfColor(0.0); + image::RGBAfColor sum = image::RGBAfColor(0.0,0.0,0.0,0.0); if (input(i, j).a() < 1e-5) { output(i,j) = sum; @@ -206,7 +206,7 @@ bool convolveVertical(image::Image & output, const image::Ima for (int i = 0; i < output.Height(); i++) { for (int j = 0; j < output.Width(); j++) { - 
image::RGBAfColor sum = image::RGBAfColor(0.0); + image::RGBAfColor sum = image::RGBAfColor(0.0,0.0,0.0,0.0); if (input(i, j).a() < 1e-5) { output(i,j) = sum; @@ -264,9 +264,9 @@ bool upscale(aliceVision::image::Image & outputColor, const a for (int j = 0; j < width; j++) { int dj = j * 2; - outputColor(di, dj) = image::RGBAfColor(0.0); - outputColor(di, dj + 1) = image::RGBAfColor(0.0); - outputColor(di + 1, dj) = image::RGBAfColor(0.0); + outputColor(di, dj) = image::RGBAfColor(0.0,0.0,0.0,inputColor(i, j).a()); + outputColor(di, dj + 1) = image::RGBAfColor(0.0,0.0,0.0,inputColor(i, j).a()); + outputColor(di + 1, dj) = image::RGBAfColor(0.0,0.0,0.0,inputColor(i, j).a()); outputColor(di + 1, dj + 1) = inputColor(i, j); } } @@ -356,7 +356,7 @@ class LaplacianPyramid { for (int lvl = 0; lvl < max_levels; lvl++) { - _levels.push_back(aliceVision::image::Image(base_width, base_height, true, image::RGBAfColor(0.0f))); + _levels.push_back(aliceVision::image::Image(base_width, base_height, true, image::RGBAfColor(0.0f,0.0f,0.0f,0.0f))); base_height /= 2; base_width /= 2; @@ -382,12 +382,10 @@ class LaplacianPyramid { aliceVision::image::Image buf2(_levels[l - 1].Width(), _levels[l - 1].Height()); convolveHorizontal(buf, _levels[l - 1], kernel); convolveVertical(buf2, buf, kernel); + divideByAlpha(buf2); downscale(_levels[l], buf2); - //divideByAlpha(_levels[l]); } - - for (int l = 0; l < _levels.size() - 1; l++) { aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); @@ -396,8 +394,7 @@ class LaplacianPyramid { upscale(buf, _levels[l + 1]); convolveHorizontal(buf2, buf, kernel * 4.0f); convolveVertical(buf3, buf2, kernel * 4.0f); - //divideByAlpha(buf3); - + divideByAlpha(buf3); substract(_levels[l], _levels[l], buf3); } @@ -418,6 +415,18 @@ class LaplacianPyramid { for (int l = _levels.size() - 2; l >= 0; l--) { + divideByAlpha(_levels[l]); + for (int i = 0; i < 
_levels[l].Height(); i++) { + for (int j = 0; j < _levels[l].Width(); j++) { + if (_levels[l](i, j).a() > 1e-5) { + _levels[l](i, j).a() = 1.0; + } + else { + _levels[l](i, j).a() = 0.0; + } + } + } + aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); aliceVision::image::Image buf3(_levels[l].Width(), _levels[l].Height()); @@ -425,26 +434,52 @@ class LaplacianPyramid { upscale(buf, _levels[l + 1]); convolveHorizontal(buf2, buf, kernel); convolveVertical(buf3, buf2, kernel); + divideByAlpha(buf3); + addition(_levels[l], _levels[l], buf3); - } - for (int l = 0; l < _levels.size(); l++) { - - char filename[FILENAME_MAX]; - - image::Image tmp(_levels[l].Width(), _levels[l].Height(), true, image::RGBfColor(0.0)); - sprintf(filename, "/home/mmoc/test%d.exr", l); - for (int i = 0; i < _levels[l].Height(); i++) { - for (int j = 0; j < _levels[l].Width(); j++) { - if (_levels[0](i, j).a() > 1e-3) { - tmp(i, j).r() = (_levels[l](i, j).r()); - tmp(i, j).g() = (_levels[l](i, j).g()); - tmp(i, j).b() = (_levels[l](i, j).b()); + { + char filename[FILENAME_MAX]; + const image::Image & img = _levels[l]; + image::Image tmp(img.Width(), img.Height(), true, image::RGBfColor(0.0)); + sprintf(filename, "/home/mmoc/test%d.exr", l); + for (int i = 0; i < img.Height(); i++) { + for (int j = 0; j < img.Width(); j++) { + //if (img(i, j).a() > 1e-3) { + tmp(i, j).r() = img(i, j).r(); + tmp(i, j).g() = img(i, j).g(); + tmp(i, j).b() = img(i, j).b(); + //} } } + image::writeImage(filename, tmp, image::EImageColorSpace::NO_CONVERSION); + } + + } + + + return true; + } + + bool merge(const LaplacianPyramid & other) { + + for (int l = 0; l < _levels.size(); l++) { + + image::Image & img = _levels[l]; + const image::Image & oimg = other._levels[l]; + + for (int i = 0; i < img.Height(); i++) { + for (int j = 0; j < img.Width(); j++) { + + + img(i, j).r() += oimg(i, j).r() * oimg(i, j).a(); + img(i, j).g() 
+= oimg(i, j).g() * oimg(i, j).a(); + img(i, j).b() += oimg(i, j).b() * oimg(i, j).a(); + img(i, j).a() += oimg(i, j).a(); + } } - image::writeImage(filename, tmp, image::EImageColorSpace::NO_CONVERSION); } + return true; } @@ -453,18 +488,20 @@ class LaplacianPyramid { std::vector> _levels; }; +int count = 0; + class LaplacianCompositer : public AlphaCompositer { public: LaplacianCompositer(size_t outputWidth, size_t outputHeight) : AlphaCompositer(outputWidth, outputHeight), - _pyramid_panorama(outputWidth, outputHeight, 4) { + _pyramid_panorama(outputWidth, outputHeight, 8) { } virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBAfColor(0.0f)); + aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); for (int i = 0; i < color.Height(); i++) { @@ -494,12 +531,17 @@ class LaplacianCompositer : public AlphaCompositer { } } - LaplacianPyramid pyramid(_panorama.Width(), _panorama.Height(), 4); + LaplacianPyramid pyramid(_panorama.Width(), _panorama.Height(), 8); pyramid.apply(view); - pyramid.rebuild(); - /*_pyramid_panorama.merge(pyramid); - _pyramid_panorama.stack();*/ + + + _pyramid_panorama.merge(pyramid); + if (count == 28) { + + _pyramid_panorama.rebuild(); + } + count++; @@ -639,7 +681,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; - if (pos == 24 || pos == 25) + //if (pos == 24 || pos == 25) { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); From cc377252e50ca7ba27f313e8507f560940b3c2aa Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 5 Dec 2019 13:02:51 +0100 Subject: [PATCH 134/174] [hdr] fix triangular function --- src/aliceVision/hdr/rgbCurve.cpp | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aliceVision/hdr/rgbCurve.cpp b/src/aliceVision/hdr/rgbCurve.cpp index dda69b1730..8ddd11aeca 100644 --- a/src/aliceVision/hdr/rgbCurve.cpp +++ b/src/aliceVision/hdr/rgbCurve.cpp @@ -131,7 +131,7 @@ void rgbCurve::setTriangular() float value = i * coefficient; if (value > 1.0f) { - value = 1.0f - value; + value = 2.0f - value; } setAllChannels(i, value); } From a5378aae4243ce084192bf21bd7a8904c2442134 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 5 Dec 2019 16:00:19 +0100 Subject: [PATCH 135/174] [hdr] specific case for longest exposure hdr merge --- src/aliceVision/hdr/hdrMerge.cpp | 28 ++++++++++++++++++++++++---- src/aliceVision/hdr/rgbCurve.cpp | 12 ++++++++++++ src/aliceVision/hdr/rgbCurve.hpp | 2 ++ 3 files changed, 38 insertions(+), 4 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index e53746a998..7e85cc7aab 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -69,6 +69,8 @@ void hdrMerge::process(const std::vector< image::Image > &imag rgbCurve weightShortestExposure = weight; weightShortestExposure.freezeSecondPartValues(); + rgbCurve weightLongestExposure = weight; + weightLongestExposure.freezeFirstPartValues(); #pragma omp parallel for for(int y = 0; y < height; ++y) @@ -83,25 +85,26 @@ void hdrMerge::process(const std::vector< image::Image > &imag double wsum = 0.0; double wdiv = 0.0; + // Merge shortest exposure { int exposureIndex = 0; // for each image const double value = images[exposureIndex](y, x)(channel); const double time = times[exposureIndex]; - // // weightShortestExposure: _______ // _______/ // 0 1 - double w = std::max(0.f, weightShortestExposure(value, channel)); + double w = std::max(0.001f, weightShortestExposure(value, channel)); const double r = response(value, channel); wsum += w * r / time; wdiv += w; } - for(std::size_t i = 1; i < images.size(); ++i) + // Merge intermediate 
exposures + for(std::size_t i = 1; i < images.size() - 1; ++i) { // for each image const double value = images[i](y, x)(channel); @@ -110,13 +113,30 @@ void hdrMerge::process(const std::vector< image::Image > &imag // weight: ____ // _______/ \________ // 0 1 - double w = std::max(0.f, weight(value, channel)); + double w = std::max(0.001f, weight(value, channel)); const double r = response(value, channel); wsum += w * r / time; wdiv += w; } + // Merge longest exposure + { + int exposureIndex = images.size() - 1; + // for each image + const double value = images[exposureIndex](y, x)(channel); + const double time = times[exposureIndex]; + // + // weightLongestExposure: ____________ + // \_______ + // 0 1 + double w = std::max(0.001f, weightLongestExposure(value, channel)); + + const double r = response(value, channel); + + wsum += w * r / time; + wdiv += w; + } radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetCameraExposure; } } diff --git a/src/aliceVision/hdr/rgbCurve.cpp b/src/aliceVision/hdr/rgbCurve.cpp index 8ddd11aeca..5a64dcbe0e 100644 --- a/src/aliceVision/hdr/rgbCurve.cpp +++ b/src/aliceVision/hdr/rgbCurve.cpp @@ -174,6 +174,18 @@ void rgbCurve::inverseAllValues() } } +void rgbCurve::freezeFirstPartValues() +{ + for (auto &curve : _data) + { + std::size_t midIndex = (curve.size() / 2); + for (std::size_t i = 0; i < midIndex; ++i) + { + curve[i] = curve[midIndex]; + } + } +} + void rgbCurve::freezeSecondPartValues() { for (auto &curve : _data) diff --git a/src/aliceVision/hdr/rgbCurve.hpp b/src/aliceVision/hdr/rgbCurve.hpp index 4d40f6a02c..be56d7d1e0 100644 --- a/src/aliceVision/hdr/rgbCurve.hpp +++ b/src/aliceVision/hdr/rgbCurve.hpp @@ -192,6 +192,8 @@ class rgbCurve */ void inverseAllValues(); + void freezeFirstPartValues(); + void freezeSecondPartValues(); void invertAndScaleSecondPart(float scale); From 1d54f9037d0b29129e334137e31c9f9e9d7ceefd Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 5 Dec 2019 16:01:23 +0100 Subject: [PATCH 
136/174] [hdr] export weight functions --- src/software/convert/main_convertLDRToHDR.cpp | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 713561fa2d..10394b5690 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -421,13 +421,14 @@ int main(int argc, char * argv[]) // set the correct weight functions corresponding to the string parameter hdr::rgbCurve calibrationWeight(channelQuantization); std::transform(calibrationWeightFunction.begin(), calibrationWeightFunction.end(), calibrationWeightFunction.begin(), ::tolower); + std::string calibrationWeightFunctionV = calibrationWeightFunction; if (calibrationWeightFunction == "default") { switch (calibrationMethod) { case ECalibrationMethod::LINEAR: break; - case ECalibrationMethod::DEBEVEC: calibrationWeight.setTriangular(); break; - case ECalibrationMethod::ROBERTSON: calibrationWeight.setRobertsonWeight(); break; + case ECalibrationMethod::DEBEVEC: calibrationWeight.setTriangular(); calibrationWeightFunctionV="triangular"; break; + case ECalibrationMethod::ROBERTSON: calibrationWeight.setRobertsonWeight(); calibrationWeightFunctionV = "robertsonWeight"; break; case ECalibrationMethod::GROSSBERG: break; } } @@ -436,6 +437,13 @@ int main(int argc, char * argv[]) calibrationWeight.setFunction(hdr::EFunctionType_stringToEnum(calibrationWeightFunction)); } + { + std::string methodName = calibrationWeightFunctionV; + std::string outputHtmlPath = (fs::path(outputPath) / (std::string("calibration_weight_") + methodName + std::string(".html"))).string(); + + calibrationWeight.writeHtml(outputHtmlPath, "Calibration weight: " + methodName); + } + hdr::rgbCurve response(channelQuantization); const float lambda = channelQuantization * 1.f; @@ -526,6 +534,13 @@ int main(int argc, char * argv[]) hdr::rgbCurve 
fusionWeight(channelQuantization); fusionWeight.setFunction(fusionWeightFunction); + { + std::string methodName = EFunctionType_enumToString(fusionWeightFunction); + std::string outputHtmlPath = (fs::path(outputPath) / (std::string("fusion_weight_") + methodName + std::string(".html"))).string(); + + calibrationWeight.writeHtml(outputHtmlPath, "Fusion weight: " + methodName); + } + sfmData::SfMData outputSfm; sfmData::Views & vs = outputSfm.getViews(); outputSfm.getIntrinsics() = sfmData.getIntrinsics(); From 3f999138eb0290e93387ca16908b649b44b041bd Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 5 Dec 2019 16:07:05 +0100 Subject: [PATCH 137/174] [hdr] replace plateau with plateauSigmoid --- src/aliceVision/hdr/rgbCurve.cpp | 17 ++++++++++++++++- src/aliceVision/hdr/rgbCurve.hpp | 2 ++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/src/aliceVision/hdr/rgbCurve.cpp b/src/aliceVision/hdr/rgbCurve.cpp index 5a64dcbe0e..98c8edb7e2 100644 --- a/src/aliceVision/hdr/rgbCurve.cpp +++ b/src/aliceVision/hdr/rgbCurve.cpp @@ -40,7 +40,7 @@ void rgbCurve::setFunction(EFunctionType functionType) case EFunctionType::LINEAR: setLinear(); return; case EFunctionType::GAUSSIAN: setGaussian(); return; case EFunctionType::TRIANGLE: setTriangular(); return; - case EFunctionType::PLATEAU: setPlateau(); return; + case EFunctionType::PLATEAU: setPlateauSigmoid(); return; case EFunctionType::GAMMA: setGamma(); return; case EFunctionType::LOG10: setLog10(); return; } @@ -149,6 +149,21 @@ void rgbCurve::setPlateau(float weight) } } +inline float plateauSigmoidFunction(float cA, float wA, float cB, float wB, float x) +{ + // https://www.desmos.com/calculator/aoojidncmi + return 1.0 / (1.0 + std::exp(10.0 * (x - cB) / wB)) - 1.0 / (1.0 + std::exp(10.0 * (x - cA) / wA)); +} + +void rgbCurve::setPlateauSigmoid(float cA, float wA, float cB, float wB) +{ + const float coefficient = 1.f / static_cast(getSize() - 1); + for (std::size_t i = 0; i < getSize(); ++i) + { + 
setAllChannels(i, plateauSigmoidFunction(cA, wA, cB, wB, i * coefficient)); + } +} + void rgbCurve::setLog10() { const float coefficient = 1.f / static_cast(getSize() - 1); diff --git a/src/aliceVision/hdr/rgbCurve.hpp b/src/aliceVision/hdr/rgbCurve.hpp index be56d7d1e0..ee6e1ed256 100644 --- a/src/aliceVision/hdr/rgbCurve.hpp +++ b/src/aliceVision/hdr/rgbCurve.hpp @@ -181,6 +181,8 @@ class rgbCurve */ void setPlateau(float weight = 8.0f); + void setPlateauSigmoid(float cA = 0.2, float wA = 0.5, float cB = 0.85, float wB = 0.22); + /** *@brief Set curves to log10 */ From 1f298ce09d7826e8013f30444b2899086e0d6a7a Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 5 Dec 2019 16:07:32 +0100 Subject: [PATCH 138/174] [hdr] rgbCurve: add gamma functions --- src/aliceVision/hdr/rgbCurve.cpp | 47 ++++++++++++++++++++++++++++++++ src/aliceVision/hdr/rgbCurve.hpp | 4 +++ 2 files changed, 51 insertions(+) diff --git a/src/aliceVision/hdr/rgbCurve.cpp b/src/aliceVision/hdr/rgbCurve.cpp index 98c8edb7e2..8ebc690981 100644 --- a/src/aliceVision/hdr/rgbCurve.cpp +++ b/src/aliceVision/hdr/rgbCurve.cpp @@ -235,6 +235,53 @@ void rgbCurve::setAllAbsolute() } } +inline float gammaFunction(float value, float gamma) +{ + // 1/0.45 = 2.22 + if (value < 0.018) + { + return 4.5 * value; + } + else + { + return 1.099 * std::pow(value, 0.45) - 0.099; + } +} + +inline float inverseGammaFunction(float value, float gamma) +{ + if (value <= 0.0812f) + { + return value / 4.5f; + } + else + { + return pow((value + 0.099f) / 1.099f, gamma); + } +} + +void rgbCurve::applyGamma(float gamma) +{ + for (auto &curve : _data) + { + for (auto &value : curve) + { + value = gammaFunction(value, gamma); + } + } +} + +void rgbCurve::applyGammaInv(float gamma) +{ + for (auto &curve : _data) + { + for (auto &value : curve) + { + value = inverseGammaFunction(value, gamma); + } + } +} + void rgbCurve::normalize() { for(auto &curve : _data) diff --git a/src/aliceVision/hdr/rgbCurve.hpp 
b/src/aliceVision/hdr/rgbCurve.hpp index ee6e1ed256..6b6f192e77 100644 --- a/src/aliceVision/hdr/rgbCurve.hpp +++ b/src/aliceVision/hdr/rgbCurve.hpp @@ -205,6 +205,10 @@ class rgbCurve */ void setAllAbsolute(); + void applyGamma(float gamma = 2.2); + + void applyGammaInv(float gamma = 2.2); + /** * @brief normalize the curve */ From 42debb59b4d1c72dda908ba85d9d897a315fd2b4 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 6 Dec 2019 11:48:11 +0100 Subject: [PATCH 139/174] wip laplacian --- .../pipeline/main_panoramaCompositing.cpp | 358 ++++++++++++++---- 1 file changed, 286 insertions(+), 72 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index e4a512435a..abe1002562 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -166,15 +166,11 @@ bool convolveHorizontal(image::Image & output, const image::I for (int j = 0; j < output.Width(); j++) { image::RGBAfColor sum = image::RGBAfColor(0.0,0.0,0.0,0.0); - - if (input(i, j).a() < 1e-5) { - output(i,j) = sum; - continue; - } + float sumw = 0.0f; for (int k = 0; k < kernel.size(); k++) { - double w = kernel(k); + float w = kernel(k); int col = j + k - radius; if (col < 0 || col >= input.Width()) { @@ -182,9 +178,13 @@ bool convolveHorizontal(image::Image & output, const image::I } sum += w * input(i, col); + sumw += w; } - output(i, j) = sum; + output(i, j).r() = sum.r(); + output(i, j).g() = sum.g(); + output(i, j).b() = sum.b(); + output(i, j).a() = sum.a(); } } @@ -207,15 +207,12 @@ bool convolveVertical(image::Image & output, const image::Ima for (int j = 0; j < output.Width(); j++) { image::RGBAfColor sum = image::RGBAfColor(0.0,0.0,0.0,0.0); - - if (input(i, j).a() < 1e-5) { - output(i,j) = sum; - continue; - } + float w = 0.0f; + float sumw = 0.0f; for (int k = 0; k < kernel.size(); k++) { - double w = kernel(k); + float w = kernel(k); int row = i + k - 
radius; if (row < 0 || row >= input.Height()) { @@ -223,9 +220,13 @@ bool convolveVertical(image::Image & output, const image::Ima } sum += w * input(row, j); + sumw += w; } - output(i, j) = sum; + output(i, j).r() = sum.r(); + output(i, j).g() = sum.g(); + output(i, j).b() = sum.b(); + output(i, j).a() = sum.a(); } } @@ -264,10 +265,10 @@ bool upscale(aliceVision::image::Image & outputColor, const a for (int j = 0; j < width; j++) { int dj = j * 2; - outputColor(di, dj) = image::RGBAfColor(0.0,0.0,0.0,inputColor(i, j).a()); - outputColor(di, dj + 1) = image::RGBAfColor(0.0,0.0,0.0,inputColor(i, j).a()); - outputColor(di + 1, dj) = image::RGBAfColor(0.0,0.0,0.0,inputColor(i, j).a()); - outputColor(di + 1, dj + 1) = inputColor(i, j); + outputColor(di, dj) = inputColor(i, j); + outputColor(di, dj + 1) = image::RGBAfColor(0.0f,0.0f,0.0f,0.0f); + outputColor(di + 1, dj) = image::RGBAfColor(0.0f,0.0f,0.0f,0.0f); + outputColor(di + 1, dj + 1) = image::RGBAfColor(0.0f,0.0f,0.0f,0.0f); } } @@ -283,7 +284,7 @@ bool divideByAlpha(aliceVision::image::Image & inputOutputCol for (int i = 0; i < height; i++) { for (int j = 0; j < width; j++) { - + inputOutputColor(i, j).r() = inputOutputColor(i, j).r() / (inputOutputColor(i, j).a() + 1e-5); inputOutputColor(i, j).g() = inputOutputColor(i, j).g() / (inputOutputColor(i, j).a() + 1e-5); inputOutputColor(i, j).b() = inputOutputColor(i, j).b() / (inputOutputColor(i, j).a() + 1e-5); @@ -357,6 +358,7 @@ class LaplacianPyramid { for (int lvl = 0; lvl < max_levels; lvl++) { _levels.push_back(aliceVision::image::Image(base_width, base_height, true, image::RGBAfColor(0.0f,0.0f,0.0f,0.0f))); + _weights.push_back(aliceVision::image::Image(base_width, base_height, true, 0.0f)); base_height /= 2; base_width /= 2; @@ -372,18 +374,26 @@ class LaplacianPyramid { kernel[2] = 6.0f; kernel[3] = 4.0f; kernel[4] = 1.0f; - kernel.normalize(); + kernel = kernel / kernel.sum(); _levels[0] = source; + for (int l = 1; l < _levels.size(); l++) { 
aliceVision::image::Image buf(_levels[l - 1].Width(), _levels[l - 1].Height()); aliceVision::image::Image buf2(_levels[l - 1].Width(), _levels[l - 1].Height()); convolveHorizontal(buf, _levels[l - 1], kernel); convolveVertical(buf2, buf, kernel); - divideByAlpha(buf2); - downscale(_levels[l], buf2); + downscale(_levels[l], buf2); + } + + for (int l = 0; l < _levels.size(); l++) { + for (int i = 0; i < _levels[l].Height(); i++) { + for (int j = 0; j < _levels[l].Width(); j++) { + _weights[l](i,j) = _levels[l](i, j).a(); + } + } } for (int l = 0; l < _levels.size() - 1; l++) { @@ -392,16 +402,24 @@ class LaplacianPyramid { aliceVision::image::Image buf3(_levels[l].Width(), _levels[l].Height()); upscale(buf, _levels[l + 1]); - convolveHorizontal(buf2, buf, kernel * 4.0f); - convolveVertical(buf3, buf2, kernel * 4.0f); - divideByAlpha(buf3); + convolveHorizontal(buf2, buf, kernel ); + convolveVertical(buf3, buf2, kernel ); + + for (int i = 0; i < buf3.Height(); i++) { + for (int j = 0; j < buf3.Width(); j++) { + buf3(i,j) *= 4.0f; + } + } + substract(_levels[l], _levels[l], buf3); } + + return true; } - bool rebuild() { + bool rebuild(image::Image & output) { /*Create the gaussian kernel*/ Eigen::VectorXf kernel(5); @@ -410,22 +428,48 @@ class LaplacianPyramid { kernel[2] = 6.0f; kernel[3] = 4.0f; kernel[4] = 1.0f; - kernel.normalize(); - kernel *= 4.0f; - - for (int l = _levels.size() - 2; l >= 0; l--) { + kernel = kernel / kernel.sum(); - divideByAlpha(_levels[l]); + for (int l = 0; l < _levels.size(); l++) { for (int i = 0; i < _levels[l].Height(); i++) { for (int j = 0; j < _levels[l].Width(); j++) { - if (_levels[l](i, j).a() > 1e-5) { - _levels[l](i, j).a() = 1.0; + if (_weights[l](i, j) < 1e-12) { + _levels[l](i, j) = image::RGBAfColor(0.0,0.0,0.0,0.0); + continue; } - else { - _levels[l](i, j).a() = 0.0; + _levels[l](i, j).r() = _levels[l](i, j).r() / _weights[l](i, j); + _levels[l](i, j).g() = _levels[l](i, j).g() / _weights[l](i, j); + _levels[l](i, j).b() = 
_levels[l](i, j).b() / _weights[l](i, j); + } + } + } + + { + image::Image & img = _levels[_levels.size() - 1]; + for (int i = 0; i < img.Height(); i++) { + for (int j = 0; j < img.Width(); j++) { + image::RGBAfColor & pix = img(i, j); + image::RGBAfColor rpix; + rpix.r() = std::exp(pix.r()); + rpix.g() = std::exp(pix.g()); + rpix.b() = std::exp(pix.b()); + + if (rpix.r() < 0.0) { + pix.r() = 0.0; + } + + if (rpix.g() < 0.0) { + pix.g() = 0.0; + } + + if (rpix.b() < 0.0) { + pix.b() = 0.0; } } } + } + + for (int l = _levels.size() - 2; l >= 0; l--) { aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); @@ -434,29 +478,42 @@ class LaplacianPyramid { upscale(buf, _levels[l + 1]); convolveHorizontal(buf2, buf, kernel); convolveVertical(buf3, buf2, kernel); - divideByAlpha(buf3); + + for (int i = 0; i < buf3.Height(); i++) { + for (int j = 0; j < buf3.Width(); j++) { + buf3(i,j) *= 4.0f; + } + } addition(_levels[l], _levels[l], buf3); - { - char filename[FILENAME_MAX]; - const image::Image & img = _levels[l]; - image::Image tmp(img.Width(), img.Height(), true, image::RGBfColor(0.0)); - sprintf(filename, "/home/mmoc/test%d.exr", l); - for (int i = 0; i < img.Height(); i++) { + { + image::Image & img = _levels[l]; + for (int i = 0; i < img.Height(); i++) { for (int j = 0; j < img.Width(); j++) { - //if (img(i, j).a() > 1e-3) { - tmp(i, j).r() = img(i, j).r(); - tmp(i, j).g() = img(i, j).g(); - tmp(i, j).b() = img(i, j).b(); - //} + image::RGBAfColor & pix = img(i, j); + image::RGBAfColor rpix; + rpix.r() = std::exp(pix.r()); + rpix.g() = std::exp(pix.g()); + rpix.b() = std::exp(pix.b()); + + if (rpix.r() < 0.0) { + pix.r() = 0.0; + } + + if (rpix.g() < 0.0) { + pix.g() = 0.0; + } + + if (rpix.b() < 0.0) { + pix.b() = 0.0; + } } } - image::writeImage(filename, tmp, image::EImageColorSpace::NO_CONVERSION); } - } + output = _levels[0]; return true; } @@ -468,14 +525,17 @@ class 
LaplacianPyramid { image::Image & img = _levels[l]; const image::Image & oimg = other._levels[l]; + image::Image & weight = _weights[l]; + const image::Image & oweight = other._weights[l]; + for (int i = 0; i < img.Height(); i++) { for (int j = 0; j < img.Width(); j++) { - img(i, j).r() += oimg(i, j).r() * oimg(i, j).a(); - img(i, j).g() += oimg(i, j).g() * oimg(i, j).a(); - img(i, j).b() += oimg(i, j).b() * oimg(i, j).a(); - img(i, j).a() += oimg(i, j).a(); + img(i, j).r() += oimg(i, j).r() * oweight(i, j); + img(i, j).g() += oimg(i, j).g() * oweight(i, j); + img(i, j).b() += oimg(i, j).b() * oweight(i, j); + weight(i, j) += oweight(i, j); } } } @@ -486,6 +546,7 @@ class LaplacianPyramid { private: std::vector> _levels; + std::vector> _weights; }; int count = 0; @@ -495,13 +556,123 @@ class LaplacianCompositer : public AlphaCompositer { LaplacianCompositer(size_t outputWidth, size_t outputHeight) : AlphaCompositer(outputWidth, outputHeight), - _pyramid_panorama(outputWidth, outputHeight, 8) { + _pyramid_panorama(outputWidth, outputHeight, 6) { } + bool feathering(aliceVision::image::Image & output, const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask) { + + std::vector> feathering; + std::vector> feathering_mask; + feathering.push_back(color); + feathering_mask.push_back(inputMask); + + int lvl = 0; + int width = color.Width(); + int height = color.Height(); + + while (1) { + const image::Image & src = feathering[lvl]; + const image::Image & src_mask = feathering_mask[lvl]; + + image::Image half(width / 2, height / 2); + image::Image half_mask(width / 2, height / 2); + + for (int i = 0; i < half.Height(); i++) { + + int di = i * 2; + for (int j = 0; j < half.Width(); j++) { + int dj = j * 2; + + int count = 0; + half(i, j) = image::RGBfColor(0.0,0.0,0.0); + + if (src_mask(di, dj)) { + half(i, j) += src(di, dj); + count++; + } + + if (src_mask(di, dj + 1)) { + half(i, j) += src(di, dj + 1); + count++; + } + + if (src_mask(di + 1, 
dj)) { + half(i, j) += src(di + 1, dj); + count++; + } + + if (src_mask(di + 1, dj + 1)) { + half(i, j) += src(di + 1, dj + 1); + count++; + } + + if (count > 0) { + half(i, j) /= float(count); + half_mask(i, j) = 1; + } + else { + half_mask(i, j) = 0; + } + } + + + } + + feathering.push_back(half); + feathering_mask.push_back(half_mask); + + + width = half.Width(); + height = half.Height(); + + if (width < 2 || height < 2) break; + + lvl++; + } + + + for (int lvl = feathering.size() - 2; lvl >= 0; lvl--) { + + image::Image & src = feathering[lvl]; + image::Image & src_mask = feathering_mask[lvl]; + image::Image & ref = feathering[lvl + 1]; + image::Image & ref_mask = feathering_mask[lvl + 1]; + + for (int i = 0; i < src_mask.Height(); i++) { + for (int j = 0; j < src_mask.Width(); j++) { + if (!src_mask(i, j)) { + int mi = i / 2; + int mj = j / 2; + + if (mi >= ref_mask.Height()) { + mi = ref_mask.Height() - 1; + } + + if (mj >= ref_mask.Width()) { + mj = ref_mask.Width() - 1; + } + + src_mask(i, j) = ref_mask(mi, mj); + src(i, j) = ref(mi, mj); + } + } + } + } + + output = feathering[0]; + + return true; + } + virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + aliceVision::image::Image color_big(_panorama.Width(), _panorama.Height(), true, image::RGBfColor(0.0f, 0.0f, 0.0f)); + aliceVision::image::Image inputMask_big(_panorama.Width(), _panorama.Height(), true, 0); + aliceVision::image::Image inputWeights_big(_panorama.Width(), _panorama.Height(), true, 0); + aliceVision::image::Image color_big_feathered; + + for (int i = 0; i < color.Height(); i++) { @@ -511,35 +682,78 @@ class LaplacianCompositer : public AlphaCompositer { int dj = j + offset_x; - if (dj >= view.Width()) { - dj = dj - view.Width(); + if 
(dj >= color_big.Width()) { + dj = dj - color_big.Width(); } - /* Create binary mask */ - if (inputMask(i ,j)) { - view(di, dj).r() = color(i, j).r(); - view(di, dj).g() = color(i, j).g(); - view(di, dj).b() = color(i, j).b(); + color_big(di, dj).r() = color(i, j).r(); + color_big(di, dj).g() = color(i, j).g(); + color_big(di, dj).b() = color(i, j).b(); + inputMask_big(di, dj) = inputMask(i, j); + inputWeights_big(di, dj) = inputWeights(i, j); + } + } + + aliceVision::image::Image feathered; + feathering(feathered, color_big, inputMask_big); + + + aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + + const image::Image & img = feathered; + - if (inputWeights(i,j) > _weightmap(di, dj)) { - view(di, dj).a() = 1.0f; + for (int i = 0; i < view.Height(); i++) { + + for (int j = 0; j < view.Width(); j++) { + + view(i, j).r() = std::log(std::max(0.001f, feathered(i, j).r())); + view(i, j).g() = std::log(std::max(0.001f, feathered(i, j).g())); + view(i, j).b() = std::log(std::max(0.001f, feathered(i, j).b())); + + if (inputMask_big(i ,j)) { + + if (inputWeights_big(i,j) > _weightmap(i, j)) { + view(i, j).a() = 1.0f; } else { - view(di, dj).a() = 0.0f; + view(i, j).a() = 0.0f; } } } } - LaplacianPyramid pyramid(_panorama.Width(), _panorama.Height(), 8); + + LaplacianPyramid pyramid(_panorama.Width(), _panorama.Height(), 6); pyramid.apply(view); - - + _pyramid_panorama.merge(pyramid); - if (count == 28) { - - _pyramid_panorama.rebuild(); + + + + if (count == 72) { + image::Image res; + _pyramid_panorama.rebuild(res); + + { + char filename[FILENAME_MAX]; + const image::Image & img = res; + image::Image tmp(img.Width(), img.Height(), true, image::RGBfColor(0.0)); + sprintf(filename, "/home/mmoc/out.exr"); + for (int i = 0; i < img.Height(); i++) { + for (int j = 0; j < img.Width(); j++) { + //if (img(i, j).a() > 1e-3) { + tmp(i, j).r() = std::exp(img(i, j).r()); + tmp(i, j).g() = std::exp(img(i, j).g()); + 
tmp(i, j).b() = std::exp(img(i, j).b()); + + //} + } + } + + image::writeImage(filename, tmp, image::EImageColorSpace::SRGB); + } } count++; @@ -681,7 +895,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; - //if (pos == 24 || pos == 25) + if (1/*pos == 24 || pos == 25*/) { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); From c56cc2099c23607ac6c8c834df0b29f4507987e8 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 6 Dec 2019 22:53:18 +0100 Subject: [PATCH 140/174] [hdr] add new HDR calibration method named "Laguerre" Use Bundle Adjustment to estimate a one-parameter Laguerre function --- .../hdr/BundleAdjustmentCalibration.cpp | 281 ++++++++++++++++++ .../hdr/BundleAdjustmentCalibration.hpp | 58 ++++ src/aliceVision/hdr/CMakeLists.txt | 3 + src/software/convert/main_convertLDRToHDR.cpp | 49 ++- 4 files changed, 387 insertions(+), 4 deletions(-) create mode 100644 src/aliceVision/hdr/BundleAdjustmentCalibration.cpp create mode 100644 src/aliceVision/hdr/BundleAdjustmentCalibration.hpp diff --git a/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp b/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp new file mode 100644 index 0000000000..db76e3563d --- /dev/null +++ b/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp @@ -0,0 +1,281 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2019 AliceVision contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +#include "BundleAdjustmentCalibration.hpp" + +#include +#include + +#include + +#include + +#include +#include +#include +#include + + +namespace aliceVision { +namespace hdr { + +using namespace aliceVision::image; + +BundleAdjustmentCalibration::BundleAdjustmentCalibration() +{ +} + +struct ImageSamples +{ + std::vector> colors; + int camId = 0; + double exposure = 0.0; +}; + +void extractSamples( + std::vector>& out_samples, + const std::vector>& imagePathsGroups, + const std::vector< std::vector >& cameraExposures, + const int nbPoints, + const bool fisheye + ) +{ + const int nbGroups = imagePathsGroups.size(); + out_samples.resize(nbGroups); + + int averallNbImages = 0; + for (const auto& imgPaths : imagePathsGroups) + { + averallNbImages += imgPaths.size(); + } + const int samplesPerImage = nbPoints / averallNbImages; + + ALICEVISION_LOG_TRACE("samplesPerImage: " << samplesPerImage); + + #pragma omp parallel for num_threads(3) + for (int g = 0; g& out_hdrSamples = out_samples[g]; + + const std::vector &imagePaths = imagePathsGroups[g]; + out_hdrSamples.resize(imagePaths.size()); + + const std::vector& exposures = cameraExposures[g]; + + for (unsigned int i = 0; i < imagePaths.size(); ++i) + { + out_hdrSamples[i].exposure = exposures[i]; + out_hdrSamples[i].colors.reserve(samplesPerImage); + std::vector>& colors = out_hdrSamples[i].colors; + + Image img; + readImage(imagePaths[i], img, EImageColorSpace::LINEAR); + + const std::size_t width = img.Width(); + const std::size_t height = img.Height(); + + const std::size_t minSize = std::min(width, height) * 0.97; + const Vec2i center(width / 2, height / 2); + + const int xMin = std::ceil(center(0) - minSize / 2); + const int yMin = std::ceil(center(1) - minSize / 2); + const int xMax = std::floor(center(0) + minSize / 2); + const int yMax = std::floor(center(1) + minSize / 2); + const std::size_t maxDist2 = pow(minSize * 0.5, 2); + + const int step = std::ceil(minSize / sqrt(samplesPerImage)); + + // extract 
samples + for (int y = yMin; y <= yMax - step; y += step) + { + for (int x = xMin; x <= xMax - step; x += step) + { + if (fisheye) + { + std::size_t dist2 = pow(center(0) - x, 2) + pow(center(1) - y, 2); + if (dist2 > maxDist2) + continue; + } + RGBfColor& c = img(y, x); + colors.push_back(Rgb(c(0), c(1), c(2))); + } + } + } + } +} + + +template +T laguerreFunction(const T& a, const T& x) +{ + // https://www.desmos.com/calculator/ib1y06t4pe + using namespace boost::math::constants; + return x + 2.0 * pi() * atan((a * sin(pi() * x)) / (1.0 - a * cos(pi() * x))); +} +template +T laguerreFunctionInv(const T& a, const T& x) +{ + return laguerreFunction(-a, x); +} + +/** + * + */ +struct HdrResidual +{ + HdrResidual(const Rgb& a, const Rgb& b) + : _colorA(a) + , _colorB(b) + {} + + template + bool operator()(const T* const laguerreParam, const T* const relativeWB_R, const T* const relativeWB_B, const T* const expA, const T* const expB, T* residual) const + { + static const int red = 0; + static const int green = 1; + static const int blue = 2; + /* + { + // GREEN + T greenParam = laguerreParam[green]; + T a = laguerreFunction(greenParam, T(_colorA(green))); + T b = laguerreFunction(greenParam, T(_colorB(green))); + residual[green] = abs(a * (*expB) / (*expA) - b) + abs(a - b * (*expA) / (*expB)); + } + { + // RED + T redParam = laguerreParam[green] + laguerreParam[red]; + T a = *relativeWB_R + laguerreFunction(redParam, T(_colorA(red))); + T b = *relativeWB_R + laguerreFunction(redParam, T(_colorB(red))); + residual[red] = abs(a * (*expB) / (*expA) - b) + abs(a - b * (*expA) / (*expB)); + } + { + // BLUE + T blueParam = laguerreParam[green] + laguerreParam[blue]; + T a = *relativeWB_B + laguerreFunction(blueParam, T(_colorA(blue))); + T b = *relativeWB_B + laguerreFunction(blueParam, T(_colorB(blue))); + residual[blue] = abs(a * (*expB) / (*expA) - b) + abs(a - b * (*expA) / (*expB)); + } + */ + { + // GREEN + T greenParam = laguerreParam[green]; + T a = 
laguerreFunction(greenParam, T(_colorA(green))); + T b = laguerreFunction(greenParam, T(_colorB(green))); + residual[green] = abs(laguerreFunctionInv(greenParam, laguerreFunction(greenParam, T(_colorA(green))) * (*expB) / (*expA)) - T(_colorB(green))) + + abs(laguerreFunctionInv(greenParam, laguerreFunction(greenParam, T(_colorB(green))) * (*expA) / (*expB)) - T(_colorA(green))); + } + { + // RED + T redParam = laguerreParam[green] + laguerreParam[red]; + T a = *relativeWB_R + laguerreFunction(redParam, T(_colorA(red))); + T b = *relativeWB_R + laguerreFunction(redParam, T(_colorB(red))); + residual[red] = abs(laguerreFunctionInv(redParam, laguerreFunction(redParam, T(_colorA(red))) * (*expB) / (*expA)) - T(_colorB(red))) + + abs(laguerreFunctionInv(redParam, laguerreFunction(redParam, T(_colorB(red))) * (*expA) / (*expB)) - T(_colorA(red))); + } + { + // BLUE + T blueParam = laguerreParam[green] + laguerreParam[blue]; + T a = *relativeWB_B + laguerreFunction(blueParam, T(_colorA(blue))); + T b = *relativeWB_B + laguerreFunction(blueParam, T(_colorB(blue))); + residual[blue] = abs(laguerreFunctionInv(blueParam, laguerreFunction(blueParam, T(_colorA(blue))) * (*expB) / (*expA)) - T(_colorB(blue))) + + abs(laguerreFunctionInv(blueParam, laguerreFunction(blueParam, T(_colorB(blue))) * (*expA) / (*expB)) - T(_colorA(blue))); + } + + return true; + } + +private: + const Rgb& _colorA; + const Rgb& _colorB; +}; + +void BundleAdjustmentCalibration::process( + const std::vector>& imagePathsGroups, + const std::size_t channelQuantization, + const std::vector>& cameraExposures, + int nbPoints, + bool fisheye, + bool refineExposures, + rgbCurve &response) +{ + ALICEVISION_LOG_TRACE("Extract color samples"); + std::vector> samples; + extractSamples(samples, imagePathsGroups, cameraExposures, nbPoints, fisheye); + + ALICEVISION_LOG_TRACE("Create exposure list"); + std::map, double> exposures; + for(const auto& camExp: cameraExposures) + { + for (const auto& exp : camExp) + { + 
// TODO: camId + exposures[std::make_pair(0, exp)] = exp; + } + } + std::array laguerreParam = { 0.0, 0.0, 0.0 }; + std::array relativeWB = { 0.0, 0.0, 0.0 }; + + ALICEVISION_LOG_TRACE("Create BA problem"); + ceres::Problem problem; + + // TODO: make the LOSS function and the parameter an option + ceres::LossFunction* lossFunction = new ceres::HuberLoss(Square(0.12)); // 0.12 ~= 30/255 + + for (int g = 0; g < samples.size(); ++g) + { + std::vector& hdrSamples = samples[g]; + + for (int i = 0; i < hdrSamples[0].colors.size(); ++i) + { + for (int h = 0; h < hdrSamples.size() - 1; ++h) + { + ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction( + new HdrResidual(hdrSamples[h].colors[i], hdrSamples[h+1].colors[i])); + + double& expA = exposures[std::make_pair(0, cameraExposures[g][h])]; + double& expB = exposures[std::make_pair(0, cameraExposures[g][h+1])]; + + problem.AddResidualBlock(cost_function, lossFunction, laguerreParam.data(), &relativeWB[0], &relativeWB[2], &expA, &expB); + } + } + } + + ALICEVISION_LOG_DEBUG("BA Solve"); + + ceres::Solver::Options solverOptions; + solverOptions.linear_solver_type = ceres::SPARSE_NORMAL_CHOLESKY; + solverOptions.minimizer_progress_to_stdout = true; + + ceres::Solver::Summary summary; + ceres::Solve(solverOptions, &problem, &summary); + + ALICEVISION_LOG_DEBUG(summary.BriefReport()); + + ALICEVISION_LOG_TRACE("laguerreParam: " << laguerreParam); + ALICEVISION_LOG_TRACE("relativeWB: " << relativeWB); + + ALICEVISION_LOG_TRACE("Exposures:"); + for (const auto& expIt: exposures) + { + ALICEVISION_LOG_TRACE(" * [" << expIt.first.first << ", " << expIt.first.second << "]: " << expIt.second); + } + + for(unsigned int channel = 0; channel < 3; ++channel) + { + std::vector& curve = response.getCurve(channel); + double step = 1.0f / curve.size(); + for (unsigned int i = 0; i < curve.size(); ++i) + { + const double offset = ((channel == 1) ? 
0 : laguerreParam[1]); // non-green channels are relative to green channel + curve[i] = relativeWB[channel] + laguerreFunction(offset + laguerreParam[channel], i * step); + } + } +} + + +} // namespace hdr +} // namespace aliceVision diff --git a/src/aliceVision/hdr/BundleAdjustmentCalibration.hpp b/src/aliceVision/hdr/BundleAdjustmentCalibration.hpp new file mode 100644 index 0000000000..eee0a6605a --- /dev/null +++ b/src/aliceVision/hdr/BundleAdjustmentCalibration.hpp @@ -0,0 +1,58 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2019 AliceVision contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. + +#pragma once + +#include +#include +#include "emorCurve.hpp" +#include "rgbCurve.hpp" + +namespace aliceVision { +namespace hdr { + +/** + * The implementation is based on the following paper: + * "Mapping Colour in Image Stitching Applications", David Hasler, Sabine Susstrunk, 2003, JVCIR-2004 + * https://infoscience.epfl.ch/record/50201/files/hs03_JVCIR.pdf + * Itself based on: + * "Radiometric Self Calibration", Tomoo Mitsunaga, Shree K. 
Nayar, CVPR-1999 + * http://www.cs.columbia.edu/CAVE/publications/pdfs/Mitsunaga_CVPR99.pdf + * + * Some precisions are also provided in: + * "Radiometric alignment and vignetting calibration", Pablo d’Angelo, ICVS 2007 + * http://hugin.sourceforge.net/tech/icvs2007_final.pdf + */ +class BundleAdjustmentCalibration +{ +public: + explicit BundleAdjustmentCalibration(); + + /** + * @brief + * @param[in] LDR images groups + * @param[in] channel quantization + * @param[in] exposure times + * @param[in] number of samples + * @param[in] calibration weight function + * @param[out] camera response function + */ + void process( + const std::vector>& imagePathsGroups, + const std::size_t channelQuantization, + const std::vector>& cameraExposures, + int nbPoints, + bool fisheye, + bool refineExposures, + rgbCurve &response); + +private: + /// Dimension of the response ie number of basis vectors to calculate the response function + unsigned int _dimension; +}; + +} // namespace hdr +} // namespace aliceVision diff --git a/src/aliceVision/hdr/CMakeLists.txt b/src/aliceVision/hdr/CMakeLists.txt index 0a4685fd38..25b74e1819 100644 --- a/src/aliceVision/hdr/CMakeLists.txt +++ b/src/aliceVision/hdr/CMakeLists.txt @@ -1,6 +1,7 @@ # Headers set(hdr_files_headers + BundleAdjustmentCalibration.hpp rgbCurve.hpp RobertsonCalibrate.hpp hdrMerge.hpp @@ -12,6 +13,7 @@ set(hdr_files_headers # Sources set(hdr_files_sources rgbCurve.cpp + BundleAdjustmentCalibration.cpp RobertsonCalibrate.cpp hdrMerge.cpp DebevecCalibrate.cpp @@ -25,6 +27,7 @@ alicevision_add_library(aliceVision_hdr aliceVision_system aliceVision_image ${Boost_FILESYSTEM_LIBRARY} + ${CERES_LIBRARIES} ) # Unit tests diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 10394b5690..36d1cd885f 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -20,6 +20,7 @@ #include #include #include +#include /*Command line 
parameters*/ #include @@ -43,7 +44,8 @@ enum class ECalibrationMethod LINEAR, ROBERTSON, DEBEVEC, - GROSSBERG + GROSSBERG, + LAGUERRE, }; /** @@ -59,6 +61,7 @@ inline std::string ECalibrationMethod_enumToString(const ECalibrationMethod cali case ECalibrationMethod::ROBERTSON: return "robertson"; case ECalibrationMethod::DEBEVEC: return "debevec"; case ECalibrationMethod::GROSSBERG: return "grossberg"; + case ECalibrationMethod::LAGUERRE: return "laguerre"; } throw std::out_of_range("Invalid method name enum"); } @@ -77,6 +80,7 @@ inline ECalibrationMethod ECalibrationMethod_stringToEnum(const std::string& cal if (methodName == "robertson") return ECalibrationMethod::ROBERTSON; if (methodName == "debevec") return ECalibrationMethod::DEBEVEC; if (methodName == "grossberg") return ECalibrationMethod::GROSSBERG; + if (methodName == "laguerre") return ECalibrationMethod::LAGUERRE; throw std::out_of_range("Invalid method name : '" + calibrationMethodName + "'"); } @@ -127,7 +131,7 @@ int main(int argc, char * argv[]) po::options_description optionalParams("Optional parameters"); optionalParams.add_options() ("calibrationMethod,m", po::value(&calibrationMethod)->default_value(calibrationMethod), - "Name of method used for camera calibration: linear, robertson (slow), debevec, grossberg.") + "Name of method used for camera calibration: linear, robertson (slow), debevec, grossberg, laguerre.") ("highlightsMaxLuminance", po::value(&highlightTargetLux)->default_value(highlightTargetLux), "Highlights maximum luminance.") ("expandDynamicRange"/*"highlightCorrectionFactor"*/, po::value(&highlightCorrectionFactor)->default_value(highlightCorrectionFactor), @@ -430,6 +434,7 @@ int main(int argc, char * argv[]) case ECalibrationMethod::DEBEVEC: calibrationWeight.setTriangular(); calibrationWeightFunctionV="triangular"; break; case ECalibrationMethod::ROBERTSON: calibrationWeight.setRobertsonWeight(); calibrationWeightFunctionV = "robertsonWeight"; break; case 
ECalibrationMethod::GROSSBERG: break; + case ECalibrationMethod::LAGUERRE: break; } } else @@ -459,7 +464,7 @@ int main(int argc, char * argv[]) { hdr::rgbCurve r = response; - r.exponential(); // TODO + r.applyGamma(2.2); std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); std::string outputResponsePath = (fs::path(outputPath) / (std::string("response_log_") + methodName + std::string(".csv"))).string(); std::string outputResponsePathHtml = (fs::path(outputPath) / (std::string("response_log_") + methodName + std::string(".html"))).string(); @@ -489,6 +494,18 @@ int main(int argc, char * argv[]) ALICEVISION_LOG_INFO("Camera response function written as " << outputResponsePath); } + { + hdr::rgbCurve r = response; + r.applyGammaInv(2.2); + std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); + std::string outputResponsePath = (fs::path(outputPath) / (std::string("response_invGamma_") + methodName + std::string(".csv"))).string(); + std::string outputResponsePathHtml = (fs::path(outputPath) / (std::string("response_invGamma_") + methodName + std::string(".html"))).string(); + + r.write(outputResponsePath); + r.writeHtml(outputResponsePathHtml, "Camera Response Curve " + methodName); + ALICEVISION_LOG_INFO("Camera response function written as " << outputResponsePath); + } + response.exponential(); response.scale(); } @@ -514,6 +531,16 @@ int main(int argc, char * argv[]) calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, response); } break; + case ECalibrationMethod::LAGUERRE: + { + ALICEVISION_LOG_INFO("Laguerre calibration"); + if (calibrationNbPoints <= 0) + calibrationNbPoints = 1000000; + hdr::BundleAdjustmentCalibration calibration; + bool refineExposures = false; + calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, refineExposures, response); + } + break; } } @@ -545,6 +572,20 @@ int main(int argc, char 
* argv[]) sfmData::Views & vs = outputSfm.getViews(); outputSfm.getIntrinsics() = sfmData.getIntrinsics(); + image::EImageColorSpace mergeColorspace = image::EImageColorSpace::LINEAR; + + switch (calibrationMethod) + { + case ECalibrationMethod::LINEAR: + case ECalibrationMethod::GROSSBERG: + case ECalibrationMethod::LAGUERRE: + mergeColorspace = image::EImageColorSpace::LINEAR; + break; + case ECalibrationMethod::DEBEVEC: + case ECalibrationMethod::ROBERTSON: + mergeColorspace = image::EImageColorSpace::SRGB; + break; + } for(int g = 0; g < groupedFilenames.size(); ++g) { std::vector> images(groupSize); @@ -554,7 +595,7 @@ int main(int argc, char * argv[]) for (int i = 0; i < groupSize; i++) { ALICEVISION_LOG_INFO("Load " << groupedFilenames[g][i]); - image::readImage(groupedFilenames[g][i], images[i], (calibrationMethod == ECalibrationMethod::LINEAR) ? image::EImageColorSpace::LINEAR : image::EImageColorSpace::SRGB); + image::readImage(groupedFilenames[g][i], images[i], mergeColorspace); } // Merge HDR images From 4af5ffcc47d63d754c780d662ecdddad279a84c0 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Mon, 9 Dec 2019 11:08:08 +0100 Subject: [PATCH 141/174] [hdr] Laguerre: supports multiple images with the same exposure --- .../hdr/BundleAdjustmentCalibration.cpp | 101 +++++++++++++++--- src/aliceVision/hdr/CMakeLists.txt | 2 +- 2 files changed, 86 insertions(+), 17 deletions(-) diff --git a/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp b/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp index db76e3563d..71277f54c2 100644 --- a/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp +++ b/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp @@ -202,16 +202,21 @@ void BundleAdjustmentCalibration::process( bool refineExposures, rgbCurve &response) { - ALICEVISION_LOG_TRACE("Extract color samples"); + ALICEVISION_LOG_DEBUG("Extract color samples"); std::vector> samples; extractSamples(samples, imagePathsGroups, cameraExposures, nbPoints, fisheye); - 
ALICEVISION_LOG_TRACE("Create exposure list"); + ALICEVISION_LOG_DEBUG("Create exposure list"); std::map, double> exposures; - for(const auto& camExp: cameraExposures) + for(int i = 0; i < cameraExposures.size(); ++i) { - for (const auto& exp : camExp) + const std::vector& camExp = cameraExposures[i]; + for (int j = 0; j < camExp.size(); ++j) { + const auto& exp = camExp[j]; + + ALICEVISION_LOG_TRACE(" * " << imagePathsGroups[i][j] << ": " << exp); + // TODO: camId exposures[std::make_pair(0, exp)] = exp; } @@ -219,32 +224,97 @@ void BundleAdjustmentCalibration::process( std::array laguerreParam = { 0.0, 0.0, 0.0 }; std::array relativeWB = { 0.0, 0.0, 0.0 }; - ALICEVISION_LOG_TRACE("Create BA problem"); + ALICEVISION_LOG_DEBUG("Create BA problem"); ceres::Problem problem; - // TODO: make the LOSS function and the parameter an option + // Should we expose the LOSS function parameter? ceres::LossFunction* lossFunction = new ceres::HuberLoss(Square(0.12)); // 0.12 ~= 30/255 + // In the Bundle Adjustment each image is optimized relativelty to the next one. The images are ordered by exposures. + // But the exposures need to be different between 2 consecutive images to get constraints. + // So if multiple images have been taken with the same parameters, we search for the next closest exposure (larger or smaller). 
+ // For example a dataset of 6 images with 4 different exposures: + // 0 1 2 3 4 5 + // 1/800 1/800 -- 1/400 -- 1/200 1/200 -- 1/100 + // \_________________/ \________________/ + // Without duplicates the relative indexes would be: [1, 2, 3, 4, 5, 4] + // In this example with 2 duplicates, the relative indexes are: [2, 2, 3, 5, 5, 3] + std::vector> closestRelativeExpIndex; + { + closestRelativeExpIndex.resize(cameraExposures.size()); + for(int i = 0; i < cameraExposures.size(); ++i) + { + const std::vector& camExp = cameraExposures[i]; + std::vector& camRelativeExpIndex = closestRelativeExpIndex[i]; + camRelativeExpIndex.resize(camExp.size(), -1); + for(int j = 0; j < camExp.size(); ++j) + { + // Search in the following indexes + for (int rj = j + 1; rj < camExp.size(); ++rj) + { + if (camExp[rj] != camExp[j]) + { + camRelativeExpIndex[j] = rj; + break; + } + } + if(camRelativeExpIndex[j] != -1) + continue; + // Search in backward direction + for (int rj = j - 1; rj >= 0; --rj) + { + if (camExp[rj] != camExp[j]) + { + camRelativeExpIndex[j] = rj; + break; + } + } + } + } + /* + // Log relative indexes + for (int i = 0; i < closestRelativeExpIndex.size(); ++i) + { + std::vector& camRelativeExpIndex = closestRelativeExpIndex[i]; + for (int j = 0; j < camRelativeExpIndex.size(); ++j) + { + ALICEVISION_LOG_TRACE(" * closestRelativeExpIndex[" << i << "][" << j << "] = " << closestRelativeExpIndex[i][j]); + } + }*/ + } + for (int g = 0; g < samples.size(); ++g) { std::vector& hdrSamples = samples[g]; + ALICEVISION_LOG_TRACE("Group: " << g << ", hdr brakets: " << hdrSamples.size() << ", nb color samples: " << hdrSamples[0].colors.size()); + for (int i = 0; i < hdrSamples[0].colors.size(); ++i) { - for (int h = 0; h < hdrSamples.size() - 1; ++h) + for (int h = 0; h < hdrSamples.size(); ++h) { + int hNext = closestRelativeExpIndex[g][h]; + if(h == hNext) + throw std::runtime_error("Error in exposure chain. 
Relative exposure refer to itself (h: " + std::to_string(h) + ")."); + if (hNext < h) + { + // The residual cost is bidirectional. So if 2 elements refer to each other, it will be a duplicates, so we have to skip one of them. + int hNextNext = closestRelativeExpIndex[g][hNext]; + if(hNextNext == h) + continue; + } ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction( - new HdrResidual(hdrSamples[h].colors[i], hdrSamples[h+1].colors[i])); + new HdrResidual(hdrSamples[h].colors[i], hdrSamples[hNext].colors[i])); double& expA = exposures[std::make_pair(0, cameraExposures[g][h])]; - double& expB = exposures[std::make_pair(0, cameraExposures[g][h+1])]; + double& expB = exposures[std::make_pair(0, cameraExposures[g][hNext])]; problem.AddResidualBlock(cost_function, lossFunction, laguerreParam.data(), &relativeWB[0], &relativeWB[2], &expA, &expB); } } } - ALICEVISION_LOG_DEBUG("BA Solve"); + ALICEVISION_LOG_INFO("BA Solve"); ceres::Solver::Options solverOptions; solverOptions.linear_solver_type = ceres::SPARSE_NORMAL_CHOLESKY; @@ -253,15 +323,14 @@ void BundleAdjustmentCalibration::process( ceres::Solver::Summary summary; ceres::Solve(solverOptions, &problem, &summary); - ALICEVISION_LOG_DEBUG(summary.BriefReport()); - - ALICEVISION_LOG_TRACE("laguerreParam: " << laguerreParam); - ALICEVISION_LOG_TRACE("relativeWB: " << relativeWB); + ALICEVISION_LOG_TRACE(summary.BriefReport()); - ALICEVISION_LOG_TRACE("Exposures:"); + ALICEVISION_LOG_INFO("Laguerre params: " << laguerreParam); + ALICEVISION_LOG_INFO("Relative WB: " << relativeWB); + ALICEVISION_LOG_INFO("Exposures:"); for (const auto& expIt: exposures) { - ALICEVISION_LOG_TRACE(" * [" << expIt.first.first << ", " << expIt.first.second << "]: " << expIt.second); + ALICEVISION_LOG_INFO(" * [" << expIt.first.first << ", " << expIt.first.second << "]: " << expIt.second); } for(unsigned int channel = 0; channel < 3; ++channel) diff --git a/src/aliceVision/hdr/CMakeLists.txt 
b/src/aliceVision/hdr/CMakeLists.txt index 25b74e1819..f4320bb6f1 100644 --- a/src/aliceVision/hdr/CMakeLists.txt +++ b/src/aliceVision/hdr/CMakeLists.txt @@ -12,8 +12,8 @@ set(hdr_files_headers # Sources set(hdr_files_sources - rgbCurve.cpp BundleAdjustmentCalibration.cpp + rgbCurve.cpp RobertsonCalibrate.cpp hdrMerge.cpp DebevecCalibrate.cpp From a8d5d51aa809020ce7af5a87135440467f7f2065 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Mon, 9 Dec 2019 11:45:15 +0100 Subject: [PATCH 142/174] [hdr] move sampling in a dedicated file --- .../hdr/BundleAdjustmentCalibration.cpp | 81 +--------------- src/aliceVision/hdr/CMakeLists.txt | 2 + src/aliceVision/hdr/sampling.cpp | 93 +++++++++++++++++++ src/aliceVision/hdr/sampling.hpp | 33 +++++++ 4 files changed, 129 insertions(+), 80 deletions(-) create mode 100644 src/aliceVision/hdr/sampling.cpp create mode 100644 src/aliceVision/hdr/sampling.hpp diff --git a/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp b/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp index 71277f54c2..66b94fd9e6 100644 --- a/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp +++ b/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp @@ -5,6 +5,7 @@ // You can obtain one at https://mozilla.org/MPL/2.0/. 
#include "BundleAdjustmentCalibration.hpp" +#include "sampling.hpp" #include #include @@ -28,86 +29,6 @@ BundleAdjustmentCalibration::BundleAdjustmentCalibration() { } -struct ImageSamples -{ - std::vector> colors; - int camId = 0; - double exposure = 0.0; -}; - -void extractSamples( - std::vector>& out_samples, - const std::vector>& imagePathsGroups, - const std::vector< std::vector >& cameraExposures, - const int nbPoints, - const bool fisheye - ) -{ - const int nbGroups = imagePathsGroups.size(); - out_samples.resize(nbGroups); - - int averallNbImages = 0; - for (const auto& imgPaths : imagePathsGroups) - { - averallNbImages += imgPaths.size(); - } - const int samplesPerImage = nbPoints / averallNbImages; - - ALICEVISION_LOG_TRACE("samplesPerImage: " << samplesPerImage); - - #pragma omp parallel for num_threads(3) - for (int g = 0; g& out_hdrSamples = out_samples[g]; - - const std::vector &imagePaths = imagePathsGroups[g]; - out_hdrSamples.resize(imagePaths.size()); - - const std::vector& exposures = cameraExposures[g]; - - for (unsigned int i = 0; i < imagePaths.size(); ++i) - { - out_hdrSamples[i].exposure = exposures[i]; - out_hdrSamples[i].colors.reserve(samplesPerImage); - std::vector>& colors = out_hdrSamples[i].colors; - - Image img; - readImage(imagePaths[i], img, EImageColorSpace::LINEAR); - - const std::size_t width = img.Width(); - const std::size_t height = img.Height(); - - const std::size_t minSize = std::min(width, height) * 0.97; - const Vec2i center(width / 2, height / 2); - - const int xMin = std::ceil(center(0) - minSize / 2); - const int yMin = std::ceil(center(1) - minSize / 2); - const int xMax = std::floor(center(0) + minSize / 2); - const int yMax = std::floor(center(1) + minSize / 2); - const std::size_t maxDist2 = pow(minSize * 0.5, 2); - - const int step = std::ceil(minSize / sqrt(samplesPerImage)); - - // extract samples - for (int y = yMin; y <= yMax - step; y += step) - { - for (int x = xMin; x <= xMax - step; x += step) - { - if 
(fisheye) - { - std::size_t dist2 = pow(center(0) - x, 2) + pow(center(1) - y, 2); - if (dist2 > maxDist2) - continue; - } - RGBfColor& c = img(y, x); - colors.push_back(Rgb(c(0), c(1), c(2))); - } - } - } - } -} - - template T laguerreFunction(const T& a, const T& x) { diff --git a/src/aliceVision/hdr/CMakeLists.txt b/src/aliceVision/hdr/CMakeLists.txt index f4320bb6f1..95b8bfee3b 100644 --- a/src/aliceVision/hdr/CMakeLists.txt +++ b/src/aliceVision/hdr/CMakeLists.txt @@ -2,6 +2,7 @@ # Headers set(hdr_files_headers BundleAdjustmentCalibration.hpp + sampling.hpp rgbCurve.hpp RobertsonCalibrate.hpp hdrMerge.hpp @@ -13,6 +14,7 @@ set(hdr_files_headers # Sources set(hdr_files_sources BundleAdjustmentCalibration.cpp + sampling.cpp rgbCurve.cpp RobertsonCalibrate.cpp hdrMerge.cpp diff --git a/src/aliceVision/hdr/sampling.cpp b/src/aliceVision/hdr/sampling.cpp new file mode 100644 index 0000000000..108d3ea39c --- /dev/null +++ b/src/aliceVision/hdr/sampling.cpp @@ -0,0 +1,93 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2019 AliceVision contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +#include "sampling.hpp" + +#include +#include + + +namespace aliceVision { +namespace hdr { + +using namespace aliceVision::image; + +void extractSamples( + std::vector>& out_samples, + const std::vector>& imagePathsGroups, + const std::vector< std::vector >& cameraExposures, + const int nbPoints, + const bool fisheye + ) +{ + const int nbGroups = imagePathsGroups.size(); + out_samples.resize(nbGroups); + + int averallNbImages = 0; + for (const auto& imgPaths : imagePathsGroups) + { + averallNbImages += imgPaths.size(); + } + const int samplesPerImage = nbPoints / averallNbImages; + + ALICEVISION_LOG_TRACE("samplesPerImage: " << samplesPerImage); + + #pragma omp parallel for num_threads(3) + for (int g = 0; g& out_hdrSamples = out_samples[g]; + + const std::vector &imagePaths = imagePathsGroups[g]; + out_hdrSamples.resize(imagePaths.size()); + + const std::vector& exposures = cameraExposures[g]; + + for (unsigned int i = 0; i < imagePaths.size(); ++i) + { + out_hdrSamples[i].exposure = exposures[i]; + out_hdrSamples[i].colors.reserve(samplesPerImage); + std::vector>& colors = out_hdrSamples[i].colors; + + Image img; + readImage(imagePaths[i], img, EImageColorSpace::LINEAR); + + const std::size_t width = img.Width(); + const std::size_t height = img.Height(); + + const std::size_t minSize = std::min(width, height) * 0.97; + const Vec2i center(width / 2, height / 2); + + const int xMin = std::ceil(center(0) - minSize / 2); + const int yMin = std::ceil(center(1) - minSize / 2); + const int xMax = std::floor(center(0) + minSize / 2); + const int yMax = std::floor(center(1) + minSize / 2); + const std::size_t maxDist2 = pow(minSize * 0.5, 2); + + const int step = std::ceil(minSize / sqrt(samplesPerImage)); + + // extract samples + for (int y = yMin; y <= yMax - step; y += step) + { + for (int x = xMin; x <= xMax - step; x += step) + { + if (fisheye) + { + std::size_t dist2 = pow(center(0) - x, 2) + pow(center(1) - y, 2); + if (dist2 > maxDist2) + continue; + } + 
RGBfColor& c = img(y, x); + colors.push_back(Rgb(c(0), c(1), c(2))); + } + } + } + } +} + + + +} // namespace hdr +} // namespace aliceVision diff --git a/src/aliceVision/hdr/sampling.hpp b/src/aliceVision/hdr/sampling.hpp new file mode 100644 index 0000000000..a6441e43c8 --- /dev/null +++ b/src/aliceVision/hdr/sampling.hpp @@ -0,0 +1,33 @@ +// This file is part of the AliceVision project. +// Copyright (c) 2019 AliceVision contributors. +// This Source Code Form is subject to the terms of the Mozilla Public License, +// v. 2.0. If a copy of the MPL was not distributed with this file, +// You can obtain one at https://mozilla.org/MPL/2.0/. + +#pragma once + +#include +#include + +namespace aliceVision { +namespace hdr { + + +struct ImageSamples +{ + std::vector> colors; + int camId = 0; + double exposure = 0.0; +}; + + +void extractSamples( + std::vector>& out_samples, + const std::vector>& imagePathsGroups, + const std::vector< std::vector >& cameraExposures, + const int nbPoints, + const bool fisheye); + + +} // namespace hdr +} // namespace aliceVision From a2330ebb8e5bf1a7133446ed2e11c9e4383cbf3a Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Mon, 9 Dec 2019 11:54:02 +0100 Subject: [PATCH 143/174] [hdr] rename BundleAdjustmentCalibration into LaguerreBACalibration --- src/aliceVision/hdr/CMakeLists.txt | 4 ++-- ...stmentCalibration.cpp => LaguerreBACalibration.cpp} | 6 +++--- ...stmentCalibration.hpp => LaguerreBACalibration.hpp} | 10 +++------- src/software/convert/main_convertLDRToHDR.cpp | 4 ++-- 4 files changed, 10 insertions(+), 14 deletions(-) rename src/aliceVision/hdr/{BundleAdjustmentCalibration.cpp => LaguerreBACalibration.cpp} (98%) rename src/aliceVision/hdr/{BundleAdjustmentCalibration.hpp => LaguerreBACalibration.hpp} (85%) diff --git a/src/aliceVision/hdr/CMakeLists.txt b/src/aliceVision/hdr/CMakeLists.txt index 95b8bfee3b..b657154ee1 100644 --- a/src/aliceVision/hdr/CMakeLists.txt +++ b/src/aliceVision/hdr/CMakeLists.txt @@ -1,7 +1,7 @@ # 
Headers set(hdr_files_headers - BundleAdjustmentCalibration.hpp + LaguerreBACalibration.hpp sampling.hpp rgbCurve.hpp RobertsonCalibrate.hpp @@ -13,7 +13,7 @@ set(hdr_files_headers # Sources set(hdr_files_sources - BundleAdjustmentCalibration.cpp + LaguerreBACalibration.cpp sampling.cpp rgbCurve.cpp RobertsonCalibrate.cpp diff --git a/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp b/src/aliceVision/hdr/LaguerreBACalibration.cpp similarity index 98% rename from src/aliceVision/hdr/BundleAdjustmentCalibration.cpp rename to src/aliceVision/hdr/LaguerreBACalibration.cpp index 66b94fd9e6..1fb690879d 100644 --- a/src/aliceVision/hdr/BundleAdjustmentCalibration.cpp +++ b/src/aliceVision/hdr/LaguerreBACalibration.cpp @@ -4,7 +4,7 @@ // v. 2.0. If a copy of the MPL was not distributed with this file, // You can obtain one at https://mozilla.org/MPL/2.0/. -#include "BundleAdjustmentCalibration.hpp" +#include "LaguerreBACalibration.hpp" #include "sampling.hpp" #include @@ -25,7 +25,7 @@ namespace hdr { using namespace aliceVision::image; -BundleAdjustmentCalibration::BundleAdjustmentCalibration() +LaguerreBACalibration::LaguerreBACalibration() { } @@ -114,7 +114,7 @@ struct HdrResidual const Rgb& _colorB; }; -void BundleAdjustmentCalibration::process( +void LaguerreBACalibration::process( const std::vector>& imagePathsGroups, const std::size_t channelQuantization, const std::vector>& cameraExposures, diff --git a/src/aliceVision/hdr/BundleAdjustmentCalibration.hpp b/src/aliceVision/hdr/LaguerreBACalibration.hpp similarity index 85% rename from src/aliceVision/hdr/BundleAdjustmentCalibration.hpp rename to src/aliceVision/hdr/LaguerreBACalibration.hpp index eee0a6605a..f84d52240e 100644 --- a/src/aliceVision/hdr/BundleAdjustmentCalibration.hpp +++ b/src/aliceVision/hdr/LaguerreBACalibration.hpp @@ -23,13 +23,13 @@ namespace hdr { * http://www.cs.columbia.edu/CAVE/publications/pdfs/Mitsunaga_CVPR99.pdf * * Some precisions are also provided in: - * "Radiometric alignment and 
vignetting calibration", Pablo d’Angelo, ICVS 2007 + * "Radiometric alignment and vignetting calibration", Pablo d'Angelo, ICVS 2007 * http://hugin.sourceforge.net/tech/icvs2007_final.pdf */ -class BundleAdjustmentCalibration +class LaguerreBACalibration { public: - explicit BundleAdjustmentCalibration(); + explicit LaguerreBACalibration(); /** * @brief @@ -48,10 +48,6 @@ class BundleAdjustmentCalibration bool fisheye, bool refineExposures, rgbCurve &response); - -private: - /// Dimension of the response ie number of basis vectors to calculate the response function - unsigned int _dimension; }; } // namespace hdr diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 36d1cd885f..ea73288a8d 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -20,7 +20,7 @@ #include #include #include -#include +#include /*Command line parameters*/ #include @@ -536,7 +536,7 @@ int main(int argc, char * argv[]) ALICEVISION_LOG_INFO("Laguerre calibration"); if (calibrationNbPoints <= 0) calibrationNbPoints = 1000000; - hdr::BundleAdjustmentCalibration calibration; + hdr::LaguerreBACalibration calibration; bool refineExposures = false; calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, refineExposures, response); } From 00f8ae076ceafb2fef99f95947d656d21d722ca1 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Mon, 9 Dec 2019 15:02:56 +0100 Subject: [PATCH 144/174] Bug on image border for sampler --- src/aliceVision/image/Sampler.hpp | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/aliceVision/image/Sampler.hpp b/src/aliceVision/image/Sampler.hpp index 4bef6feae8..7e83a8423b 100644 --- a/src/aliceVision/image/Sampler.hpp +++ b/src/aliceVision/image/Sampler.hpp @@ -477,7 +477,15 @@ struct Sampler2d // If value too small, it should be so instable, so return the sampled value if( 
total_weight <= 0.2 ) { - return T(); + int row = floor(y); + int col = floor(x); + + if (row < 0) row = 0; + if (col < 0) col = 0; + if (row >= im_height) row = im_height - 1; + if (col >= im_width) col = im_width - 1; + + return src(row, col); } if( total_weight != 1.0 ) From a601611d12547653434dbcefb3e4efa6aa7c21ef Mon Sep 17 00:00:00 2001 From: fabienservant Date: Mon, 9 Dec 2019 15:03:09 +0100 Subject: [PATCH 145/174] WIP memory reduction --- .../pipeline/main_panoramaCompositing.cpp | 290 ++++++++++++------ .../pipeline/main_panoramaWarping.cpp | 4 +- 2 files changed, 190 insertions(+), 104 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index abe1002562..910f38dafe 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -42,6 +42,60 @@ typedef struct { } ConfigView; +template +bool makeImagePyramidCompatible(image::Image & output, size_t & out_offset_x, size_t & out_offset_y, const image::Image & input, size_t offset_x, size_t offset_y, size_t num_levels) { + + if (num_levels == 0) { + return false; + } + + double max_scale = 1.0 / pow(2.0, num_levels - 1); + + double low_offset_x = double(offset_x) * max_scale; + double low_offset_y = double(offset_y) * max_scale; + + /*Make sure offset is integer even at the lowest level*/ + double corrected_low_offset_x = floor(low_offset_x); + double corrected_low_offset_y = floor(low_offset_y); + + /*Add some borders on the top and left to make sure mask can be smoothed*/ + corrected_low_offset_x = std::max(0.0, corrected_low_offset_x - 3.0); + corrected_low_offset_y = std::max(0.0, corrected_low_offset_y - 3.0); + + /*Compute offset at largest level*/ + out_offset_x = size_t(corrected_low_offset_x / max_scale); + out_offset_y = size_t(corrected_low_offset_y / max_scale); + + /*Compute difference*/ + double doffset_x = double(offset_x) - double(out_offset_x); + double 
doffset_y = double(offset_y) - double(out_offset_y); + + /* update size with border update */ + double large_width = double(input.Width()) + doffset_x; + double large_height = double(input.Height()) + doffset_y; + + /* compute size at largest scale */ + double low_width = large_width * max_scale; + double low_height = large_height * max_scale; + + /*Make sure width is integer event at the lowest level*/ + double corrected_low_width = ceil(low_width); + double corrected_low_height = ceil(low_height); + + /*Add some borders on the right and bottom to make sure mask can be smoothed*/ + corrected_low_width = corrected_low_width + 3; + corrected_low_height = corrected_low_height + 3; + + /*Compute size at largest level*/ + size_t width = size_t(corrected_low_width / max_scale); + size_t height = size_t(corrected_low_height / max_scale); + + output = image::Image(width, height, true, T(0.0f)); + output.block(doffset_y, doffset_x, input.Height(), input.Width()) = input; + + return true; +} + class Compositer { public: @@ -79,6 +133,10 @@ class Compositer { return true; } + virtual bool terminate() { + return true; + } + const aliceVision::image::Image & getPanorama() const { return _panorama; } @@ -173,18 +231,23 @@ bool convolveHorizontal(image::Image & output, const image::I float w = kernel(k); int col = j + k - radius; - if (col < 0 || col >= input.Width()) { - continue; + /* mirror 5432 | 123456 | 5432 */ + if (col < 0) { + col = - col; + } + + if (col >= input.Width()) { + col = input.Width() - 1 - (col + 1 - input.Width()); } sum += w * input(i, col); sumw += w; } - output(i, j).r() = sum.r(); - output(i, j).g() = sum.g(); - output(i, j).b() = sum.b(); - output(i, j).a() = sum.a(); + output(i, j).r() = sum.r() / sumw; + output(i, j).g() = sum.g() / sumw; + output(i, j).b() = sum.b() / sumw; + output(i, j).a() = sum.a() / sumw; } } @@ -215,18 +278,22 @@ bool convolveVertical(image::Image & output, const image::Ima float w = kernel(k); int row = i + k - radius; - if 
(row < 0 || row >= input.Height()) { - continue; + if (row < 0) { + row = -row; + } + + if (row >= input.Height()) { + row = input.Height() - 1 - (row + 1 - input.Height()); } sum += w * input(row, j); sumw += w; } - output(i, j).r() = sum.r(); - output(i, j).g() = sum.g(); - output(i, j).b() = sum.b(); - output(i, j).a() = sum.a(); + output(i, j).r() = sum.r() / sumw; + output(i, j).g() = sum.g() / sumw; + output(i, j).b() = sum.b() / sumw; + output(i, j).a() = sum.a() / sumw; } } @@ -433,10 +500,11 @@ class LaplacianPyramid { for (int l = 0; l < _levels.size(); l++) { for (int i = 0; i < _levels[l].Height(); i++) { for (int j = 0; j < _levels[l].Width(); j++) { - if (_weights[l](i, j) < 1e-12) { + if (_weights[l](i, j) < 1e-6) { _levels[l](i, j) = image::RGBAfColor(0.0,0.0,0.0,0.0); continue; } + _levels[l](i, j).r() = _levels[l](i, j).r() / _weights[l](i, j); _levels[l](i, j).g() = _levels[l](i, j).g() / _weights[l](i, j); _levels[l](i, j).b() = _levels[l](i, j).b() / _weights[l](i, j); @@ -518,7 +586,7 @@ class LaplacianPyramid { return true; } - bool merge(const LaplacianPyramid & other) { + bool merge(const LaplacianPyramid & other, size_t offset_x, size_t offset_y) { for (int l = 0; l < _levels.size(); l++) { @@ -528,16 +596,27 @@ class LaplacianPyramid { image::Image & weight = _weights[l]; const image::Image & oweight = other._weights[l]; - for (int i = 0; i < img.Height(); i++) { - for (int j = 0; j < img.Width(); j++) { + for (int i = 0; i < oimg.Height(); i++) { + + int di = i + offset_y; + if (di >= img.Height()) continue; + + for (int j = 0; j < oimg.Width(); j++) { + int dj = j + offset_x; + if (dj >= weight.Width()) { + dj = dj - weight.Width(); + } - img(i, j).r() += oimg(i, j).r() * oweight(i, j); - img(i, j).g() += oimg(i, j).g() * oweight(i, j); - img(i, j).b() += oimg(i, j).b() * oweight(i, j); - weight(i, j) += oweight(i, j); + img(di, dj).r() += oimg(i, j).r() * oweight(i, j); + img(di, dj).g() += oimg(i, j).g() * oweight(i, j); + img(di, 
dj).b() += oimg(i, j).b() * oweight(i, j); + weight(di, dj) += oweight(i, j); } } + + offset_x /= 2; + offset_y /= 2; } @@ -549,13 +628,56 @@ class LaplacianPyramid { std::vector> _weights; }; -int count = 0; +class DistanceSeams { +public: + DistanceSeams(size_t outputWidth, size_t outputHeight) : + _weights(outputWidth, outputHeight, true, 0.0f) + { + } + + virtual bool append(aliceVision::image::Image & output, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { + + if (inputMask.size() != inputWeights.size()) { + return false; + } + + for (int i = 0; i < inputMask.Height(); i++) { + + int di = i + offset_y; + + for (int j = 0; j < inputMask.Width(); j++) { + + output(i, j) = 0.0f; + + if (!inputMask(i, j)) { + continue; + } + + int dj = j + offset_x; + if (dj >= _weights.Width()) { + dj = dj - _weights.Width(); + } + + if (inputWeights(i, j) > _weights(di, dj)) { + output(i, j) = 1.0f; + _weights(di, dj) = inputWeights(i, j); + } + } + } + + return true; + } -class LaplacianCompositer : public AlphaCompositer { + +private: + image::Image _weights; +}; + +class LaplacianCompositer : public Compositer { public: LaplacianCompositer(size_t outputWidth, size_t outputHeight) : - AlphaCompositer(outputWidth, outputHeight), + Compositer(outputWidth, outputHeight), _pyramid_panorama(outputWidth, outputHeight, 6) { } @@ -667,95 +789,50 @@ class LaplacianCompositer : public AlphaCompositer { virtual bool append(const aliceVision::image::Image & color, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { - aliceVision::image::Image color_big(_panorama.Width(), _panorama.Height(), true, image::RGBfColor(0.0f, 0.0f, 0.0f)); - aliceVision::image::Image inputMask_big(_panorama.Width(), _panorama.Height(), true, 0); - aliceVision::image::Image inputWeights_big(_panorama.Width(), _panorama.Height(), true, 0); 
aliceVision::image::Image color_big_feathered; + size_t new_offset_x, new_offset_y; + aliceVision::image::Image color_pot; + aliceVision::image::Image mask_pot; + aliceVision::image::Image weights_pot; + makeImagePyramidCompatible(color_pot, new_offset_x, new_offset_y, color, offset_x, offset_y, 6); + makeImagePyramidCompatible(mask_pot, new_offset_x, new_offset_y, inputMask, offset_x, offset_y, 6); + makeImagePyramidCompatible(weights_pot, new_offset_x, new_offset_y, inputWeights, offset_x, offset_y, 6); - for (int i = 0; i < color.Height(); i++) { - - int di = i + offset_y; - - for (int j = 0; j < color.Width(); j++) { - - int dj = j + offset_x; - - if (dj >= color_big.Width()) { - dj = dj - color_big.Width(); - } - - color_big(di, dj).r() = color(i, j).r(); - color_big(di, dj).g() = color(i, j).g(); - color_big(di, dj).b() = color(i, j).b(); - inputMask_big(di, dj) = inputMask(i, j); - inputWeights_big(di, dj) = inputWeights(i, j); - } - } aliceVision::image::Image feathered; - feathering(feathered, color_big, inputMask_big); - + feathering(feathered, color_pot, mask_pot); - aliceVision::image::Image view(_panorama.Width(), _panorama.Height(), true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); + aliceVision::image::Image view(feathered.Width(), feathered.Height(), true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); const image::Image & img = feathered; - for (int i = 0; i < view.Height(); i++) { for (int j = 0; j < view.Width(); j++) { - view(i, j).r() = std::log(std::max(0.001f, feathered(i, j).r())); - view(i, j).g() = std::log(std::max(0.001f, feathered(i, j).g())); - view(i, j).b() = std::log(std::max(0.001f, feathered(i, j).b())); + view(i, j).r() = std::log(std::max(1e-8f, feathered(i, j).r())); + view(i, j).g() = std::log(std::max(1e-8f, feathered(i, j).g())); + view(i, j).b() = std::log(std::max(1e-8f, feathered(i, j).b())); - if (inputMask_big(i ,j)) { + if (mask_pot(i ,j)) { - if (inputWeights_big(i,j) > _weightmap(i, j)) { - view(i, j).a() = 1.0f; - } - 
else { - view(i, j).a() = 0.0f; - } + view(i, j).a() = weights_pot(i, j); + } } } - LaplacianPyramid pyramid(_panorama.Width(), _panorama.Height(), 6); + LaplacianPyramid pyramid(feathered.Width(), feathered.Height(), 6); pyramid.apply(view); - - - _pyramid_panorama.merge(pyramid); + + _pyramid_panorama.merge(pyramid, new_offset_x, new_offset_y); - if (count == 72) { - image::Image res; - _pyramid_panorama.rebuild(res); - - { - char filename[FILENAME_MAX]; - const image::Image & img = res; - image::Image tmp(img.Width(), img.Height(), true, image::RGBfColor(0.0)); - sprintf(filename, "/home/mmoc/out.exr"); - for (int i = 0; i < img.Height(); i++) { - for (int j = 0; j < img.Width(); j++) { - //if (img(i, j).a() > 1e-3) { - tmp(i, j).r() = std::exp(img(i, j).r()); - tmp(i, j).g() = std::exp(img(i, j).g()); - tmp(i, j).b() = std::exp(img(i, j).b()); - - //} - } - } - - image::writeImage(filename, tmp, image::EImageColorSpace::SRGB); - } - } - count++; + @@ -765,26 +842,29 @@ class LaplacianCompositer : public AlphaCompositer { //_panorama = img.block(0, 0, _panorama.Height(), _panorama.Width()); //_mask = mask.block(0, 0, _panorama.Height(), _panorama.Width());*/ - for (int i = 0; i < inputWeights.Height(); i++) { - int di = i + offset_y; + return true; + } - for (int j = 0; j < inputWeights.Width(); j++) { + virtual bool terminate() { - int dj = j + offset_x; + image::Image res; + _pyramid_panorama.rebuild(res); - if (dj >= _panorama.Width()) { - dj = dj - _panorama.Width(); - } - - if (!inputMask(i, j)) { - continue; - } - - _weightmap(di, dj) = std::max(_weightmap(di, dj), inputWeights(i, j)); + + char filename[FILENAME_MAX]; + const image::Image & img = res; + image::Image tmp(img.Width(), img.Height(), true, image::RGBfColor(0.0)); + sprintf(filename, "/home/mmoc/out.exr"); + for (int i = 0; i < img.Height(); i++) { + for (int j = 0; j < img.Width(); j++) { + tmp(i, j).r() = std::exp(img(i, j).r()); + tmp(i, j).g() = std::exp(img(i, j).g()); + tmp(i, j).b() = 
std::exp(img(i, j).b()); } } + image::writeImage(filename, tmp, image::EImageColorSpace::SRGB); return true; } @@ -895,7 +975,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; - if (1/*pos == 24 || pos == 25*/) + if (pos == 24 || pos == 25) { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); @@ -914,6 +994,8 @@ int main(int argc, char **argv) { else { compositer = std::unique_ptr(new AlphaCompositer(panoramaSize.first, panoramaSize.second)); } + + DistanceSeams distanceseams(panoramaSize.first, panoramaSize.second); for (const ConfigView & cv : configViews) { @@ -941,10 +1023,14 @@ int main(int argc, char **argv) { image::Image weights; image::readImage(weightsPath, weights, image::EImageColorSpace::NO_CONVERSION); + image::Image seams(weights.Width(), weights.Height()); + distanceseams.append(seams, mask, weights, cv.offset_x, cv.offset_y); - compositer->append(source, mask, weights, cv.offset_x, cv.offset_y); + compositer->append(source, mask, seams, cv.offset_x, cv.offset_y); } + compositer->terminate(); + ALICEVISION_LOG_INFO("Write output panorama to file " << outputPanorama); const aliceVision::image::Image & panorama = compositer->getPanorama(); image::writeImage(outputPanorama, panorama, image::EImageColorSpace::SRGB); diff --git a/src/software/pipeline/main_panoramaWarping.cpp b/src/software/pipeline/main_panoramaWarping.cpp index 5044ac8712..3fb4ee06f0 100644 --- a/src/software/pipeline/main_panoramaWarping.cpp +++ b/src/software/pipeline/main_panoramaWarping.cpp @@ -991,7 +991,7 @@ class GaussianWarper : public Warper { /** * Create buffer */ - _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height()); + _color = aliceVision::image::Image(coordinates.Width(), coordinates.Height(), true, image::RGBfColor(1.0, 0.0, 0.0)); /** @@ -1225,7 +1225,7 @@ int main(int argc, char **argv) { */ size_t pos = 0; for (const std::shared_ptr & viewIt: 
viewsOrderedByName) { - + /** * Retrieve view */ From b86b7f50286f9c44466799d643fc80ebbdc81c46 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Mon, 9 Dec 2019 21:28:58 +0100 Subject: [PATCH 146/174] [hdr] laguerre: update input exposures if refined --- src/aliceVision/hdr/LaguerreBACalibration.cpp | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/aliceVision/hdr/LaguerreBACalibration.cpp b/src/aliceVision/hdr/LaguerreBACalibration.cpp index 1fb690879d..82525f6950 100644 --- a/src/aliceVision/hdr/LaguerreBACalibration.cpp +++ b/src/aliceVision/hdr/LaguerreBACalibration.cpp @@ -264,6 +264,24 @@ void LaguerreBACalibration::process( curve[i] = relativeWB[channel] + laguerreFunction(offset + laguerreParam[channel], i * step); } } + + if (refineExposures) + { + { + // TODO: realign exposures on input values? + } + + + // Update input exposures with optimized exposure values + for (int i = 0; i < cameraExposures.size(); ++i) + { + std::vector& camExp = cameraExposures[i]; + for (int j = 0; j < camExp.size(); ++j) + { + camExp[j] = exposures[std::make_pair(0, camExp[j])]; + } + } + } } From 6bf245a37e86fd492e8e884624ac8d5e4ab79138 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Mon, 9 Dec 2019 21:29:16 +0100 Subject: [PATCH 147/174] [hdr] laguerre: lock exposures if not refined --- src/aliceVision/hdr/LaguerreBACalibration.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/aliceVision/hdr/LaguerreBACalibration.cpp b/src/aliceVision/hdr/LaguerreBACalibration.cpp index 82525f6950..e6b440fcaa 100644 --- a/src/aliceVision/hdr/LaguerreBACalibration.cpp +++ b/src/aliceVision/hdr/LaguerreBACalibration.cpp @@ -235,6 +235,14 @@ void LaguerreBACalibration::process( } } + if (!refineExposures) + { + for(auto& exp: exposures) + { + problem.SetParameterBlockConstant(&exp.second); + } + } + ALICEVISION_LOG_INFO("BA Solve"); ceres::Solver::Options solverOptions; From 39744c1a248cb1dede5f39774f8c0d9ded7ef1f0 Mon Sep 17 00:00:00 2001 From: 
Fabien Castan Date: Mon, 9 Dec 2019 21:33:27 +0100 Subject: [PATCH 148/174] [hdr] add weighting function in Laguerre and add image downscale in calibration --- src/aliceVision/hdr/DebevecCalibrate.cpp | 26 +++++++ src/aliceVision/hdr/DebevecCalibrate.hpp | 1 + src/aliceVision/hdr/LaguerreBACalibration.cpp | 73 ++++++++++++++----- src/aliceVision/hdr/LaguerreBACalibration.hpp | 3 +- src/aliceVision/hdr/sampling.cpp | 28 ++++++- src/aliceVision/hdr/sampling.hpp | 5 +- src/software/convert/main_convertLDRToHDR.cpp | 11 ++- 7 files changed, 122 insertions(+), 25 deletions(-) diff --git a/src/aliceVision/hdr/DebevecCalibrate.cpp b/src/aliceVision/hdr/DebevecCalibrate.cpp index 5371688a90..cca17cb04e 100644 --- a/src/aliceVision/hdr/DebevecCalibrate.cpp +++ b/src/aliceVision/hdr/DebevecCalibrate.cpp @@ -13,6 +13,9 @@ #include #include +#include + + namespace aliceVision { namespace hdr { @@ -22,6 +25,7 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im const std::size_t channelQuantization, const std::vector > ×, const int nbPoints, + const int calibrationDownscale, const bool fisheye, const rgbCurve &weight, const float lambda, @@ -59,6 +63,28 @@ bool DebevecCalibrate::process(const std::vector< std::vector> & im for (int i = 0; i < imagePaths.size(); i++) { image::readImage(imagePaths[i], ldrImagesGroup[i], image::EImageColorSpace::SRGB); + if (calibrationDownscale != 1.0f) + { + image::Image& img = ldrImagesGroup[i]; + unsigned int w = img.Width(); + unsigned int h = img.Height(); + unsigned int nw = (unsigned int)(floor(float(w) / calibrationDownscale)); + unsigned int nh = (unsigned int)(floor(float(h) / calibrationDownscale)); + + image::Image rescaled(nw, nh); + + oiio::ImageSpec imageSpecResized(nw, nh, 3, oiio::TypeDesc::FLOAT); + oiio::ImageSpec imageSpecOrigin(w, h, 3, oiio::TypeDesc::FLOAT); + oiio::ImageBuf bufferOrigin(imageSpecOrigin, img.data()); + oiio::ImageBuf bufferResized(imageSpecResized, rescaled.data()); + 
oiio::ImageBufAlgo::resample(bufferResized, bufferOrigin); + + const oiio::ImageBuf inBuf(oiio::ImageSpec(w, h, 3, oiio::TypeDesc::FLOAT), img.data()); + oiio::ImageBuf outBuf(oiio::ImageSpec(nw, nh, 3, oiio::TypeDesc::FLOAT), rescaled.data()); + + oiio::ImageBufAlgo::resize(outBuf, inBuf); + img.swap(rescaled); + } } const std::vector & ldrTimes = times[g]; diff --git a/src/aliceVision/hdr/DebevecCalibrate.hpp b/src/aliceVision/hdr/DebevecCalibrate.hpp index f8c59c54f9..062eee8f64 100644 --- a/src/aliceVision/hdr/DebevecCalibrate.hpp +++ b/src/aliceVision/hdr/DebevecCalibrate.hpp @@ -31,6 +31,7 @@ class DebevecCalibrate const std::size_t channelQuantization, const std::vector> ×, const int nbPoints, + const int calibrationDownscale, const bool fisheye, const rgbCurve &weight, const float lambda, diff --git a/src/aliceVision/hdr/LaguerreBACalibration.cpp b/src/aliceVision/hdr/LaguerreBACalibration.cpp index e6b440fcaa..2b7ca35fc7 100644 --- a/src/aliceVision/hdr/LaguerreBACalibration.cpp +++ b/src/aliceVision/hdr/LaguerreBACalibration.cpp @@ -42,9 +42,33 @@ T laguerreFunctionInv(const T& a, const T& x) return laguerreFunction(-a, x); } + +struct GaussianWeight +{ + template + T operator()(const T& x) const + { + static const double mu = 0.5; + static const double sigma = 1.0 / (5.0 * std::sqrt(2.0)); + const T v = x - 0.5; + return exp(-v * v / (2.0 * sigma * sigma)); + } +}; + +struct NoWeight +{ + template + T operator()(const T& x) const + { + return T(1.0); + } +}; + + /** * */ +template struct HdrResidual { HdrResidual(const Rgb& a, const Rgb& b) @@ -55,6 +79,7 @@ struct HdrResidual template bool operator()(const T* const laguerreParam, const T* const relativeWB_R, const T* const relativeWB_B, const T* const expA, const T* const expB, T* residual) const { + static const WeightFunc _weightFunc; static const int red = 0; static const int green = 1; static const int blue = 2; @@ -84,48 +109,61 @@ struct HdrResidual { // GREEN T greenParam = laguerreParam[green]; 
- T a = laguerreFunction(greenParam, T(_colorA(green))); - T b = laguerreFunction(greenParam, T(_colorB(green))); - residual[green] = abs(laguerreFunctionInv(greenParam, laguerreFunction(greenParam, T(_colorA(green))) * (*expB) / (*expA)) - T(_colorB(green))) - + abs(laguerreFunctionInv(greenParam, laguerreFunction(greenParam, T(_colorB(green))) * (*expA) / (*expB)) - T(_colorA(green))); + T colorA = T(_colorA(green)); + T colorB = T(_colorB(green)); + // T a = laguerreFunction(greenParam, colorA); + // T b = laguerreFunction(greenParam, colorB); + T errorCost = abs(laguerreFunctionInv(greenParam, laguerreFunction(greenParam, colorA) * (*expB) / (*expA)) - colorB) + + abs(laguerreFunctionInv(greenParam, laguerreFunction(greenParam, colorB) * (*expA) / (*expB)) - colorA); + T weight = _weightFunc(colorA) * _weightFunc(colorB); // weight by the validity on the input values + residual[green] = errorCost * weight; } { // RED T redParam = laguerreParam[green] + laguerreParam[red]; - T a = *relativeWB_R + laguerreFunction(redParam, T(_colorA(red))); - T b = *relativeWB_R + laguerreFunction(redParam, T(_colorB(red))); - residual[red] = abs(laguerreFunctionInv(redParam, laguerreFunction(redParam, T(_colorA(red))) * (*expB) / (*expA)) - T(_colorB(red))) - + abs(laguerreFunctionInv(redParam, laguerreFunction(redParam, T(_colorB(red))) * (*expA) / (*expB)) - T(_colorA(red))); + T colorA = T(_colorA(red)); + T colorB = T(_colorB(red)); + // T a = *relativeWB_R * laguerreFunction(redParam, colorA); + // T b = *relativeWB_R * laguerreFunction(redParam, colorB); + T errorCost = abs(laguerreFunctionInv(redParam, laguerreFunction(redParam, colorA) * (*expB) / (*expA)) - colorB) + + abs(laguerreFunctionInv(redParam, laguerreFunction(redParam, colorB) * (*expA) / (*expB)) - colorA); + T weight = _weightFunc(colorA) * _weightFunc(colorB); // weight by the validity on the input values + residual[red] = errorCost * weight; } { // BLUE T blueParam = laguerreParam[green] + 
laguerreParam[blue]; - T a = *relativeWB_B + laguerreFunction(blueParam, T(_colorA(blue))); - T b = *relativeWB_B + laguerreFunction(blueParam, T(_colorB(blue))); - residual[blue] = abs(laguerreFunctionInv(blueParam, laguerreFunction(blueParam, T(_colorA(blue))) * (*expB) / (*expA)) - T(_colorB(blue))) - + abs(laguerreFunctionInv(blueParam, laguerreFunction(blueParam, T(_colorB(blue))) * (*expA) / (*expB)) - T(_colorA(blue))); + T colorA = T(_colorA(blue)); + T colorB = T(_colorB(blue)); + // T a = *relativeWB_B * laguerreFunction(blueParam, colorA); + // T b = *relativeWB_B * laguerreFunction(blueParam, colorB); + T errorCost = abs(laguerreFunctionInv(blueParam, laguerreFunction(blueParam, colorA) * (*expB) / (*expA)) - colorB) + + abs(laguerreFunctionInv(blueParam, laguerreFunction(blueParam, colorB) * (*expA) / (*expB)) - colorA); + T weight = _weightFunc(colorA) * _weightFunc(colorB); // weight by the validity on the input values + residual[blue] = errorCost * weight; } return true; } private: - const Rgb& _colorA; + const Rgb& _colorA; // TODO: T[3] const Rgb& _colorB; }; void LaguerreBACalibration::process( const std::vector>& imagePathsGroups, const std::size_t channelQuantization, - const std::vector>& cameraExposures, + std::vector>& cameraExposures, int nbPoints, + int imageDownscale, bool fisheye, bool refineExposures, rgbCurve &response) { ALICEVISION_LOG_DEBUG("Extract color samples"); std::vector> samples; - extractSamples(samples, imagePathsGroups, cameraExposures, nbPoints, fisheye); + extractSamples(samples, imagePathsGroups, cameraExposures, nbPoints, imageDownscale, fisheye); ALICEVISION_LOG_DEBUG("Create exposure list"); std::map, double> exposures; @@ -204,6 +242,7 @@ void LaguerreBACalibration::process( }*/ } + // Convert selected samples into residual blocks for (int g = 0; g < samples.size(); ++g) { std::vector& hdrSamples = samples[g]; @@ -224,8 +263,8 @@ void LaguerreBACalibration::process( if(hNextNext == h) continue; } - 
ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction( - new HdrResidual(hdrSamples[h].colors[i], hdrSamples[hNext].colors[i])); + ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction, 3, 3, 1, 1, 1, 1>( + new HdrResidual(hdrSamples[h].colors[i], hdrSamples[hNext].colors[i])); double& expA = exposures[std::make_pair(0, cameraExposures[g][h])]; double& expB = exposures[std::make_pair(0, cameraExposures[g][hNext])]; diff --git a/src/aliceVision/hdr/LaguerreBACalibration.hpp b/src/aliceVision/hdr/LaguerreBACalibration.hpp index f84d52240e..c4951b53ba 100644 --- a/src/aliceVision/hdr/LaguerreBACalibration.hpp +++ b/src/aliceVision/hdr/LaguerreBACalibration.hpp @@ -43,8 +43,9 @@ class LaguerreBACalibration void process( const std::vector>& imagePathsGroups, const std::size_t channelQuantization, - const std::vector>& cameraExposures, + std::vector>& cameraExposures, int nbPoints, + int imageDownscale, bool fisheye, bool refineExposures, rgbCurve &response); diff --git a/src/aliceVision/hdr/sampling.cpp b/src/aliceVision/hdr/sampling.cpp index 108d3ea39c..2e2a1420e3 100644 --- a/src/aliceVision/hdr/sampling.cpp +++ b/src/aliceVision/hdr/sampling.cpp @@ -9,6 +9,8 @@ #include #include +#include + namespace aliceVision { namespace hdr { @@ -19,8 +21,9 @@ void extractSamples( std::vector>& out_samples, const std::vector>& imagePathsGroups, const std::vector< std::vector >& cameraExposures, - const int nbPoints, - const bool fisheye + int nbPoints, + int calibrationDownscale, + bool fisheye ) { const int nbGroups = imagePathsGroups.size(); @@ -53,6 +56,27 @@ void extractSamples( Image img; readImage(imagePaths[i], img, EImageColorSpace::LINEAR); + if (calibrationDownscale != 1.0f) + { + unsigned int w = img.Width(); + unsigned int h = img.Height(); + unsigned int nw = (unsigned int)(floor(float(w) / calibrationDownscale)); + unsigned int nh = (unsigned int)(floor(float(h) / calibrationDownscale)); + + image::Image rescaled(nw, nh); + + 
oiio::ImageSpec imageSpecResized(nw, nh, 3, oiio::TypeDesc::FLOAT); + oiio::ImageSpec imageSpecOrigin(w, h, 3, oiio::TypeDesc::FLOAT); + oiio::ImageBuf bufferOrigin(imageSpecOrigin, img.data()); + oiio::ImageBuf bufferResized(imageSpecResized, rescaled.data()); + oiio::ImageBufAlgo::resample(bufferResized, bufferOrigin); + + const oiio::ImageBuf inBuf(oiio::ImageSpec(w, h, 3, oiio::TypeDesc::FLOAT), img.data()); + oiio::ImageBuf outBuf(oiio::ImageSpec(nw, nh, 3, oiio::TypeDesc::FLOAT), rescaled.data()); + + oiio::ImageBufAlgo::resize(outBuf, inBuf); + img.swap(rescaled); + } const std::size_t width = img.Width(); const std::size_t height = img.Height(); diff --git a/src/aliceVision/hdr/sampling.hpp b/src/aliceVision/hdr/sampling.hpp index a6441e43c8..c7795f5932 100644 --- a/src/aliceVision/hdr/sampling.hpp +++ b/src/aliceVision/hdr/sampling.hpp @@ -25,8 +25,9 @@ void extractSamples( std::vector>& out_samples, const std::vector>& imagePathsGroups, const std::vector< std::vector >& cameraExposures, - const int nbPoints, - const bool fisheye); + int nbPoints, + int calibrationDownscale, + bool fisheye); } // namespace hdr diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index ea73288a8d..560a769567 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -112,6 +112,8 @@ int main(int argc, char * argv[]) bool fisheye = false; int channelQuantizationPower = 10; int calibrationNbPoints = 0; + int calibrationDownscale = 4; + bool refineExposures = false; std::string calibrationWeightFunction = "default"; hdr::EFunctionType fusionWeightFunction = hdr::EFunctionType::GAUSSIAN; @@ -146,6 +148,10 @@ int main(int argc, char * argv[]) "Weight function used to fuse all LDR images together (gaussian, triangle, plateau).") ("calibrationNbPoints", po::value(&calibrationNbPoints)->default_value(calibrationNbPoints), "Number of points used to calibrate (Use 0 for 
automatic selection based on the calibration method).") + ("calibrationDownscale", po::value(&calibrationDownscale)->default_value(calibrationDownscale), + "Image downscale used to calibration the response function.") + ("calibrationRefineExposures", po::value(&refineExposures)->default_value(refineExposures), + "Refine exposures provided by metadata (shutter speed, f-number, iso). Only available for 'laguerre' calibration method. Default value is set to 0.") ; po::options_description logParams("Log parameters"); @@ -482,7 +488,7 @@ int main(int argc, char * argv[]) if(calibrationNbPoints <= 0) calibrationNbPoints = 10000; hdr::DebevecCalibrate calibration; - calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, calibrationWeight, lambda, response); + calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, calibrationDownscale, fisheye, calibrationWeight, lambda, response); { std::string methodName = ECalibrationMethod_enumToString(calibrationMethod); @@ -537,8 +543,7 @@ int main(int argc, char * argv[]) if (calibrationNbPoints <= 0) calibrationNbPoints = 1000000; hdr::LaguerreBACalibration calibration; - bool refineExposures = false; - calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, fisheye, refineExposures, response); + calibration.process(groupedFilenames, channelQuantization, groupedExposures, calibrationNbPoints, calibrationDownscale, fisheye, refineExposures, response); } break; } From ee195304e496bb1cbd11f577b12e39b9772515ea Mon Sep 17 00:00:00 2001 From: fabienservant Date: Tue, 10 Dec 2019 09:57:12 +0100 Subject: [PATCH 149/174] bug in pixel additions --- src/aliceVision/image/pixelTypes.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aliceVision/image/pixelTypes.hpp b/src/aliceVision/image/pixelTypes.hpp index b9692cf28f..715280867c 100644 --- a/src/aliceVision/image/pixelTypes.hpp +++ 
b/src/aliceVision/image/pixelTypes.hpp @@ -166,7 +166,7 @@ namespace aliceVision */ inline Rgb operator +( const Rgb& other ) const { - return Rgb( (*this(0) + other(0)), (*this(1) + other(1)), (*this(2) + other(2))); + return Rgb( ((*this)(0) + other(0)), ((*this)(1) + other(1)), ((*this)(2) + other(2))); } From 7754b81e0e2da5fa3187c0dfb1ca5d20cbc274be Mon Sep 17 00:00:00 2001 From: fabienservant Date: Tue, 10 Dec 2019 14:24:16 +0100 Subject: [PATCH 150/174] memory footprint reduction --- .../pipeline/main_panoramaCompositing.cpp | 514 ++++++++---------- 1 file changed, 227 insertions(+), 287 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 910f38dafe..a68212470e 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -208,99 +208,121 @@ class AlphaCompositer : public Compositer { aliceVision::image::Image _weightmap; }; -bool convolveHorizontal(image::Image & output, const image::Image & input, const Eigen::VectorXf & kernel) { +template +inline void convolveRow(typename image::Image::RowXpr output_row, typename image::Image::ConstRowXpr input_row, const Eigen::Vector & kernel) { - if (output.size() != input.size()) { - return false; - } + const int radius = 2; - if (kernel.size() % 2 == 0) { - return false; - } + for (int j = 0; j < input_row.cols(); j++) { - int radius = kernel.size() / 2; + T sum = T(); + float sumw = 0.0f; - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { + for (int k = 0; k < kernel.size(); k++) { - image::RGBAfColor sum = image::RGBAfColor(0.0,0.0,0.0,0.0); - float sumw = 0.0f; + float w = kernel(k); + int col = j + k - radius; - for (int k = 0; k < kernel.size(); k++) { + /* mirror 5432 | 123456 | 5432 */ + if (col < 0) { + col = - col; + } - float w = kernel(k); - int col = j + k - radius; + if (col >= input_row.cols()) { + col = input_row.cols() 
- 1 - (col + 1 - input_row.cols()); + } - /* mirror 5432 | 123456 | 5432 */ - if (col < 0) { - col = - col; - } + sum += w * input_row(col); + sumw += w; + } - if (col >= input.Width()) { - col = input.Width() - 1 - (col + 1 - input.Width()); - } + output_row(j) = sum / sumw; + } +} - sum += w * input(i, col); - sumw += w; - } +template +inline void convolveColumns(typename image::Image::RowXpr output_row, const image::Image & input_rows, const Eigen::Vector & kernel) { + + for (int j = 0; j < output_row.cols(); j++) { - output(i, j).r() = sum.r() / sumw; - output(i, j).g() = sum.g() / sumw; - output(i, j).b() = sum.b() / sumw; - output(i, j).a() = sum.a() / sumw; + T sum = T(); + float sumw = 0.0f; + + for (int k = 0; k < kernel.size(); k++) { + + float w = kernel(k); + sum += w * input_rows(k, j); + sumw += w; } - } - return true; + output_row(j) = sum / sumw; + } } -bool convolveVertical(image::Image & output, const image::Image & input, const Eigen::VectorXf & kernel) { +template +bool convolveGaussian5x5(image::Image & output, const image::Image & input) { if (output.size() != input.size()) { return false; } - if (kernel.size() % 2 == 0) { - return false; - } - - int radius = kernel.size() / 2; + Eigen::Vector kernel; + kernel[0] = 1.0f; + kernel[1] = 4.0f; + kernel[2] = 6.0f; + kernel[3] = 4.0f; + kernel[4] = 1.0f; + kernel = kernel / kernel.sum(); - for (int i = 0; i < output.Height(); i++) { - for (int j = 0; j < output.Width(); j++) { + image::Image buf(output.Width(), 5); - image::RGBAfColor sum = image::RGBAfColor(0.0,0.0,0.0,0.0); - float w = 0.0f; - float sumw = 0.0f; + int radius = 2; - for (int k = 0; k < kernel.size(); k++) { + convolveRow(buf.row(0), input.row(2), kernel); + convolveRow(buf.row(1), input.row(1), kernel); + convolveRow(buf.row(2), input.row(0), kernel); + convolveRow(buf.row(3), input.row(1), kernel); + convolveRow(buf.row(4), input.row(2), kernel); - float w = kernel(k); - int row = i + k - radius; + for (int i = 0; i < 
output.Height() - 3; i++) { - if (row < 0) { - row = -row; - } - - if (row >= input.Height()) { - row = input.Height() - 1 - (row + 1 - input.Height()); - } + convolveColumns(output.row(i), buf, kernel); - sum += w * input(row, j); - sumw += w; - } - output(i, j).r() = sum.r() / sumw; - output(i, j).g() = sum.g() / sumw; - output(i, j).b() = sum.b() / sumw; - output(i, j).a() = sum.a() / sumw; - } + buf.row(0) = buf.row(1); + buf.row(1) = buf.row(2); + buf.row(2) = buf.row(3); + buf.row(3) = buf.row(4); + convolveRow(buf.row(4), input.row(i + 3), kernel); } + /** + current row : -5 -4 -3 -2 -1 + next 1 : -4 -3 -2 -1 -2 + next 2 : -3 -2 -1 -2 -3 + */ + convolveColumns(output.row(output.Height() - 3), buf, kernel); + + buf.row(0) = buf.row(1); + buf.row(1) = buf.row(2); + buf.row(2) = buf.row(3); + buf.row(3) = buf.row(4); + convolveRow(buf.row(4), input.row(output.Height() - 2), kernel); + convolveColumns(output.row(output.Height() - 2), buf, kernel); + + buf.row(0) = buf.row(1); + buf.row(1) = buf.row(2); + buf.row(2) = buf.row(3); + buf.row(3) = buf.row(4); + convolveRow(buf.row(4), input.row(output.Height() - 3), kernel); + convolveColumns(output.row(output.Height() - 1), buf, kernel); + return true; } -bool downscale(aliceVision::image::Image & outputColor, const aliceVision::image::Image & inputColor) { + +template +bool downscale(aliceVision::image::Image & outputColor, const aliceVision::image::Image & inputColor) { size_t width = inputColor.Width(); size_t height = inputColor.Height(); @@ -317,7 +339,8 @@ bool downscale(aliceVision::image::Image & outputColor, const return true; } -bool upscale(aliceVision::image::Image & outputColor, const aliceVision::image::Image & inputColor) { +template +bool upscale(aliceVision::image::Image & outputColor, const aliceVision::image::Image & inputColor) { size_t width = inputColor.Width(); size_t height = inputColor.Height(); @@ -333,35 +356,17 @@ bool upscale(aliceVision::image::Image & outputColor, const a int dj = j * 
2; outputColor(di, dj) = inputColor(i, j); - outputColor(di, dj + 1) = image::RGBAfColor(0.0f,0.0f,0.0f,0.0f); - outputColor(di + 1, dj) = image::RGBAfColor(0.0f,0.0f,0.0f,0.0f); - outputColor(di + 1, dj + 1) = image::RGBAfColor(0.0f,0.0f,0.0f,0.0f); + outputColor(di, dj + 1) = T(); + outputColor(di + 1, dj) = T(); + outputColor(di + 1, dj + 1) = T(); } } return true; } -bool divideByAlpha(aliceVision::image::Image & inputOutputColor) { - - size_t width = inputOutputColor.Width(); - size_t height = inputOutputColor.Height(); - - - for (int i = 0; i < height; i++) { - - for (int j = 0; j < width; j++) { - - inputOutputColor(i, j).r() = inputOutputColor(i, j).r() / (inputOutputColor(i, j).a() + 1e-5); - inputOutputColor(i, j).g() = inputOutputColor(i, j).g() / (inputOutputColor(i, j).a() + 1e-5); - inputOutputColor(i, j).b() = inputOutputColor(i, j).b() / (inputOutputColor(i, j).a() + 1e-5); - } - } - - return true; -} - -bool substract(aliceVision::image::Image & AminusB, const aliceVision::image::Image & A, const aliceVision::image::Image & B) { +template +bool substract(aliceVision::image::Image & AminusB, const aliceVision::image::Image & A, const aliceVision::image::Image & B) { size_t width = AminusB.Width(); size_t height = AminusB.Height(); @@ -378,17 +383,15 @@ bool substract(aliceVision::image::Image & AminusB, const ali for (int j = 0; j < width; j++) { - AminusB(i, j).r() = A(i, j).r() - B(i, j).r(); - AminusB(i, j).g() = A(i, j).g() - B(i, j).g(); - AminusB(i, j).b() = A(i, j).b() - B(i, j).b(); - AminusB(i, j).a() = A(i, j).a() - B(i, j).a(); + AminusB(i, j) = A(i, j) - B(i, j); } } return true; } -bool addition(aliceVision::image::Image & AplusB, const aliceVision::image::Image & A, const aliceVision::image::Image & B) { +template +bool addition(aliceVision::image::Image & AplusB, const aliceVision::image::Image & A, const aliceVision::image::Image & B) { size_t width = AplusB.Width(); size_t height = AplusB.Height(); @@ -405,16 +408,37 @@ bool 
addition(aliceVision::image::Image & AplusB, const alice for (int j = 0; j < width; j++) { - AplusB(i, j).r() = A(i, j).r() + B(i, j).r(); - AplusB(i, j).g() = A(i, j).g() + B(i, j).g(); - AplusB(i, j).b() = A(i, j).b() + B(i, j).b(); - AplusB(i, j).a() = A(i, j).a() + B(i, j).a(); + AplusB(i, j) = A(i, j) + B(i, j); } } return true; } +void removeNegativeValues(aliceVision::image::Image & img) { + for (int i = 0; i < img.Height(); i++) { + for (int j = 0; j < img.Width(); j++) { + image::RGBfColor & pix = img(i, j); + image::RGBfColor rpix; + rpix.r() = std::exp(pix.r()); + rpix.g() = std::exp(pix.g()); + rpix.b() = std::exp(pix.b()); + + if (rpix.r() < 0.0) { + pix.r() = 0.0; + } + + if (rpix.g() < 0.0) { + pix.g() = 0.0; + } + + if (rpix.b() < 0.0) { + pix.b() = 0.0; + } + } + } +} + class LaplacianPyramid { public: LaplacianPyramid(size_t base_width, size_t base_height, size_t max_levels) { @@ -422,176 +446,65 @@ class LaplacianPyramid { size_t width = base_width; size_t height = base_height; + double max_scale = 1.0 / pow(2.0, max_levels - 1); + width = size_t(ceil(double(width) * max_scale) / max_scale); + height = size_t(ceil(double(height) * max_scale) / max_scale); + for (int lvl = 0; lvl < max_levels; lvl++) { - _levels.push_back(aliceVision::image::Image(base_width, base_height, true, image::RGBAfColor(0.0f,0.0f,0.0f,0.0f))); - _weights.push_back(aliceVision::image::Image(base_width, base_height, true, 0.0f)); + _levels.push_back(aliceVision::image::Image(width, height, true, image::RGBfColor(0.0f,0.0f,0.0f))); + _weights.push_back(aliceVision::image::Image(width, height, true, 0.0f)); - base_height /= 2; - base_width /= 2; + height /= 2; + width /= 2; } } - bool apply(const aliceVision::image::Image & source) { + bool apply(const aliceVision::image::Image & source, const aliceVision::image::Image & weights) { - /*Create the gaussian kernel*/ - Eigen::VectorXf kernel(5); - kernel[0] = 1.0f; - kernel[1] = 4.0f; - kernel[2] = 6.0f; - kernel[3] = 4.0f; - 
kernel[4] = 1.0f; - kernel = kernel / kernel.sum(); _levels[0] = source; - + _weights[0] = weights; for (int l = 1; l < _levels.size(); l++) { - aliceVision::image::Image buf(_levels[l - 1].Width(), _levels[l - 1].Height()); - aliceVision::image::Image buf2(_levels[l - 1].Width(), _levels[l - 1].Height()); - convolveHorizontal(buf, _levels[l - 1], kernel); - convolveVertical(buf2, buf, kernel); - downscale(_levels[l], buf2); - } + aliceVision::image::Image buf(_levels[l - 1].Width(), _levels[l - 1].Height()); + convolveGaussian5x5(buf, _levels[l - 1]); + downscale(_levels[l], buf); - for (int l = 0; l < _levels.size(); l++) { - for (int i = 0; i < _levels[l].Height(); i++) { - for (int j = 0; j < _levels[l].Width(); j++) { - _weights[l](i,j) = _levels[l](i, j).a(); - } - } + aliceVision::image::Image bufw(_weights[l - 1].Width(), _weights[l - 1].Height()); + convolveGaussian5x5(bufw, _weights[l - 1]); + downscale(_weights[l], bufw); } + for (int l = 0; l < _levels.size() - 1; l++) { - aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); - aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); - aliceVision::image::Image buf3(_levels[l].Width(), _levels[l].Height()); + aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); + aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); upscale(buf, _levels[l + 1]); - convolveHorizontal(buf2, buf, kernel ); - convolveVertical(buf3, buf2, kernel ); + convolveGaussian5x5(buf2, buf); - for (int i = 0; i < buf3.Height(); i++) { - for (int j = 0; j < buf3.Width(); j++) { - buf3(i,j) *= 4.0f; + for (int i = 0; i < buf2.Height(); i++) { + for (int j = 0; j < buf2.Width(); j++) { + buf2(i,j) *= 4.0f; } } - substract(_levels[l], _levels[l], buf3); + substract(_levels[l], _levels[l], buf2); } return true; } - - bool rebuild(image::Image & output) { - - /*Create the gaussian kernel*/ - Eigen::VectorXf kernel(5); - kernel[0] = 1.0f; - kernel[1] = 4.0f; - kernel[2] 
= 6.0f; - kernel[3] = 4.0f; - kernel[4] = 1.0f; - kernel = kernel / kernel.sum(); - - for (int l = 0; l < _levels.size(); l++) { - for (int i = 0; i < _levels[l].Height(); i++) { - for (int j = 0; j < _levels[l].Width(); j++) { - if (_weights[l](i, j) < 1e-6) { - _levels[l](i, j) = image::RGBAfColor(0.0,0.0,0.0,0.0); - continue; - } - - _levels[l](i, j).r() = _levels[l](i, j).r() / _weights[l](i, j); - _levels[l](i, j).g() = _levels[l](i, j).g() / _weights[l](i, j); - _levels[l](i, j).b() = _levels[l](i, j).b() / _weights[l](i, j); - } - } - } - - { - image::Image & img = _levels[_levels.size() - 1]; - for (int i = 0; i < img.Height(); i++) { - for (int j = 0; j < img.Width(); j++) { - image::RGBAfColor & pix = img(i, j); - image::RGBAfColor rpix; - rpix.r() = std::exp(pix.r()); - rpix.g() = std::exp(pix.g()); - rpix.b() = std::exp(pix.b()); - - if (rpix.r() < 0.0) { - pix.r() = 0.0; - } - - if (rpix.g() < 0.0) { - pix.g() = 0.0; - } - - if (rpix.b() < 0.0) { - pix.b() = 0.0; - } - } - } - } - - for (int l = _levels.size() - 2; l >= 0; l--) { - - aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); - aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); - aliceVision::image::Image buf3(_levels[l].Width(), _levels[l].Height()); - - upscale(buf, _levels[l + 1]); - convolveHorizontal(buf2, buf, kernel); - convolveVertical(buf3, buf2, kernel); - - for (int i = 0; i < buf3.Height(); i++) { - for (int j = 0; j < buf3.Width(); j++) { - buf3(i,j) *= 4.0f; - } - } - - addition(_levels[l], _levels[l], buf3); - - { - image::Image & img = _levels[l]; - for (int i = 0; i < img.Height(); i++) { - for (int j = 0; j < img.Width(); j++) { - image::RGBAfColor & pix = img(i, j); - image::RGBAfColor rpix; - rpix.r() = std::exp(pix.r()); - rpix.g() = std::exp(pix.g()); - rpix.b() = std::exp(pix.b()); - - if (rpix.r() < 0.0) { - pix.r() = 0.0; - } - - if (rpix.g() < 0.0) { - pix.g() = 0.0; - } - - if (rpix.b() < 0.0) { - pix.b() = 0.0; - } - } - } - 
} - } - - output = _levels[0]; - - return true; - } - + bool merge(const LaplacianPyramid & other, size_t offset_x, size_t offset_y) { for (int l = 0; l < _levels.size(); l++) { - image::Image & img = _levels[l]; - const image::Image & oimg = other._levels[l]; + image::Image & img = _levels[l]; + const image::Image & oimg = other._levels[l]; image::Image & weight = _weights[l]; const image::Image & oweight = other._weights[l]; @@ -623,8 +536,60 @@ class LaplacianPyramid { return true; } + bool rebuild(image::Image & output, image::Image & mask) { + + mask.fill(0); + + for (int l = 0; l < _levels.size(); l++) { + for (int i = 0; i < _levels[l].Height(); i++) { + for (int j = 0; j < _levels[l].Width(); j++) { + if (_weights[l](i, j) < 1e-6) { + _levels[l](i, j) = image::RGBfColor(0.0); + continue; + } + + _levels[l](i, j).r() = _levels[l](i, j).r() / _weights[l](i, j); + _levels[l](i, j).g() = _levels[l](i, j).g() / _weights[l](i, j); + _levels[l](i, j).b() = _levels[l](i, j).b() / _weights[l](i, j); + + if (l == 0) { + if (i < mask.Height() && j < mask.Width()) { + mask(i, j) = 1; + } + } + } + } + } + + removeNegativeValues(_levels[_levels.size() - 1]); + + for (int l = _levels.size() - 2; l >= 0; l--) { + + aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); + aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); + + upscale(buf, _levels[l + 1]); + convolveGaussian5x5(buf2, buf); + + for (int i = 0; i < buf2.Height(); i++) { + for (int j = 0; j < buf2.Width(); j++) { + buf2(i,j) *= 4.0f; + } + } + + addition(_levels[l], _levels[l], buf2); + removeNegativeValues(_levels[l]); + } + + output = _levels[0].block(0, 0, output.Height(), output.Width()); + + return true; + } + + + private: - std::vector> _levels; + std::vector> _levels; std::vector> _weights; }; @@ -676,9 +641,10 @@ class DistanceSeams { class LaplacianCompositer : public Compositer { public: - LaplacianCompositer(size_t outputWidth, size_t outputHeight) : + 
LaplacianCompositer(size_t outputWidth, size_t outputHeight, size_t bands) : Compositer(outputWidth, outputHeight), - _pyramid_panorama(outputWidth, outputHeight, 6) { + _pyramid_panorama(outputWidth, outputHeight, bands), + _bands(bands) { } @@ -795,82 +761,53 @@ class LaplacianCompositer : public Compositer { aliceVision::image::Image color_pot; aliceVision::image::Image mask_pot; aliceVision::image::Image weights_pot; - makeImagePyramidCompatible(color_pot, new_offset_x, new_offset_y, color, offset_x, offset_y, 6); - makeImagePyramidCompatible(mask_pot, new_offset_x, new_offset_y, inputMask, offset_x, offset_y, 6); - makeImagePyramidCompatible(weights_pot, new_offset_x, new_offset_y, inputWeights, offset_x, offset_y, 6); + makeImagePyramidCompatible(color_pot, new_offset_x, new_offset_y, color, offset_x, offset_y, _bands); + makeImagePyramidCompatible(mask_pot, new_offset_x, new_offset_y, inputMask, offset_x, offset_y, _bands); + makeImagePyramidCompatible(weights_pot, new_offset_x, new_offset_y, inputWeights, offset_x, offset_y, _bands); aliceVision::image::Image feathered; feathering(feathered, color_pot, mask_pot); + /*To log space for hdr*/ + for (int i = 0; i < feathered.Height(); i++) { + for (int j = 0; j < feathered.Width(); j++) { - aliceVision::image::Image view(feathered.Width(), feathered.Height(), true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)); - const image::Image & img = feathered; - - for (int i = 0; i < view.Height(); i++) { - - for (int j = 0; j < view.Width(); j++) { - - view(i, j).r() = std::log(std::max(1e-8f, feathered(i, j).r())); - view(i, j).g() = std::log(std::max(1e-8f, feathered(i, j).g())); - view(i, j).b() = std::log(std::max(1e-8f, feathered(i, j).b())); - - if (mask_pot(i ,j)) { - - view(i, j).a() = weights_pot(i, j); - - } + feathered(i, j).r() = std::log(std::max(1e-8f, feathered(i, j).r())); + feathered(i, j).g() = std::log(std::max(1e-8f, feathered(i, j).g())); + feathered(i, j).b() = std::log(std::max(1e-8f, feathered(i, 
j).b())); } } - LaplacianPyramid pyramid(feathered.Width(), feathered.Height(), 6); - pyramid.apply(view); + LaplacianPyramid pyramid(feathered.Width(), feathered.Height(), _bands); + pyramid.apply(feathered, weights_pot); _pyramid_panorama.merge(pyramid, new_offset_x, new_offset_y); - - - - - - - //const aliceVision::image::Image & img = _pyramid_panorama.getStackResult(); - //const aliceVision::image::Image & mask = _pyramid_panorama.getStackMask(); - - //_panorama = img.block(0, 0, _panorama.Height(), _panorama.Width()); - //_mask = mask.block(0, 0, _panorama.Height(), _panorama.Width());*/ - - return true; } virtual bool terminate() { - image::Image res; - _pyramid_panorama.rebuild(res); + _pyramid_panorama.rebuild(_panorama, _mask); - - char filename[FILENAME_MAX]; - const image::Image & img = res; - image::Image tmp(img.Width(), img.Height(), true, image::RGBfColor(0.0)); - sprintf(filename, "/home/mmoc/out.exr"); - for (int i = 0; i < img.Height(); i++) { - for (int j = 0; j < img.Width(); j++) { - tmp(i, j).r() = std::exp(img(i, j).r()); - tmp(i, j).g() = std::exp(img(i, j).g()); - tmp(i, j).b() = std::exp(img(i, j).b()); + /*Go back to normal space from log space*/ + for (int i = 0; i < _panorama.Height(); i++) { + for (int j = 0; j < _panorama.Width(); j++) { + _panorama(i, j).r() = std::exp(_panorama(i, j).r()); + _panorama(i, j).g() = std::exp(_panorama(i, j).g()); + _panorama(i, j).b() = std::exp(_panorama(i, j).b()); } } - image::writeImage(filename, tmp, image::EImageColorSpace::SRGB); - return true; } protected: LaplacianPyramid _pyramid_panorama; + size_t _bands; }; int main(int argc, char **argv) { @@ -975,7 +912,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { ConfigView cv; - if (pos == 24 || pos == 25) + if (1/*pos == 24 || pos == 25*/) { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); @@ -989,7 +926,7 @@ int main(int argc, char **argv) { std::unique_ptr 
compositer; if (multiband) { - compositer = std::unique_ptr(new LaplacianCompositer(panoramaSize.first, panoramaSize.second)); + compositer = std::unique_ptr(new LaplacianCompositer(panoramaSize.first, panoramaSize.second, 8)); } else { compositer = std::unique_ptr(new AlphaCompositer(panoramaSize.first, panoramaSize.second)); @@ -1023,20 +960,23 @@ int main(int argc, char **argv) { image::Image weights; image::readImage(weightsPath, weights, image::EImageColorSpace::NO_CONVERSION); + /*Build weight map*/ image::Image seams(weights.Width(), weights.Height()); distanceseams.append(seams, mask, weights, cv.offset_x, cv.offset_y); + /* Composite image into panorama */ compositer->append(source, mask, seams, cv.offset_x, cv.offset_y); } + /* Build image */ compositer->terminate(); + + /* Store output */ ALICEVISION_LOG_INFO("Write output panorama to file " << outputPanorama); const aliceVision::image::Image & panorama = compositer->getPanorama(); image::writeImage(outputPanorama, panorama, image::EImageColorSpace::SRGB); - - return EXIT_SUCCESS; } \ No newline at end of file From 0a6d67489b2fbb796729e8896b9d713e4cffec4f Mon Sep 17 00:00:00 2001 From: fabienservant Date: Tue, 10 Dec 2019 15:41:52 +0100 Subject: [PATCH 151/174] memory reduction --- .../pipeline/main_panoramaCompositing.cpp | 102 +++++++++--------- 1 file changed, 50 insertions(+), 52 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index a68212470e..9a5f785698 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -460,78 +460,80 @@ class LaplacianPyramid { } } - bool apply(const aliceVision::image::Image & source, const aliceVision::image::Image & weights) { + bool apply(const aliceVision::image::Image & source, const aliceVision::image::Image & weights, size_t offset_x, size_t offset_y) { + int width = source.Width(); + int height = source.Height(); - 
_levels[0] = source; - _weights[0] = weights; + image::Image current_color = source; + image::Image next_color; + image::Image current_weights = weights; + image::Image next_weights; - for (int l = 1; l < _levels.size(); l++) { + for (int l = 0; l < _levels.size() - 1; l++) { - aliceVision::image::Image buf(_levels[l - 1].Width(), _levels[l - 1].Height()); - convolveGaussian5x5(buf, _levels[l - 1]); - downscale(_levels[l], buf); + aliceVision::image::Image buf(width, height); + aliceVision::image::Image buf2(width, height); + aliceVision::image::Image bufw(width, height); + + next_color = aliceVision::image::Image(width / 2, height / 2); + next_weights = aliceVision::image::Image(width / 2, height / 2); - aliceVision::image::Image bufw(_weights[l - 1].Width(), _weights[l - 1].Height()); - convolveGaussian5x5(bufw, _weights[l - 1]); - downscale(_weights[l], bufw); - } + convolveGaussian5x5(buf, current_color); + downscale(next_color, buf); + convolveGaussian5x5(bufw, current_weights); + downscale(next_weights, bufw); - for (int l = 0; l < _levels.size() - 1; l++) { - aliceVision::image::Image buf(_levels[l].Width(), _levels[l].Height()); - aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); - - upscale(buf, _levels[l + 1]); + upscale(buf, next_color); convolveGaussian5x5(buf2, buf); for (int i = 0; i < buf2.Height(); i++) { - for (int j = 0; j < buf2.Width(); j++) { - buf2(i,j) *= 4.0f; - } + for (int j = 0; j < buf2.Width(); j++) { + buf2(i,j) *= 4.0f; + } } - substract(_levels[l], _levels[l], buf2); + substract(current_color, current_color, buf2); + + merge(current_color, current_weights, l, offset_x, offset_y); + + current_color = next_color; + current_weights = next_weights; + width /= 2; + height /= 2; + offset_x /= 2; + offset_y /= 2; } - + merge(current_color, current_weights, _levels.size() - 1, offset_x, offset_y); return true; } - bool merge(const LaplacianPyramid & other, size_t offset_x, size_t offset_y) { - - for (int l = 0; l < 
_levels.size(); l++) { - - image::Image & img = _levels[l]; - const image::Image & oimg = other._levels[l]; + bool merge(const aliceVision::image::Image & oimg, const aliceVision::image::Image & oweight, size_t level, size_t offset_x, size_t offset_y) { - image::Image & weight = _weights[l]; - const image::Image & oweight = other._weights[l]; + image::Image & img = _levels[level]; + image::Image & weight = _weights[level]; - for (int i = 0; i < oimg.Height(); i++) { + for (int i = 0; i < oimg.Height(); i++) { - int di = i + offset_y; - if (di >= img.Height()) continue; + int di = i + offset_y; + if (di >= img.Height()) continue; - for (int j = 0; j < oimg.Width(); j++) { + for (int j = 0; j < oimg.Width(); j++) { - int dj = j + offset_x; - if (dj >= weight.Width()) { - dj = dj - weight.Width(); - } - - img(di, dj).r() += oimg(i, j).r() * oweight(i, j); - img(di, dj).g() += oimg(i, j).g() * oweight(i, j); - img(di, dj).b() += oimg(i, j).b() * oweight(i, j); - weight(di, dj) += oweight(i, j); + int dj = j + offset_x; + if (dj >= weight.Width()) { + dj = dj - weight.Width(); } - } - offset_x /= 2; - offset_y /= 2; - } - + img(di, dj).r() += oimg(i, j).r() * oweight(i, j); + img(di, dj).g() += oimg(i, j).g() * oweight(i, j); + img(di, dj).b() += oimg(i, j).b() * oweight(i, j); + weight(di, dj) += oweight(i, j); + } + } return true; } @@ -779,12 +781,8 @@ class LaplacianCompositer : public Compositer { feathered(i, j).b() = std::log(std::max(1e-8f, feathered(i, j).b())); } } - - - LaplacianPyramid pyramid(feathered.Width(), feathered.Height(), _bands); - pyramid.apply(feathered, weights_pot); - _pyramid_panorama.merge(pyramid, new_offset_x, new_offset_y); + _pyramid_panorama.apply(feathered, weights_pot, new_offset_x, new_offset_y); return true; } From d967fb83d5b8d326c84dddcf0ce24901df14678a Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 11 Dec 2019 08:53:38 +0100 Subject: [PATCH 152/174] add offsets to rotation for panorama --- 
src/software/pipeline/main_panoramaEstimation.cpp | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/software/pipeline/main_panoramaEstimation.cpp b/src/software/pipeline/main_panoramaEstimation.cpp index dfb95e6918..84d67e6be1 100644 --- a/src/software/pipeline/main_panoramaEstimation.cpp +++ b/src/software/pipeline/main_panoramaEstimation.cpp @@ -61,6 +61,8 @@ int main(int argc, char **argv) bool refine = true; bool lockAllIntrinsics = false; int orientation = 0; + float offsetLongitude = 0.0f; + float offsetLatitude = 0.0f; po::options_description allParams( "Perform estimation of cameras orientation around a nodal point for 360° panorama.\n" @@ -91,6 +93,10 @@ int main(int argc, char **argv) "* from homography matrix") ("orientation", po::value(&orientation)->default_value(orientation), "Orientation") + ("offsetLongitude", po::value(&offsetLongitude)->default_value(offsetLongitude), + "offset to camera longitude") + ("offsetLatitude", po::value(&offsetLatitude)->default_value(offsetLatitude), + "offset to camera latitude") ("refine", po::value(&refine)->default_value(refine), "Refine cameras with a Bundle Adjustment") ("lockAllIntrinsics", po::value(&lockAllIntrinsics)->default_value(lockAllIntrinsics), @@ -351,6 +357,15 @@ int main(int argc, char **argv) sfm::applyTransform(outSfmData, S, R, t); } + /*Add offsets to rotations*/ + for (auto& pose: outSfmData.getPoses()) { + + geometry::Pose3 p = pose.second.getTransform(); + Eigen::Matrix3d newR = p.rotation() * Eigen::AngleAxisd(degreeToRadian(offsetLongitude), Vec3(0,1,0)) * Eigen::AngleAxisd(degreeToRadian(offsetLatitude), Vec3(1,0,0)); + p.rotation() = newR; + pose.second.setTransform(p); + } + // export to disk computed scene (data & visualizable results) ALICEVISION_LOG_INFO("Export SfMData to disk"); sfmDataIO::Save(outSfmData, outSfMDataFilename, sfmDataIO::ESfMData::ALL); From 900def1b3a4760130b97d8201491ec6decfe615c Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 11 Dec 
2019 08:53:51 +0100 Subject: [PATCH 153/174] add bypass to HDR --- src/software/convert/main_convertLDRToHDR.cpp | 29 ++++++++++++++++--- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 560a769567..dcfad11739 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -114,6 +114,7 @@ int main(int argc, char * argv[]) int calibrationNbPoints = 0; int calibrationDownscale = 4; bool refineExposures = false; + bool byPass = false; std::string calibrationWeightFunction = "default"; hdr::EFunctionType fusionWeightFunction = hdr::EFunctionType::GAUSSIAN; @@ -140,6 +141,8 @@ int main(int argc, char * argv[]) "float value between 0 and 1 to correct clamped highlights in dynamic range: use 0 for no correction, 1 for full correction to maxLuminance.") ("fisheyeLens,f", po::value(&fisheye)->default_value(fisheye), "Set to 1 if images are taken with a fisheye lens and to 0 if not. 
Default value is set to 1.") + ("byPass", po::value(&byPass)->default_value(byPass), + "bypass HDR creation and use medium bracket as input for next steps") ("channelQuantizationPower", po::value(&channelQuantizationPower)->default_value(channelQuantizationPower), "Quantization level like 8 bits or 10 bits.") ("calibrationWeight,w", po::value(&calibrationWeightFunction)->default_value(calibrationWeightFunction), @@ -427,6 +430,28 @@ int main(int argc, char * argv[]) } + sfmData::SfMData outputSfm; + sfmData::Views & vs = outputSfm.getViews(); + outputSfm.getIntrinsics() = sfmData.getIntrinsics(); + + /*If bypass, simply use central bracket*/ + if (byPass) { + for(int g = 0; g < groupedFilenames.size(); ++g) + { + vs[targetViews[g]->getViewId()] = targetViews[g]; + } + + // Export output sfmData + if (!sfmDataIO::Save(outputSfm, sfmOutputDataFilename, sfmDataIO::ESfMData::ALL)) + { + ALICEVISION_LOG_ERROR("Can not save output sfm file at " << sfmOutputDataFilename); + return EXIT_FAILURE; + } + + return EXIT_SUCCESS; + } + + size_t channelQuantization = std::pow(2, channelQuantizationPower); // set the correct weight functions corresponding to the string parameter hdr::rgbCurve calibrationWeight(channelQuantization); @@ -573,10 +598,6 @@ int main(int argc, char * argv[]) calibrationWeight.writeHtml(outputHtmlPath, "Fusion weight: " + methodName); } - sfmData::SfMData outputSfm; - sfmData::Views & vs = outputSfm.getViews(); - outputSfm.getIntrinsics() = sfmData.getIntrinsics(); - image::EImageColorSpace mergeColorspace = image::EImageColorSpace::LINEAR; switch (calibrationMethod) From e5c887719b33e5c203beb2564ce7d4ccf2b5c00a Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 11 Dec 2019 10:12:31 +0100 Subject: [PATCH 154/174] some correction on alpha blending --- .../pipeline/main_panoramaCompositing.cpp | 98 ++++++++++--------- 1 file changed, 54 insertions(+), 44 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp 
b/src/software/pipeline/main_panoramaCompositing.cpp index 9a5f785698..000e946931 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -100,8 +100,7 @@ bool makeImagePyramidCompatible(image::Image & output, size_t & out_offset_x, class Compositer { public: Compositer(size_t outputWidth, size_t outputHeight) : - _panorama(outputWidth, outputHeight, true, image::RGBfColor(1.0f, 0.0f, 0.0f)), - _mask(outputWidth, outputHeight, true, false) + _panorama(outputWidth, outputHeight, true, image::RGBAfColor(0.0f, 0.0f, 0.0f, 0.0f)) { } @@ -125,8 +124,10 @@ class Compositer { pano_j = pano_j - _panorama.Width(); } - _panorama(pano_i, pano_j) = color(i, j); - _mask(pano_i, pano_j) = 1; + _panorama(pano_i, pano_j).r() = color(i, j).r(); + _panorama(pano_i, pano_j).g() = color(i, j).g(); + _panorama(pano_i, pano_j).b() = color(i, j).b(); + _panorama(pano_i, pano_j).a() = 1.0f; } } @@ -137,25 +138,19 @@ class Compositer { return true; } - const aliceVision::image::Image & getPanorama() const { + const aliceVision::image::Image & getPanorama() const { return _panorama; } - const aliceVision::image::Image & getPanoramaMask() const { - return _mask; - } - protected: - aliceVision::image::Image _panorama; - aliceVision::image::Image _mask; + aliceVision::image::Image _panorama; }; class AlphaCompositer : public Compositer { public: AlphaCompositer(size_t outputWidth, size_t outputHeight) : - Compositer(outputWidth, outputHeight), - _weightmap(outputWidth, outputHeight, true, 0.0f) { + Compositer(outputWidth, outputHeight) { } @@ -179,33 +174,40 @@ class AlphaCompositer : public Compositer { pano_j = pano_j - _panorama.Width(); } - if (!_mask(pano_i, pano_j)) { - _panorama(pano_i, pano_j) = color(i, j); - _weightmap(pano_i, pano_j) = inputWeights(i, j); - } - else { - float wp = _weightmap(pano_i, pano_j); - float wc = inputWeights(i, j); - float wtotal = wp + wc; - wp /= wtotal; - wc /= wtotal; + float wc = 
inputWeights(i, j); + + _panorama(pano_i, pano_j).r() += wc * color(i, j).r(); + _panorama(pano_i, pano_j).g() += wc * color(i, j).g(); + _panorama(pano_i, pano_j).b() += wc * color(i, j).b(); + _panorama(pano_i, pano_j).a() += wc; + } + } - _panorama(pano_i, pano_j).r() = wp * _panorama(pano_i, pano_j).r() + wc * color(i, j).r(); - _panorama(pano_i, pano_j).g() = wp * _panorama(pano_i, pano_j).g() + wc * color(i, j).g(); - _panorama(pano_i, pano_j).b() = wp * _panorama(pano_i, pano_j).b() + wc * color(i, j).b(); + return true; + } - _weightmap(pano_i, pano_j) = wtotal; - } + virtual bool terminate() { - _mask(pano_i, pano_j) = true; + for (int i = 0; i < _panorama.Height(); i++) { + for (int j = 0; j < _panorama.Width(); j++) { + + if (_panorama(i, j).a() < 1e-6) { + _panorama(i, j).r() = 1.0f; + _panorama(i, j).g() = 0.0f; + _panorama(i, j).b() = 0.0f; + _panorama(i, j).a() = 0.0f; + } + else { + _panorama(i, j).r() = _panorama(i, j).r() / _panorama(i, j).a(); + _panorama(i, j).g() = _panorama(i, j).g() / _panorama(i, j).a(); + _panorama(i, j).b() = _panorama(i, j).b() / _panorama(i, j).a(); + _panorama(i, j).a() = 1.0f; + } } } return true; } - -protected: - aliceVision::image::Image _weightmap; }; template @@ -446,10 +448,12 @@ class LaplacianPyramid { size_t width = base_width; size_t height = base_height; + /*Make sure pyramid size can be divided by 2 on each levels*/ double max_scale = 1.0 / pow(2.0, max_levels - 1); width = size_t(ceil(double(width) * max_scale) / max_scale); height = size_t(ceil(double(height) * max_scale) / max_scale); + /*Prepare pyramid*/ for (int lvl = 0; lvl < max_levels; lvl++) { _levels.push_back(aliceVision::image::Image(width, height, true, image::RGBfColor(0.0f,0.0f,0.0f))); @@ -538,9 +542,7 @@ class LaplacianPyramid { return true; } - bool rebuild(image::Image & output, image::Image & mask) { - - mask.fill(0); + bool rebuild(image::Image & output) { for (int l = 0; l < _levels.size(); l++) { for (int i = 0; i < 
_levels[l].Height(); i++) { @@ -553,12 +555,6 @@ class LaplacianPyramid { _levels[l](i, j).r() = _levels[l](i, j).r() / _weights[l](i, j); _levels[l](i, j).g() = _levels[l](i, j).g() / _weights[l](i, j); _levels[l](i, j).b() = _levels[l](i, j).b() / _weights[l](i, j); - - if (l == 0) { - if (i < mask.Height() && j < mask.Width()) { - mask(i, j) = 1; - } - } } } } @@ -583,7 +579,21 @@ class LaplacianPyramid { removeNegativeValues(_levels[l]); } - output = _levels[0].block(0, 0, output.Height(), output.Width()); + /*Write output to RGBA*/ + for (int i = 0; i < output.Height(); i++) { + for (int j = 0; j < output.Width(); j++) { + output(i, j).r() = _levels[0](i, j).r(); + output(i, j).g() = _levels[0](i, j).g(); + output(i, j).b() = _levels[0](i, j).b(); + + if (_weights[0](i, j) < 1e-6) { + output(i, j).a() = 0.0f; + } + else { + output(i, j).a() = 1.0f; + } + } + } return true; } @@ -789,7 +799,7 @@ class LaplacianCompositer : public Compositer { virtual bool terminate() { - _pyramid_panorama.rebuild(_panorama, _mask); + _pyramid_panorama.rebuild(_panorama); /*Go back to normal space from log space*/ for (int i = 0; i < _panorama.Height(); i++) { @@ -972,7 +982,7 @@ int main(int argc, char **argv) { /* Store output */ ALICEVISION_LOG_INFO("Write output panorama to file " << outputPanorama); - const aliceVision::image::Image & panorama = compositer->getPanorama(); + const aliceVision::image::Image & panorama = compositer->getPanorama(); image::writeImage(outputPanorama, panorama, image::EImageColorSpace::SRGB); From c23879a2a7b9838fefb07ec2356d4997efdc86fe Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 11 Dec 2019 15:04:29 +0100 Subject: [PATCH 155/174] [hdr] laguerre: remove weighting In fact, it doesn't really make sense to add weighting in the BA context. 
--- src/aliceVision/hdr/LaguerreBACalibration.cpp | 44 ++++--------------- 1 file changed, 8 insertions(+), 36 deletions(-) diff --git a/src/aliceVision/hdr/LaguerreBACalibration.cpp b/src/aliceVision/hdr/LaguerreBACalibration.cpp index 2b7ca35fc7..8871f41099 100644 --- a/src/aliceVision/hdr/LaguerreBACalibration.cpp +++ b/src/aliceVision/hdr/LaguerreBACalibration.cpp @@ -42,33 +42,9 @@ T laguerreFunctionInv(const T& a, const T& x) return laguerreFunction(-a, x); } - -struct GaussianWeight -{ - template - T operator()(const T& x) const - { - static const double mu = 0.5; - static const double sigma = 1.0 / (5.0 * std::sqrt(2.0)); - const T v = x - 0.5; - return exp(-v * v / (2.0 * sigma * sigma)); - } -}; - -struct NoWeight -{ - template - T operator()(const T& x) const - { - return T(1.0); - } -}; - - /** * */ -template struct HdrResidual { HdrResidual(const Rgb& a, const Rgb& b) @@ -79,7 +55,6 @@ struct HdrResidual template bool operator()(const T* const laguerreParam, const T* const relativeWB_R, const T* const relativeWB_B, const T* const expA, const T* const expB, T* residual) const { - static const WeightFunc _weightFunc; static const int red = 0; static const int green = 1; static const int blue = 2; @@ -115,8 +90,7 @@ struct HdrResidual // T b = laguerreFunction(greenParam, colorB); T errorCost = abs(laguerreFunctionInv(greenParam, laguerreFunction(greenParam, colorA) * (*expB) / (*expA)) - colorB) + abs(laguerreFunctionInv(greenParam, laguerreFunction(greenParam, colorB) * (*expA) / (*expB)) - colorA); - T weight = _weightFunc(colorA) * _weightFunc(colorB); // weight by the validity on the input values - residual[green] = errorCost * weight; + residual[green] = errorCost; } { // RED @@ -127,8 +101,7 @@ struct HdrResidual // T b = *relativeWB_R * laguerreFunction(redParam, colorB); T errorCost = abs(laguerreFunctionInv(redParam, laguerreFunction(redParam, colorA) * (*expB) / (*expA)) - colorB) + abs(laguerreFunctionInv(redParam, laguerreFunction(redParam, 
colorB) * (*expA) / (*expB)) - colorA); - T weight = _weightFunc(colorA) * _weightFunc(colorB); // weight by the validity on the input values - residual[red] = errorCost * weight; + residual[red] = errorCost; } { // BLUE @@ -139,8 +112,7 @@ struct HdrResidual // T b = *relativeWB_B * laguerreFunction(blueParam, colorB); T errorCost = abs(laguerreFunctionInv(blueParam, laguerreFunction(blueParam, colorA) * (*expB) / (*expA)) - colorB) + abs(laguerreFunctionInv(blueParam, laguerreFunction(blueParam, colorB) * (*expA) / (*expB)) - colorA); - T weight = _weightFunc(colorA) * _weightFunc(colorB); // weight by the validity on the input values - residual[blue] = errorCost * weight; + residual[blue] = errorCost; } return true; @@ -181,7 +153,7 @@ void LaguerreBACalibration::process( } } std::array laguerreParam = { 0.0, 0.0, 0.0 }; - std::array relativeWB = { 0.0, 0.0, 0.0 }; + std::array relativeWB = { 1.0, 1.0, 1.0 }; ALICEVISION_LOG_DEBUG("Create BA problem"); ceres::Problem problem; @@ -263,8 +235,8 @@ void LaguerreBACalibration::process( if(hNextNext == h) continue; } - ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction, 3, 3, 1, 1, 1, 1>( - new HdrResidual(hdrSamples[h].colors[i], hdrSamples[hNext].colors[i])); + ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction( + new HdrResidual(hdrSamples[h].colors[i], hdrSamples[hNext].colors[i])); double& expA = exposures[std::make_pair(0, cameraExposures[g][h])]; double& expB = exposures[std::make_pair(0, cameraExposures[g][hNext])]; @@ -304,11 +276,11 @@ void LaguerreBACalibration::process( for(unsigned int channel = 0; channel < 3; ++channel) { std::vector& curve = response.getCurve(channel); - double step = 1.0f / curve.size(); + const double step = 1.0 / double(curve.size()); for (unsigned int i = 0; i < curve.size(); ++i) { const double offset = ((channel == 1) ? 
0 : laguerreParam[1]); // non-green channels are relative to green channel - curve[i] = relativeWB[channel] + laguerreFunction(offset + laguerreParam[channel], i * step); + curve[i] = relativeWB[channel] * laguerreFunction(offset + laguerreParam[channel], i * step); } } From 0002d139b1579d06d1ce81aaab47ca0e819c9d1b Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Wed, 11 Dec 2019 15:36:18 +0100 Subject: [PATCH 156/174] [build] Eigen compatibility: use Matrix instead of Vector --- src/software/pipeline/main_panoramaCompositing.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 9a5f785698..a40d8410d1 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -209,7 +209,7 @@ class AlphaCompositer : public Compositer { }; template -inline void convolveRow(typename image::Image::RowXpr output_row, typename image::Image::ConstRowXpr input_row, const Eigen::Vector & kernel) { +inline void convolveRow(typename image::Image::RowXpr output_row, typename image::Image::ConstRowXpr input_row, const Eigen::Matrix & kernel) { const int radius = 2; @@ -241,7 +241,7 @@ inline void convolveRow(typename image::Image::RowXpr output_row, typename im } template -inline void convolveColumns(typename image::Image::RowXpr output_row, const image::Image & input_rows, const Eigen::Vector & kernel) { +inline void convolveColumns(typename image::Image::RowXpr output_row, const image::Image & input_rows, const Eigen::Matrix & kernel) { for (int j = 0; j < output_row.cols(); j++) { @@ -266,7 +266,7 @@ bool convolveGaussian5x5(image::Image & output, const image::Image & input return false; } - Eigen::Vector kernel; + Eigen::Matrix kernel; kernel[0] = 1.0f; kernel[1] = 4.0f; kernel[2] = 6.0f; @@ -977,4 +977,4 @@ int main(int argc, char **argv) { return EXIT_SUCCESS; -} \ No newline at end of file 
+} From cbb3e8677ab37b32119d752b05a1884c2e064823 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 12 Dec 2019 20:35:11 +0100 Subject: [PATCH 157/174] [software] LdrToHdr: rename parameters for highlight postprocessing --- src/software/convert/main_convertLDRToHDR.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index dcfad11739..5cfe8f70cc 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -108,7 +108,7 @@ int main(int argc, char * argv[]) int groupSize = 3; ECalibrationMethod calibrationMethod = ECalibrationMethod::LINEAR; float highlightCorrectionFactor = 1.0f; - float highlightTargetLux = 100000.0f; + float highlightTargetLux = 120000.0f; bool fisheye = false; int channelQuantizationPower = 10; int calibrationNbPoints = 0; @@ -135,9 +135,9 @@ int main(int argc, char * argv[]) optionalParams.add_options() ("calibrationMethod,m", po::value(&calibrationMethod)->default_value(calibrationMethod), "Name of method used for camera calibration: linear, robertson (slow), debevec, grossberg, laguerre.") - ("highlightsMaxLuminance", po::value(&highlightTargetLux)->default_value(highlightTargetLux), + ("highlightTargetLux", po::value(&highlightTargetLux)->default_value(highlightTargetLux), "Highlights maximum luminance.") - ("expandDynamicRange"/*"highlightCorrectionFactor"*/, po::value(&highlightCorrectionFactor)->default_value(highlightCorrectionFactor), + ("highlightCorrectionFactor", po::value(&highlightCorrectionFactor)->default_value(highlightCorrectionFactor), "float value between 0 and 1 to correct clamped highlights in dynamic range: use 0 for no correction, 1 for full correction to maxLuminance.") ("fisheyeLens,f", po::value(&fisheye)->default_value(fisheye), "Set to 1 if images are taken with a fisheye lens and to 0 if not. 
Default value is set to 1.") From 38e469a0fe50c5d10d3760d39c6e533173e386ad Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 12 Dec 2019 20:36:35 +0100 Subject: [PATCH 158/174] [software] LdrToHdr: remove export of debug files --- src/software/convert/main_convertLDRToHDR.cpp | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 5cfe8f70cc..02877ef6ac 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -630,18 +630,6 @@ int main(int argc, char * argv[]) image::Image HDRimage; merge.process(images, groupedExposures[g], fusionWeight, response, HDRimage, targetCameraExposure); - // TO REMOVE: Temporary export for debug - { - // Output image file path - std::stringstream sstream; - sstream << "hdr_" << std::setfill('0') << std::setw(4) << g << "_beforeCorrection.exr"; - std::string hdrImagePath = (fs::path(outputPath) / sstream.str()).string(); - - // Write an image with parameters from the target view - oiio::ParamValueList targetMetadata = image::readImageMetadata(targetView->getImagePath()); - image::writeImage(hdrImagePath, HDRimage, image::EImageColorSpace::AUTO, targetMetadata); - } - if(highlightCorrectionFactor > 0.0) { merge.postProcessHighlight(images, groupedExposures[g], fusionWeight, response, HDRimage, targetCameraExposure, highlightCorrectionFactor, highlightTargetLux); From 2aff5aea6a0a467d138519bf58107e89a266ce8e Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Thu, 12 Dec 2019 20:37:55 +0100 Subject: [PATCH 159/174] [software] panoramaWarping: export images in AUTO (no impact as the output is exr) --- src/software/pipeline/main_panoramaWarping.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/software/pipeline/main_panoramaWarping.cpp b/src/software/pipeline/main_panoramaWarping.cpp index 3fb4ee06f0..7344686d13 100644 --- 
a/src/software/pipeline/main_panoramaWarping.cpp +++ b/src/software/pipeline/main_panoramaWarping.cpp @@ -1288,7 +1288,7 @@ int main(int argc, char **argv) { path = ss.str(); viewTree.put("filename_view", path); ALICEVISION_LOG_INFO("Store view " << pos << " with path " << path); - image::writeImage(path, cam, image::EImageColorSpace::NO_CONVERSION); + image::writeImage(path, cam, image::EImageColorSpace::AUTO); } { @@ -1306,7 +1306,7 @@ int main(int argc, char **argv) { path = ss.str(); viewTree.put("filename_weights", path); ALICEVISION_LOG_INFO("Store weightmap " << pos << " with path " << path); - image::writeImage(path, weights, image::EImageColorSpace::NO_CONVERSION); + image::writeImage(path, weights, image::EImageColorSpace::AUTO); } /** From a712450ed8817ca444a0badda8161fc801ba3761 Mon Sep 17 00:00:00 2001 From: fabienservant Date: Fri, 13 Dec 2019 08:22:06 +0100 Subject: [PATCH 160/174] Hack to make things work for laplacian on borders ... --- src/software/pipeline/main_panoramaWarping.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/software/pipeline/main_panoramaWarping.cpp b/src/software/pipeline/main_panoramaWarping.cpp index 7344686d13..1755f791dd 100644 --- a/src/software/pipeline/main_panoramaWarping.cpp +++ b/src/software/pipeline/main_panoramaWarping.cpp @@ -1213,9 +1213,13 @@ int main(int argc, char **argv) { } } else { + double max_scale = 1.0 / pow(2.0, 10); + panoramaSize.first = int(ceil(double(panoramaSize.first) * max_scale) / max_scale); panoramaSize.second = panoramaSize.first / 2; } + + ALICEVISION_LOG_INFO("Choosen panorama size : " << panoramaSize.first << "x" << panoramaSize.second); bpt::ptree viewsTree; From 12689a1e393f35aa07108621fab86a60263ede7b Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 13 Dec 2019 13:07:00 +0100 Subject: [PATCH 161/174] [hdri] laguerre: fix incorrect Laguerre formula --- src/aliceVision/hdr/LaguerreBACalibration.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/src/aliceVision/hdr/LaguerreBACalibration.cpp b/src/aliceVision/hdr/LaguerreBACalibration.cpp index 8871f41099..e5a7e41965 100644 --- a/src/aliceVision/hdr/LaguerreBACalibration.cpp +++ b/src/aliceVision/hdr/LaguerreBACalibration.cpp @@ -34,7 +34,8 @@ T laguerreFunction(const T& a, const T& x) { // https://www.desmos.com/calculator/ib1y06t4pe using namespace boost::math::constants; - return x + 2.0 * pi() * atan((a * sin(pi() * x)) / (1.0 - a * cos(pi() * x))); + constexpr double c = 2.0 / pi(); + return x + c * atan((a * sin(pi() * x)) / (1.0 - a * cos(pi() * x))); } template T laguerreFunctionInv(const T& a, const T& x) From a8fdf6f0309456508efacb19c13a246ad5a944bb Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 13 Dec 2019 20:53:18 +0100 Subject: [PATCH 162/174] [nodes] LDRToHDR: rename param to nbBrackets Also supports value 0 for automatic detection of the number of brackets. --- src/software/convert/main_convertLDRToHDR.cpp | 72 +++++++++++++------ 1 file changed, 51 insertions(+), 21 deletions(-) diff --git a/src/software/convert/main_convertLDRToHDR.cpp b/src/software/convert/main_convertLDRToHDR.cpp index 02877ef6ac..868e019e6a 100644 --- a/src/software/convert/main_convertLDRToHDR.cpp +++ b/src/software/convert/main_convertLDRToHDR.cpp @@ -105,7 +105,7 @@ int main(int argc, char * argv[]) std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel()); std::string sfmInputDataFilename = ""; std::string sfmOutputDataFilename = ""; - int groupSize = 3; + int nbBrackets = 0; ECalibrationMethod calibrationMethod = ECalibrationMethod::LINEAR; float highlightCorrectionFactor = 1.0f; float highlightTargetLux = 120000.0f; @@ -128,11 +128,11 @@ int main(int argc, char * argv[]) requiredParams.add_options() ("input,i", po::value(&sfmInputDataFilename)->required(), "SfMData file input.") ("outSfMDataFilename,o", po::value(&sfmOutputDataFilename)->required(), "SfMData file output.") - ("groupSize,g", 
po::value(&groupSize)->required(), "bracket count per HDR image.") ; po::options_description optionalParams("Optional parameters"); optionalParams.add_options() + ("nbBrackets,b", po::value(&nbBrackets)->default_value(nbBrackets), "bracket count per HDR image (0 means automatic).") ("calibrationMethod,m", po::value(&calibrationMethod)->default_value(calibrationMethod), "Name of method used for camera calibration: linear, robertson (slow), debevec, grossberg, laguerre.") ("highlightTargetLux", po::value(&highlightTargetLux)->default_value(highlightTargetLux), @@ -194,12 +194,6 @@ int main(int argc, char * argv[]) system::Logger::get()->setLogLevel(verboseLevel); - if (groupSize < 0) - { - ALICEVISION_LOG_ERROR("Invalid number of brackets"); - return EXIT_FAILURE; - } - // Analyze path boost::filesystem::path path(sfmOutputDataFilename); std::string outputPath = path.parent_path().string(); @@ -219,12 +213,11 @@ int main(int argc, char * argv[]) return EXIT_FAILURE; } - if (countImages % groupSize != 0) + if (nbBrackets > 0 && countImages % nbBrackets != 0) { - ALICEVISION_LOG_ERROR("The input SfMData file is not compatible with this bracket size"); + ALICEVISION_LOG_ERROR("The input SfMData file is not compatible with the number of brackets."); return EXIT_FAILURE; } - size_t countGroups = countImages / groupSize; // Make sure there is only one kind of image in dataset if (sfmData.getIntrinsics().size() > 2) @@ -366,15 +359,52 @@ int main(int argc, char * argv[]) // Make groups std::vector>> groupedViews; - std::vector> group; - for (auto & view : viewsOrderedByName) { - group.push_back(view); - if (group.size() == groupSize) - { - groupedViews.push_back(group); - group.clear(); - } + std::vector> group; + std::vector exposures; + for (auto & view : viewsOrderedByName) + { + if (nbBrackets > 0) + { + group.push_back(view); + if (group.size() == nbBrackets) + { + groupedViews.push_back(group); + group.clear(); + } + } + else + { + // Automatically determines the 
number of brackets + float exp = view->getCameraExposureSetting(); + if (!exposures.empty() && exp != exposures.back() && exp == exposures.front()) + { + groupedViews.push_back(group); + group.clear(); + exposures.clear(); + } + exposures.push_back(exp); + group.push_back(view); + } + } + if (!group.empty()) + groupedViews.push_back(group); + } + if (nbBrackets <= 0) + { + std::set sizeOfGroups; + for (auto & group : groupedViews) + { + sizeOfGroups.insert(group.size()); + } + if (sizeOfGroups.size() == 1) + { + ALICEVISION_LOG_INFO("Number of brackets automatically detected: " << *sizeOfGroups.begin() << ". It will generate " << groupedViews.size() << " hdr images."); + } + else + { + ALICEVISION_LOG_ERROR("Exposure groups do not have a consistent number of brackets."); + } } std::vector> targetViews; @@ -614,11 +644,11 @@ int main(int argc, char * argv[]) } for(int g = 0; g < groupedFilenames.size(); ++g) { - std::vector> images(groupSize); + std::vector> images(groupedViews[g].size()); std::shared_ptr targetView = targetViews[g]; // Load all images of the group - for (int i = 0; i < groupSize; i++) + for (int i = 0; i < images.size(); i++) { ALICEVISION_LOG_INFO("Load " << groupedFilenames[g][i]); image::readImage(groupedFilenames[g][i], images[i], mergeColorspace); From c274b68f874d989f759c3a7359f9d33faa64718f Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 13 Dec 2019 21:29:53 +0100 Subject: [PATCH 163/174] [sfmDataIO] disable resize heuristic as RAW images have a different size in metadata --- src/aliceVision/sfmDataIO/viewIO.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/aliceVision/sfmDataIO/viewIO.cpp b/src/aliceVision/sfmDataIO/viewIO.cpp index 7f1e00fcd6..2001a67fad 100644 --- a/src/aliceVision/sfmDataIO/viewIO.cpp +++ b/src/aliceVision/sfmDataIO/viewIO.cpp @@ -153,12 +153,15 @@ std::shared_ptr getViewIntrinsic(const sfmData::View& vie { intrinsicType = camera::EINTRINSIC_stringToEnum(cameraModel); } + /* + // Warning: This resize 
heuristic is disabled as RAW images have a different size in metadata. else if(isResized) { // if the image has been resized, we assume that it has been undistorted // and we use a camera without lens distortion. intrinsicType = camera::PINHOLE_CAMERA; } + */ else if((focalLengthIn35mm > 0.0 && focalLengthIn35mm < 18.0) || (defaultFieldOfView > 100.0)) { // if the focal lens is short, the fisheye model should fit better. From 414ef86d13043e3c84a90c3afe101e1281f8488c Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Mon, 16 Dec 2019 09:44:02 +0100 Subject: [PATCH 164/174] Add option for compositer Type --- .../pipeline/main_panoramaCompositing.cpp | 27 +++++++++++++------ 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 95f019beb1..7d862a8d3c 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -842,10 +842,10 @@ int main(int argc, char **argv) { /** * Description of optional parameters */ - bool multiband = false; + std::string compositerType = "multiband"; po::options_description optionalParams("Optional parameters"); optionalParams.add_options() - ("multiband,m", po::value(&multiband)->required(), "Use multi-band blending."); + ("compositerType,c", po::value(&compositerType)->required(), "Compositer Type [replace, alpha, multiband]."); allParams.add(optionalParams); /** @@ -933,12 +933,17 @@ int main(int argc, char **argv) { } std::unique_ptr compositer; - if (multiband) { + bool isMultiBand = false; + if (compositerType == "multiband") { compositer = std::unique_ptr(new LaplacianCompositer(panoramaSize.first, panoramaSize.second, 8)); + isMultiBand = true; } - else { + else if (compositerType == "alpha") { compositer = std::unique_ptr(new AlphaCompositer(panoramaSize.first, panoramaSize.second)); } + else { + compositer = std::unique_ptr(new 
Compositer(panoramaSize.first, panoramaSize.second)); + } DistanceSeams distanceseams(panoramaSize.first, panoramaSize.second); @@ -968,12 +973,18 @@ int main(int argc, char **argv) { image::Image weights; image::readImage(weightsPath, weights, image::EImageColorSpace::NO_CONVERSION); + /*Build weight map*/ - image::Image seams(weights.Width(), weights.Height()); - distanceseams.append(seams, mask, weights, cv.offset_x, cv.offset_y); + if (isMultiBand) { + image::Image seams(weights.Width(), weights.Height()); + distanceseams.append(seams, mask, weights, cv.offset_x, cv.offset_y); - /* Composite image into panorama */ - compositer->append(source, mask, seams, cv.offset_x, cv.offset_y); + /* Composite image into panorama */ + compositer->append(source, mask, seams, cv.offset_x, cv.offset_y); + } + else { + compositer->append(source, mask, weights, cv.offset_x, cv.offset_y); + } } /* Build image */ From df44721a75f64a94187045a934b91c8f2a4a16df Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Mon, 16 Dec 2019 10:45:10 +0100 Subject: [PATCH 165/174] compute seams globally and not iteratively --- .../pipeline/main_panoramaCompositing.cpp | 87 +++++++++++++++++-- 1 file changed, 78 insertions(+), 9 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 7d862a8d3c..39b91328cc 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -42,6 +42,30 @@ typedef struct { } ConfigView; +void getMaskFromLabels(aliceVision::image::Image & mask, aliceVision::image::Image & labels, unsigned char index, size_t offset_x, size_t offset_y) { + + for (int i = 0; i < mask.Height(); i++) { + + int di = i + offset_y; + + for (int j = 0; j < mask.Width(); j++) { + + int dj = j + offset_x; + if (dj >= labels.Width()) { + dj = dj - labels.Width(); + } + + + if (labels(di, dj) == index) { + mask(i, j) = 1.0f; + } + else { + mask(i, j) = 0.0f; + } 
+ } + } +} + template bool makeImagePyramidCompatible(image::Image & output, size_t & out_offset_x, size_t & out_offset_y, const image::Image & input, size_t offset_x, size_t offset_y, size_t num_levels) { @@ -577,6 +601,14 @@ class LaplacianPyramid { addition(_levels[l], _levels[l], buf2); removeNegativeValues(_levels[l]); + + for (int i = 0; i < _levels[l].Height(); i++) { + for (int j = 0; j < _levels[l].Width(); j++) { + buf(i, j).r() = std::exp(_levels[l](i, j).r()); + buf(i, j).g() = std::exp(_levels[l](i, j).g()); + buf(i, j).b() = std::exp(_levels[l](i, j).b()); + } + } } /*Write output to RGBA*/ @@ -608,11 +640,13 @@ class LaplacianPyramid { class DistanceSeams { public: DistanceSeams(size_t outputWidth, size_t outputHeight) : - _weights(outputWidth, outputHeight, true, 0.0f) + _weights(outputWidth, outputHeight, true, 0.0f), + _labels(outputWidth, outputHeight, true, 255), + currentIndex(0) { } - virtual bool append(aliceVision::image::Image & output, const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { + virtual bool append(const aliceVision::image::Image & inputMask, const aliceVision::image::Image & inputWeights, size_t offset_x, size_t offset_y) { if (inputMask.size() != inputWeights.size()) { return false; @@ -624,8 +658,6 @@ class DistanceSeams { for (int j = 0; j < inputMask.Width(); j++) { - output(i, j) = 0.0f; - if (!inputMask(i, j)) { continue; } @@ -636,18 +668,25 @@ class DistanceSeams { } if (inputWeights(i, j) > _weights(di, dj)) { - output(i, j) = 1.0f; + _labels(di, dj) = currentIndex; _weights(di, dj) = inputWeights(i, j); } } } + currentIndex++; + return true; } + const image::Image & getLabels() { + return _labels; + } private: image::Image _weights; + image::Image _labels; + unsigned char currentIndex; }; class LaplacianCompositer : public Compositer { @@ -920,7 +959,7 @@ int main(int argc, char **argv) { for (auto & item : configTree.get_child("views")) { 
ConfigView cv; - if (1/*pos == 24 || pos == 25*/) + /*if (pos == 32 || pos == 33 || pos == 34)*/ { cv.img_path = item.second.get("filename_view"); cv.mask_path = item.second.get("filename_mask"); @@ -935,7 +974,7 @@ int main(int argc, char **argv) { std::unique_ptr compositer; bool isMultiBand = false; if (compositerType == "multiband") { - compositer = std::unique_ptr(new LaplacianCompositer(panoramaSize.first, panoramaSize.second, 8)); + compositer = std::unique_ptr(new LaplacianCompositer(panoramaSize.first, panoramaSize.second, 10)); isMultiBand = true; } else if (compositerType == "alpha") { @@ -945,8 +984,36 @@ int main(int argc, char **argv) { compositer = std::unique_ptr(new Compositer(panoramaSize.first, panoramaSize.second)); } - DistanceSeams distanceseams(panoramaSize.first, panoramaSize.second); + /*Compute seams*/ + std::unique_ptr distanceseams(new DistanceSeams(panoramaSize.first, panoramaSize.second)); + if (isMultiBand) { + for (const ConfigView & cv : configViews) { + + /** + * Load mask + */ + std::string maskPath = cv.mask_path; + ALICEVISION_LOG_INFO("Load mask with path " << maskPath); + image::Image mask; + image::readImage(maskPath, mask, image::EImageColorSpace::NO_CONVERSION); + + /** + * Load Weights + */ + std::string weightsPath = cv.weights_path; + ALICEVISION_LOG_INFO("Load weights with path " << weightsPath); + image::Image weights; + image::readImage(weightsPath, weights, image::EImageColorSpace::NO_CONVERSION); + + distanceseams->append(mask, weights, cv.offset_x, cv.offset_y); + } + } + image::Image labels = distanceseams->getLabels(); + distanceseams.reset(); + distanceseams = nullptr; + /*Do compositing*/ + pos = 0; for (const ConfigView & cv : configViews) { /** @@ -977,7 +1044,7 @@ int main(int argc, char **argv) { /*Build weight map*/ if (isMultiBand) { image::Image seams(weights.Width(), weights.Height()); - distanceseams.append(seams, mask, weights, cv.offset_x, cv.offset_y); + getMaskFromLabels(seams, labels, pos, 
cv.offset_x, cv.offset_y); /* Composite image into panorama */ compositer->append(source, mask, seams, cv.offset_x, cv.offset_y); @@ -985,6 +1052,8 @@ int main(int argc, char **argv) { else { compositer->append(source, mask, weights, cv.offset_x, cv.offset_y); } + + pos++; } /* Build image */ From b41730d311d563d073343bcdf7ebca0ce303cf83 Mon Sep 17 00:00:00 2001 From: Fabien Servant Date: Mon, 16 Dec 2019 11:03:34 +0100 Subject: [PATCH 166/174] forgot a debug output --- src/software/pipeline/main_panoramaCompositing.cpp | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 39b91328cc..82271b49d0 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -601,14 +601,6 @@ class LaplacianPyramid { addition(_levels[l], _levels[l], buf2); removeNegativeValues(_levels[l]); - - for (int i = 0; i < _levels[l].Height(); i++) { - for (int j = 0; j < _levels[l].Width(); j++) { - buf(i, j).r() = std::exp(_levels[l](i, j).r()); - buf(i, j).g() = std::exp(_levels[l](i, j).g()); - buf(i, j).b() = std::exp(_levels[l](i, j).b()); - } - } } /*Write output to RGBA*/ @@ -974,7 +966,7 @@ int main(int argc, char **argv) { std::unique_ptr compositer; bool isMultiBand = false; if (compositerType == "multiband") { - compositer = std::unique_ptr(new LaplacianCompositer(panoramaSize.first, panoramaSize.second, 10)); + compositer = std::unique_ptr(new LaplacianCompositer(panoramaSize.first, panoramaSize.second, 8)); isMultiBand = true; } else if (compositerType == "alpha") { From 587aacf00ba21294edadf37e909db3fd913ee195 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Mon, 16 Dec 2019 19:27:10 +0100 Subject: [PATCH 167/174] remove user file --- .vscode/c_cpp_properties.json | 16 ---------------- 1 file changed, 16 deletions(-) delete mode 100644 .vscode/c_cpp_properties.json diff --git 
a/.vscode/c_cpp_properties.json b/.vscode/c_cpp_properties.json deleted file mode 100644 index dc16256c97..0000000000 --- a/.vscode/c_cpp_properties.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "configurations": [ - { - "name": "Linux", - "includePath": [ - "${workspaceFolder}/**" - ], - "defines": [], - "compilerPath": "/usr/bin/clang", - "configurationProvider": "vector-of-bool.cmake-tools", - "compileCommands": "${workspaceFolder}/build/compile_commands.json", - - } - ], - "version": 4 -} \ No newline at end of file From 2e7d7ac4ebe8e1532dce0226e82ddb0be8a53a5b Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Mon, 16 Dec 2019 19:37:16 +0100 Subject: [PATCH 168/174] [software] panoramaExternalInfo: update log --- src/software/pipeline/main_panoramaExternalInfo.cpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/software/pipeline/main_panoramaExternalInfo.cpp b/src/software/pipeline/main_panoramaExternalInfo.cpp index e73e512401..101308ef03 100644 --- a/src/software/pipeline/main_panoramaExternalInfo.cpp +++ b/src/software/pipeline/main_panoramaExternalInfo.cpp @@ -104,10 +104,9 @@ int main(int argc, char * argv[]) { /*Make sure we control everything for debug purpose*/ if (lensType != "rectilinear") { - std::cout << "Lens type is unusual !" << std::endl; + ALICEVISION_CERR("Lens type not supported: " << lensType); return EXIT_FAILURE; } - std::map rotations; @@ -118,7 +117,7 @@ int main(int argc, char * argv[]) { int bracket = it.second.get(".bracket"); if (rotations.find(id) != rotations.end()) { - std::cout << "Multiple information with a same id !" 
<< std::endl; + ALICEVISION_CERR("Multiple xml attributes with a same id: " << id); return EXIT_FAILURE; } @@ -189,4 +188,4 @@ int main(int argc, char * argv[]) { } return 0; -} \ No newline at end of file +} From c0cd659fa3ad244757b8d4609d01694c2b17d9be Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 18 Dec 2019 11:40:28 +0100 Subject: [PATCH 169/174] Change upscale function --- src/software/pipeline/main_panoramaCompositing.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 82271b49d0..183c3a0cad 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -381,10 +381,10 @@ bool upscale(aliceVision::image::Image & outputColor, const aliceVision::imag for (int j = 0; j < width; j++) { int dj = j * 2; - outputColor(di, dj) = inputColor(i, j); + outputColor(di, dj) = T(); outputColor(di, dj + 1) = T(); outputColor(di + 1, dj) = T(); - outputColor(di + 1, dj + 1) = T(); + outputColor(di + 1, dj + 1) = inputColor(i, j); } } From ba8082b83e65ffd7e127ee67bb27e085f99380ed Mon Sep 17 00:00:00 2001 From: fabienservant Date: Wed, 18 Dec 2019 13:40:02 +0100 Subject: [PATCH 170/174] numerical problem for bounding box when we exactly cross the loop --- src/software/pipeline/main_panoramaWarping.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/software/pipeline/main_panoramaWarping.cpp b/src/software/pipeline/main_panoramaWarping.cpp index 1755f791dd..8d290bd00d 100644 --- a/src/software/pipeline/main_panoramaWarping.cpp +++ b/src/software/pipeline/main_panoramaWarping.cpp @@ -822,7 +822,7 @@ class CoordinatesMap { double t = - pt1(0) / direction(0); Vec3 cross = pt1 + direction * t; - if (t >= 0.0 && t < 1.0) { + if (t >= 0.0 && t <= 1.0) { if (cross(2) < 0.0) { return true; } From 420d74fd61d36bbd3729e83e02f370b6395eaaa5 Mon Sep 17 00:00:00 2001 
From: fabienservant Date: Wed, 18 Dec 2019 16:28:34 +0100 Subject: [PATCH 171/174] add loop on upscale gaussian --- .../pipeline/main_panoramaCompositing.cpp | 44 ++++++++++++------- 1 file changed, 27 insertions(+), 17 deletions(-) diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 183c3a0cad..1c74048857 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ b/src/software/pipeline/main_panoramaCompositing.cpp @@ -235,7 +235,7 @@ class AlphaCompositer : public Compositer { }; template -inline void convolveRow(typename image::Image::RowXpr output_row, typename image::Image::ConstRowXpr input_row, const Eigen::Matrix & kernel) { +inline void convolveRow(typename image::Image::RowXpr output_row, typename image::Image::ConstRowXpr input_row, const Eigen::Matrix & kernel, bool loop) { const int radius = 2; @@ -250,12 +250,24 @@ inline void convolveRow(typename image::Image::RowXpr output_row, typename im int col = j + k - radius; /* mirror 5432 | 123456 | 5432 */ - if (col < 0) { - col = - col; + + if (!loop) { + if (col < 0) { + col = - col; + } + + if (col >= input_row.cols()) { + col = input_row.cols() - 1 - (col + 1 - input_row.cols()); + } } + else { + if (col < 0) { + col = input_row.cols() + col; + } - if (col >= input_row.cols()) { - col = input_row.cols() - 1 - (col + 1 - input_row.cols()); + if (col >= input_row.cols()) { + col = col - input_row.cols(); + } } sum += w * input_row(col); @@ -286,7 +298,7 @@ inline void convolveColumns(typename image::Image::RowXpr output_row, const i } template -bool convolveGaussian5x5(image::Image & output, const image::Image & input) { +bool convolveGaussian5x5(image::Image & output, const image::Image & input, bool loop = false) { if (output.size() != input.size()) { return false; @@ -304,11 +316,11 @@ bool convolveGaussian5x5(image::Image & output, const image::Image & input int radius = 2; - convolveRow(buf.row(0), input.row(2), kernel); 
- convolveRow(buf.row(1), input.row(1), kernel); - convolveRow(buf.row(2), input.row(0), kernel); - convolveRow(buf.row(3), input.row(1), kernel); - convolveRow(buf.row(4), input.row(2), kernel); + convolveRow(buf.row(0), input.row(2), kernel, loop); + convolveRow(buf.row(1), input.row(1), kernel, loop); + convolveRow(buf.row(2), input.row(0), kernel, loop); + convolveRow(buf.row(3), input.row(1), kernel, loop); + convolveRow(buf.row(4), input.row(2), kernel, loop); for (int i = 0; i < output.Height() - 3; i++) { @@ -319,7 +331,7 @@ bool convolveGaussian5x5(image::Image & output, const image::Image & input buf.row(1) = buf.row(2); buf.row(2) = buf.row(3); buf.row(3) = buf.row(4); - convolveRow(buf.row(4), input.row(i + 3), kernel); + convolveRow(buf.row(4), input.row(i + 3), kernel, loop); } /** @@ -333,14 +345,14 @@ bool convolveGaussian5x5(image::Image & output, const image::Image & input buf.row(1) = buf.row(2); buf.row(2) = buf.row(3); buf.row(3) = buf.row(4); - convolveRow(buf.row(4), input.row(output.Height() - 2), kernel); + convolveRow(buf.row(4), input.row(output.Height() - 2), kernel, loop); convolveColumns(output.row(output.Height() - 2), buf, kernel); buf.row(0) = buf.row(1); buf.row(1) = buf.row(2); buf.row(2) = buf.row(3); buf.row(3) = buf.row(4); - convolveRow(buf.row(4), input.row(output.Height() - 3), kernel); + convolveRow(buf.row(4), input.row(output.Height() - 3), kernel, loop); convolveColumns(output.row(output.Height() - 1), buf, kernel); return true; @@ -591,7 +603,7 @@ class LaplacianPyramid { aliceVision::image::Image buf2(_levels[l].Width(), _levels[l].Height()); upscale(buf, _levels[l + 1]); - convolveGaussian5x5(buf2, buf); + convolveGaussian5x5(buf2, buf, true); for (int i = 0; i < buf2.Height(); i++) { for (int j = 0; j < buf2.Width(); j++) { @@ -622,8 +634,6 @@ class LaplacianPyramid { return true; } - - private: std::vector> _levels; std::vector> _weights; From 0698c38132134609d449550be7c38c4f1469873f Mon Sep 17 00:00:00 2001 From: 
Fabien Castan Date: Fri, 3 Jan 2020 10:37:25 +0100 Subject: [PATCH 172/174] [software] PanoramaWarping: fix potential out-of-range access to array and minor code simplification --- .../pipeline/main_panoramaWarping.cpp | 24 +++++++------------ 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/src/software/pipeline/main_panoramaWarping.cpp b/src/software/pipeline/main_panoramaWarping.cpp index 8d290bd00d..f4c482ed66 100644 --- a/src/software/pipeline/main_panoramaWarping.cpp +++ b/src/software/pipeline/main_panoramaWarping.cpp @@ -658,8 +658,7 @@ class CoordinatesMap { bbox_bottom = 0; for (int i = 0; i < 8; i++) { - int i2 = i + 1; - if (i2 > 7) i2 = 0; + int i2 = (i + 1) % 8; Vec3 extremaY = getExtremaY(rotated_pts[i], rotated_pts[i2]); @@ -702,17 +701,14 @@ class CoordinatesMap { /*Check if we cross the horizontal loop*/ - bool crossH; + bool crossH = false; for (int i = 0; i < 8; i++) { - int i2 = i + 1; - if (i2 > 7) i2 = 0; - + int i2 = (i + 1) % 8; + bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); - if (i == 0) crossH = cross; - else crossH |= cross; + crossH |= cross; } - if (pole) { /*Easy : if we cross the pole, the width is full*/ bbox_left = 0; @@ -723,7 +719,7 @@ class CoordinatesMap { int first_cross = 0; for (int i = 0; i < 8; i++) { - int i2 = i + 1; + int i2 = (i + 1) % 8; bool cross = crossHorizontalLoop(rotated_pts[i], rotated_pts[i2]); if (cross) { first_cross = i; @@ -736,11 +732,8 @@ class CoordinatesMap { bool is_right = true; for (int index = 0; index < 8; index++) { - int i = index + first_cross; - int i2 = i + 1; - - if (i > 7) i = i - 8; - if (i2 > 7) i2 = i2 - 8; + int i = (index + first_cross) % 8; + int i2 = (i + 1) % 8; Vec2 res_1 = SphericalMapping::toEquirectangular(rotated_pts[i], panoramaSize.first, panoramaSize.second); Vec2 res_2 = SphericalMapping::toEquirectangular(rotated_pts[i2], panoramaSize.first, panoramaSize.second); @@ -771,7 +764,6 @@ class CoordinatesMap { } } - bbox_width = bbox_right + 
(panoramaSize.first - bbox_left); } else { From 66a2bec4f16182d344974f5aaba288730c8f28c0 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 3 Jan 2020 13:11:32 +0100 Subject: [PATCH 173/174] [image] RGBA constructor: avoid auto alpha --- src/aliceVision/image/image_test.cpp | 2 +- src/aliceVision/image/pixelTypes.hpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/aliceVision/image/image_test.cpp b/src/aliceVision/image/image_test.cpp index 3122ede4fc..971b575529 100644 --- a/src/aliceVision/image/image_test.cpp +++ b/src/aliceVision/image/image_test.cpp @@ -71,7 +71,7 @@ BOOST_AUTO_TEST_CASE(Image_PixelTypes) // RGBColor c(0); // Not accepted because can cause bad pixel affectation value (mixed type...) // The following issue must used : (at your own risk) RGBColor b(static_cast(0)); - RGBAColor d(BLACK); + RGBAColor d(BLACK, 255); } BOOST_AUTO_TEST_CASE(Image_ImageConverter) diff --git a/src/aliceVision/image/pixelTypes.hpp b/src/aliceVision/image/pixelTypes.hpp index 715280867c..ad8c9036e7 100644 --- a/src/aliceVision/image/pixelTypes.hpp +++ b/src/aliceVision/image/pixelTypes.hpp @@ -261,7 +261,7 @@ namespace aliceVision * @brief Copy constructor * @param val Source RGBA value */ - inline Rgba( const RGBColor & val, const T alpha = 1 ) + inline Rgba( const RGBColor & val, const T alpha ) : Base( val.r(), val.g(), val.b(), alpha ) { } From 790af0079cd6e933b4cc1727bc3e3238701107a9 Mon Sep 17 00:00:00 2001 From: Fabien Castan Date: Fri, 3 Jan 2020 13:13:19 +0100 Subject: [PATCH 174/174] minor code clean --- .../sfm/ResidualErrorConstraintFunctor.hpp | 2 - .../sfm/ResidualErrorRotationPriorFunctor.hpp | 3 +- .../ReconstructionEngine_panorama.cpp | 13 +- .../pipeline/panorama/panoramaSfM_test.cpp | 130 +++++++++--------- .../pipeline/main_panoramaCompositing.cpp | 10 +- .../pipeline/main_panoramaExternalInfo.cpp | 2 +- .../pipeline/main_panoramaWarping.cpp | 7 - 7 files changed, 75 insertions(+), 92 deletions(-) diff --git 
a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp index e0b080e8cc..1aa4a0ee6e 100644 --- a/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp +++ b/src/aliceVision/sfm/ResidualErrorConstraintFunctor.hpp @@ -123,8 +123,6 @@ struct ResidualErrorConstraintFunctor_Pinhole * - 3 => the camera extrinsic data block for the second view * */ - - struct ResidualErrorConstraintFunctor_PinholeRadialK1 { ResidualErrorConstraintFunctor_PinholeRadialK1(const Vec3 & pos_2dpoint_first, const Vec3 & pos_2dpoint_second) diff --git a/src/aliceVision/sfm/ResidualErrorRotationPriorFunctor.hpp b/src/aliceVision/sfm/ResidualErrorRotationPriorFunctor.hpp index f54c613bca..d57aef4440 100644 --- a/src/aliceVision/sfm/ResidualErrorRotationPriorFunctor.hpp +++ b/src/aliceVision/sfm/ResidualErrorRotationPriorFunctor.hpp @@ -18,12 +18,11 @@ namespace sfm { */ struct ResidualErrorRotationPriorFunctor { - ResidualErrorRotationPriorFunctor(const Eigen::Matrix3d & two_R_one) + explicit ResidualErrorRotationPriorFunctor(const Eigen::Matrix3d & two_R_one) : m_two_R_one(two_R_one) { } - /** * @param[in] cam_Rt: Camera parameterized using one block of 6 parameters [R;t]: * - 3 for rotation(angle axis), 3 for translation diff --git a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp index ac25def1bc..3a4d195ae9 100644 --- a/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/ReconstructionEngine_panorama.cpp @@ -353,7 +353,6 @@ bool ReconstructionEngine_panorama::Compute_Global_Rotations(const rotationAvera If a view with a pose prior is not found in the relative rotation, make sure we add a fake link to adjust globally everything. 
*/ - size_t count = 0; sfmData::Poses & poses = _sfmData.getPoses(); if (poses.size() > 0) { @@ -377,8 +376,6 @@ bool ReconstructionEngine_panorama::Compute_Global_Rotations(const rotationAvera } } - - // Global Rotation solver: const ERelativeRotationInferenceMethod eRelativeRotationInferenceMethod = TRIPLET_ROTATION_INFERENCE_NONE; // TRIPLET_ROTATION_INFERENCE_COMPOSITION_ERROR; @@ -681,7 +678,8 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging } } // for all relative pose - /*Find best connection with pose prior*/ + /* + // Find best connection with pose prior size_t max_val = 0; IndexT max_index = UndefinedIndexT; for (auto & item : connection_size) { @@ -693,15 +691,16 @@ void ReconstructionEngine_panorama::Compute_Relative_Rotations(rotationAveraging } } - /*If a best view is defined, lock it*/ - /*sfmData::Poses & poses = _sfmData.getPoses(); + // If a best view is defined, lock it + sfmData::Poses & poses = _sfmData.getPoses(); if (max_index != UndefinedIndexT) { sfmData::View & v = _sfmData.getView(max_index); IndexT poseid = v.getPoseId(); if (poseid != UndefinedIndexT) { poses[v.getPoseId()].lock(); } - }*/ + } + */ /*Debug result*/ ALICEVISION_LOG_DEBUG("Compute_Relative_Rotations: vec_relatives_R.size(): " << vec_relatives_R.size()); diff --git a/src/aliceVision/sfm/pipeline/panorama/panoramaSfM_test.cpp b/src/aliceVision/sfm/pipeline/panorama/panoramaSfM_test.cpp index c6a5b98f82..71363b6213 100644 --- a/src/aliceVision/sfm/pipeline/panorama/panoramaSfM_test.cpp +++ b/src/aliceVision/sfm/pipeline/panorama/panoramaSfM_test.cpp @@ -32,90 +32,90 @@ using namespace aliceVision::sfmData; BOOST_AUTO_TEST_CASE(PANORAMA_SFM) { - // rotation between the two views - const Mat3 rotation = aliceVision::rotationXYZ(0.01, -0.001, -0.2); - ALICEVISION_LOG_INFO("Ground truth rotation:\n" << rotation); - - // some random 3D points on the unit sphere - const Mat3X pts3d = Mat3X::Random(3, 20).colwise().normalized(); - 
ALICEVISION_LOG_INFO("points 3d:\n" << pts3d); - - // calibration 1 - Mat3 K1; - K1 << 1200, 0, 960, - 0, 1200, 540, - 0, 0, 1; - ALICEVISION_LOG_INFO("K1:\n" << K1); - - // calibration 2 - Mat3 K2; - K2 << 1100, 0, 960, - 0, 1100, 540, - 0, 0, 1; - ALICEVISION_LOG_INFO("K2:\n" << K2); - - // 2d points on each side as projection of the 3D points - const Mat2X pts1 = (K1 * pts3d).colwise().hnormalized(); - // ALICEVISION_LOG_INFO("pts1:\n" << pts1); - const Mat2X pts2 = (K2 * rotation * pts3d).colwise().hnormalized(); - // ALICEVISION_LOG_INFO("pts2:\n" << pts2); - - // test the uncalibrated version, just pass the 2d points and compute R - - const double epsilon = 1e-4; - - // Relative Rotation from H - { - RelativeRotationInfo rotationInfo{}; + // rotation between the two views + const Mat3 rotation = aliceVision::rotationXYZ(0.01, -0.001, -0.2); + ALICEVISION_LOG_INFO("Ground truth rotation:\n" << rotation); + + // some random 3D points on the unit sphere + const Mat3X pts3d = Mat3X::Random(3, 20).colwise().normalized(); + ALICEVISION_LOG_INFO("points 3d:\n" << pts3d); + + // calibration 1 + Mat3 K1; + K1 << 1200, 0, 960, + 0, 1200, 540, + 0, 0, 1; + ALICEVISION_LOG_INFO("K1:\n" << K1); + + // calibration 2 + Mat3 K2; + K2 << 1100, 0, 960, + 0, 1100, 540, + 0, 0, 1; + ALICEVISION_LOG_INFO("K2:\n" << K2); + + // 2d points on each side as projection of the 3D points + const Mat2X pts1 = (K1 * pts3d).colwise().hnormalized(); + // ALICEVISION_LOG_INFO("pts1:\n" << pts1); + const Mat2X pts2 = (K2 * rotation * pts3d).colwise().hnormalized(); + // ALICEVISION_LOG_INFO("pts2:\n" << pts2); + + // test the uncalibrated version, just pass the 2d points and compute R + + const double epsilon = 1e-4; + + // Relative Rotation from H + { + RelativeRotationInfo rotationInfo{}; ALICEVISION_LOG_INFO("\n\n###########################################\nUncalibrated from H"); - robustRelativeRotation_fromH(K1, K2, pts1, pts2, - std::make_pair(1920, 1080), - std::make_pair(1920, 1080), - 
rotationInfo); - ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo._relativeRotation); + robustRelativeRotation_fromH(K1, K2, pts1, pts2, + std::make_pair(1920, 1080), + std::make_pair(1920, 1080), + rotationInfo); + ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo._relativeRotation); - EXPECT_MATRIX_NEAR(rotation, rotationInfo._relativeRotation, epsilon); + EXPECT_MATRIX_NEAR(rotation, rotationInfo._relativeRotation, epsilon); - // test the calibrated version, compute normalized points for each view (by multiplying by the inverse of K) and estimate H and R + // test the calibrated version, compute normalized points for each view (by multiplying by the inverse of K) and estimate H and R ALICEVISION_LOG_INFO("\n\n###########################################\nCalibrated from H"); - robustRelativeRotation_fromH(Mat3::Identity(), Mat3::Identity(), - (K1.inverse()*pts1.colwise().homogeneous()).colwise().hnormalized(), - (K2.inverse()*pts2.colwise().homogeneous()).colwise().hnormalized(), - std::make_pair(1920, 1080), - std::make_pair(1920, 1080), - rotationInfo); - ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo._relativeRotation); + robustRelativeRotation_fromH(Mat3::Identity(), Mat3::Identity(), + (K1.inverse()*pts1.colwise().homogeneous()).colwise().hnormalized(), + (K2.inverse()*pts2.colwise().homogeneous()).colwise().hnormalized(), + std::make_pair(1920, 1080), + std::make_pair(1920, 1080), + rotationInfo); + ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo._relativeRotation); - EXPECT_MATRIX_NEAR(rotation, rotationInfo._relativeRotation, epsilon); - } + EXPECT_MATRIX_NEAR(rotation, rotationInfo._relativeRotation, epsilon); + } /* - // Relative Rotation from E - { + // Relative Rotation from E + { 
RelativePoseInfo rotationInfo{}; ALICEVISION_LOG_INFO("\n\n###########################################\nUncalibrated from E"); - robustRelativeRotation_fromE(K1, K2, pts1, pts2, - std::make_pair(1920, 1080), - std::make_pair(1920, 1080), - rotationInfo); + robustRelativeRotation_fromE(K1, K2, pts1, pts2, + std::make_pair(1920, 1080), + std::make_pair(1920, 1080), + rotationInfo); ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo.relativePose.rotation()); EXPECT_MATRIX_NEAR(rotation, rotationInfo.relativePose.rotation(), epsilon); - // test the calibrated version, compute normalized points for each view (by multiplying by the inverse of K) and estimate H and R + // test the calibrated version, compute normalized points for each view (by multiplying by the inverse of K) and estimate H and R ALICEVISION_LOG_INFO("\n\n###########################################\nCalibrated from E"); - robustRelativeRotation_fromE(Mat3::Identity(), Mat3::Identity(), - (K1.inverse()*pts1.colwise().homogeneous()).colwise().hnormalized(), - (K2.inverse()*pts2.colwise().homogeneous()).colwise().hnormalized(), - std::make_pair(1920, 1080), - std::make_pair(1920, 1080), - rotationInfo); + robustRelativeRotation_fromE(Mat3::Identity(), Mat3::Identity(), + (K1.inverse()*pts1.colwise().homogeneous()).colwise().hnormalized(), + (K2.inverse()*pts2.colwise().homogeneous()).colwise().hnormalized(), + std::make_pair(1920, 1080), + std::make_pair(1920, 1080), + rotationInfo); ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo.relativePose.rotation()); EXPECT_MATRIX_NEAR(rotation, rotationInfo.relativePose.rotation(), epsilon); - } + } */ } diff --git a/src/software/pipeline/main_panoramaCompositing.cpp b/src/software/pipeline/main_panoramaCompositing.cpp index 1c74048857..05cea4508b 100644 --- a/src/software/pipeline/main_panoramaCompositing.cpp +++ 
b/src/software/pipeline/main_panoramaCompositing.cpp @@ -362,11 +362,8 @@ bool convolveGaussian5x5(image::Image & output, const image::Image & input template bool downscale(aliceVision::image::Image & outputColor, const aliceVision::image::Image & inputColor) { - size_t width = inputColor.Width(); - size_t height = inputColor.Height(); - - size_t output_width = width / 2; - size_t output_height = height / 2; + size_t output_width = inputColor.Width() / 2; + size_t output_height = inputColor.Height() / 2; for (int i = 0; i < output_height; i++) { for (int j = 0; j < output_width; j++) { @@ -383,9 +380,6 @@ bool upscale(aliceVision::image::Image & outputColor, const aliceVision::imag size_t width = inputColor.Width(); size_t height = inputColor.Height(); - size_t output_width = width * 2; - size_t output_height = height * 2; - for (int i = 0; i < height; i++) { int di = i * 2; diff --git a/src/software/pipeline/main_panoramaExternalInfo.cpp b/src/software/pipeline/main_panoramaExternalInfo.cpp index 101308ef03..ee87cad4e5 100644 --- a/src/software/pipeline/main_panoramaExternalInfo.cpp +++ b/src/software/pipeline/main_panoramaExternalInfo.cpp @@ -100,7 +100,7 @@ int main(int argc, char * argv[]) { pt::ptree shoot = tree.get_child("papywizard.shoot"); std::string lensType = lens.get(".type"); - double lensFocal = lens.get("focal"); + // double lensFocal = lens.get("focal"); /*Make sure we control everything for debug purpose*/ if (lensType != "rectilinear") { diff --git a/src/software/pipeline/main_panoramaWarping.cpp b/src/software/pipeline/main_panoramaWarping.cpp index f4c482ed66..b858182dcd 100644 --- a/src/software/pipeline/main_panoramaWarping.cpp +++ b/src/software/pipeline/main_panoramaWarping.cpp @@ -33,10 +33,6 @@ using namespace aliceVision; namespace po = boost::program_options; namespace bpt = boost::property_tree; -float sigmoid(float x, float sigwidth, float sigMid) -{ - return 1.0f / (1.0f + expf(10.0f * ((x - sigMid) / sigwidth))); -} Eigen::VectorXf 
gaussian_kernel_vector(size_t kernel_length, float sigma) { @@ -1014,9 +1010,6 @@ class GaussianWarper : public Warper { double dyy = coord_mp(1) - coord_mm(1); double det = std::abs(dxx*dyy - dxy*dyx); double scale = sqrt(det); - - - double flevel = std::max(0.0, log2(scale)); size_t blevel = std::min(max_level, size_t(floor(flevel)));