diff --git a/apps/specular_estimation/CMakeFiles/specular_estimation.dir/src/PhotometricStereo.cc.o b/apps/specular_estimation/CMakeFiles/specular_estimation.dir/src/PhotometricStereo.cc.o
index 3c19beeffb37055c5e792956268721b62a69751b..225b08de91ea9c80d27684c24a9f8f2f84aa0862 100644
Binary files a/apps/specular_estimation/CMakeFiles/specular_estimation.dir/src/PhotometricStereo.cc.o and b/apps/specular_estimation/CMakeFiles/specular_estimation.dir/src/PhotometricStereo.cc.o differ
diff --git a/apps/specular_estimation/CMakeFiles/specular_estimation.dir/src/specular_estimation.cc.o b/apps/specular_estimation/CMakeFiles/specular_estimation.dir/src/specular_estimation.cc.o
index ccb5e78a097fbadb9e441140b89863d2668db26d..8af3decdfcf0ad4177b41bb2147107c08d6bd064 100644
Binary files a/apps/specular_estimation/CMakeFiles/specular_estimation.dir/src/specular_estimation.cc.o and b/apps/specular_estimation/CMakeFiles/specular_estimation.dir/src/specular_estimation.cc.o differ
diff --git a/apps/specular_estimation/src/Charuco.h b/apps/specular_estimation/src/Charuco.h
index 7e62da306f4c874a3421912cbaa079ba73c8b218..ff37538b7513cdc2b216c1c875ded8a94e8b9f16 100644
--- a/apps/specular_estimation/src/Charuco.h
+++ b/apps/specular_estimation/src/Charuco.h
@@ -19,17 +19,17 @@
 #include <opencv2/aruco/charuco.hpp>
 #include <opencv2/calib3d.hpp>

-void charucoCalibration(int width, int height, cv::Ptr<cv::aruco::CharucoBoard> & charucoBoard, cv::Mat & charucoBoardImage, cv::Mat & cameraMatrix, cv::Mat & distortionCoefficients, const std::string sourcePath, const std::string folderPath);
-void charucoAlignment(std::vector< cv::Mat > & charucoImages, std::vector< cv::Mat > textureImages, int numberOfLights, int width, int height, cv::Mat& rotationMatrix, cv::Mat& rotationVector, cv::Mat& perspectiveTransform, cv::Mat lightDirections, cv::Mat& lightDirectionsPerspective, std::vector< cv::Vec3d > & rvecs, std::vector< cv::Vec3d > & tvecs, cv::Ptr<cv::aruco::CharucoBoard> charucoBoard, cv::Mat charucoBoardImage, cv::Mat cameraMatrix, cv::Mat distortionCoefficients);
+void charucoCalibration(int width, int height, cv::Ptr<cv::aruco::CharucoBoard>& charucoBoard, cv::Mat& charucoBoardImage, cv::Mat& cameraMatrix, cv::Mat& distortionCoefficients, const std::string sourcePath, const std::string folderPath);
+void charucoAlignment(std::vector< cv::Mat > & charucoImages, std::vector< cv::Mat > textureImages, int numberOfLights, int width, int height, cv::Mat& rotationMatrix, cv::Mat& rotationVector, cv::Mat& perspectiveTransform, cv::Mat lightDirections, cv::Mat& lightDirectionsPerspective, std::vector<cv::Vec3d>& rvecs, std::vector<cv::Vec3d>& tvecs, cv::Ptr<cv::aruco::CharucoBoard> charucoBoard, cv::Mat charucoBoardImage, cv::Mat cameraMatrix, cv::Mat distortionCoefficients);
 cv::Mat createArucoMarker(int dictionaryId, int markerId, int borderBits, int markerSize, bool showMarker);
 void detectArucoMarkers(int dictionaryId, bool showRejected, bool estimatePose, float markerLength, cv::Mat image, cv::Mat cameraMatrix, cv::Mat distCoeffs);
 void createArucoDictionary(std::vector< cv::Mat >& arucoMarkers, int dictionaryId);
 cv::Mat drawCharuco(cv::Ptr<cv::aruco::CharucoBoard> board, int rows, int cols, bool saveCharuco, bool displayCharuco);
-void calibrateCamera(int squaresX, int squaresY, float squareLength, float markerLength, int dictionaryId, std::vector< cv::Mat > charucoImages, cv::Mat& cameraMatrix, cv::Mat& distCoeffs);
+void calibrateCamera(int squaresX, int squaresY, float squareLength, float markerLength, int dictionaryId, std::vector<cv::Mat> charucoImages, cv::Mat& cameraMatrix, cv::Mat& distCoeffs);
 void loadCharucoImages(std::vector< cv::Mat >& charucoImages, std::string charucoPath, std::string charucoName);
 void undistortCharuco(cv::Ptr<cv::aruco::CharucoBoard> board, cv::Mat inputImage, cv::Mat boardImage, cv::Mat& undistortedCharuco, cv::Mat cameraMatrix, cv::Mat distortionCoefficients, cv::Vec3d& rvec, cv::Vec3d& tvec, cv::Mat& perspectiveTransform, bool displayUndistortedCharuco);

-void charucoCalibration(int width, int height, cv::Ptr<cv::aruco::CharucoBoard> & charucoBoard, cv::Mat & charucoBoardImage, cv::Mat & cameraMatrix, cv::Mat & distortionCoefficients, const std::string imagesPath, const std::string folderPath) {
+void charucoCalibration(int width, int height, cv::Ptr<cv::aruco::CharucoBoard>& charucoBoard, cv::Mat& charucoBoardImage, cv::Mat& cameraMatrix, cv::Mat& distortionCoefficients, const std::string imagesPath, const std::string folderPath) {

     std::vector< cv::Mat > charucoImages;
     const std::string charucoPath = imagesPath + folderPath + "/charuco/";
diff --git a/apps/specular_estimation/src/OpenCV.h b/apps/specular_estimation/src/OpenCV.h
index 1f8837de0d9f3e4e8bdbd1c73e860132843bc886..c67582c885f923b63275e640fd21a9a88a84b739 100644
--- a/apps/specular_estimation/src/OpenCV.h
+++ b/apps/specular_estimation/src/OpenCV.h
@@ -75,7 +75,7 @@ void photometricStereo(std::string imageName, std::string calibration, std::stri
     //cv::waitKey(0);
 }

-void loadImages(std::string imageName, std::string calibration, int& width, int& height, int imageScale, std::vector< cv::Mat > & textureImages, std::string calibrationPath, std::vector< cv::Mat > & calibrationImages, std::string modelPath, std::vector< cv::Mat > & modelImages, cv::Rect & calibrationBoundingBox) {
+void loadImages(std::string imageName, std::string calibration, int& width, int& height, int imageScale, std::vector<cv::Mat>& textureImages, std::string calibrationPath, std::vector<cv::Mat>& calibrationImages, std::string modelPath, std::vector< cv::Mat > & modelImages, cv::Rect & calibrationBoundingBox) {

     // Obtain the height and width of the images prior to resizing
     int originalHeight;
@@ -204,7 +204,7 @@ void loadSurfaceNormals(cv::Mat lightDirections, int width, int height, int imag

 // Return an image comprised of the median intensities
 // From https://stackoverflow.com/questions/2114797/compute-median-of-values-stored-in-vector-c
-cv::Mat getMedianImage(std::vector< cv::Mat > modelImages, int height, int width) {
+cv::Mat getMedianImage(std::vector<cv::Mat> modelImages, int height, int width) {
     cv::Mat medianImage(height, width, CV_8UC3);

     int percentage = 0;
diff --git a/apps/specular_estimation/src/PhotometricStereo.cc b/apps/specular_estimation/src/PhotometricStereo.cc
index d02f19e4c1442e428e2a4d2b2ffc0548b64c1629..5d3c8eb052e8b632784cb71f7666ae421602643d 100755
--- a/apps/specular_estimation/src/PhotometricStereo.cc
+++ b/apps/specular_estimation/src/PhotometricStereo.cc
@@ -107,37 +107,6 @@ phoSte::light getLightDirection(const phoSte::circle& metalCircle,
     return phoSte::light(lx, ly, lz);
 }

-cv::Vec3f getLightDirectionFromSphere(cv::Mat Image, cv::Rect boundingBox) {
-
-    const int THRESH = 254;
-    const float radius = boundingBox.width / 2.0f;
-
-    cv::Mat Binary;
-    threshold(Image, Binary, THRESH, 255, CV_THRESH_BINARY);
-    cv::Mat SubImage(Binary, boundingBox);
-
-    // calculate center of pixels
-    cv::Moments m = moments(SubImage, false);
-    cv::Point center(int(m.m10 / m.m00), int(m.m01 / m.m00));
-
-    // x,y are swapped here // TODO check if necessary
-    float x = (center.y - radius) / radius;
-    float y = (center.x - radius) / radius;
-    float z = sqrt(1.0f - pow(x, 2.0f) - pow(y, 2.0f));
-
-    return cv::Vec3f(x, y, z);
-}
-
-phoSte::light getLightDirection(cv::Mat calibrationImage, cv::Rect calibrationBoundingBox) {
-
-    cv::Vec3f light = getLightDirectionFromSphere(calibrationImage, calibrationBoundingBox);
-    double lx = light[0];
-    double ly = light[1];
-    double lz = light[2];
-
-    return phoSte::light(lx, ly, lz);
-}
-
 void swapNum(double& a, double& b) {
     double tmp = a;
     a = b;
@@ -250,7 +219,7 @@ photometryStero::photometryStero(int imageNumber, std::string modelPath, std::st
         std::string indexString = stm.str();
         std::string calibrationNameStr = calibrationPath + indexString + ".png";

-        cv::Mat calibrationImage = cv::imread(calibrationPath + indexString + ".png", cv::IMREAD_COLOR);
+        cv::Mat calibrationImage = cv::imread(calibrationNameStr, cv::IMREAD_COLOR);

         if (!calibrationImage.data) { // Check if any images failed to load
             std::cout << imageNumber << " calibration images have been loaded." << std::endl;
@@ -340,34 +309,25 @@ bool photometryStero::readImage(std::string modelPath, std::string calibrationPa

     // read the image
     for(int i = 0; i < mImageNames.size(); i++) {
-        std::ostringstream stm;
-        stm << i;
-        std::string indexString = stm.str();
-
-        cv::Mat modelImage = cv::imread(modelPath + indexString + ".png", cv::IMREAD_COLOR);
+        cv::Mat modelImage = cv::imread(mImageNames[i], cv::IMREAD_GRAYSCALE);
+        modelImage.convertTo(modelImage, CV_32F);
         mp2Images.push_back(modelImage);
     }

     // read the calibration image
     for(int i = 0; i < mCalibrationNames.size(); i++) {
-        std::ostringstream stm;
-        stm << i;
-        std::string indexString = stm.str();
-
-        cv::Mat calibrationImage = cv::imread(calibrationPath + indexString + ".png", cv::IMREAD_COLOR);
+        cv::Mat calibrationImage = cv::imread(mCalibrationNames[i], cv::IMREAD_GRAYSCALE);
+        calibrationImage.convertTo(calibrationImage, CV_32F);
         mp2CalibrationImages.push_back(calibrationImage);
     }

     // read the macbeth image
     for(int i = 0; i < mMacbethNames.size(); i++) {
-        std::ostringstream stm;
-        stm << i;
-        std::string indexString = stm.str();
-
-        cv::Mat macbethImage = cv::imread(macbethPath + indexString + ".png", cv::IMREAD_COLOR);
+        cv::Mat macbethImage = cv::imread(mMacbethNames[i], cv::IMREAD_GRAYSCALE);
+        macbethImage.convertTo(macbethImage, CV_32F);
         mp2MacbethImages.push_back(macbethImage);
     }

     // read the maskImage
     for(int i = 0; i < mMaskNames.size(); i++) {
-        cv::Mat p2Mask = cv::imread(mMaskNames[0], cv::IMREAD_GRAYSCALE);
+        cv::Mat p2Mask = cv::imread(mMaskNames[i], cv::IMREAD_GRAYSCALE);
         if(! p2Mask.data )
             return false;
         mp2Mask.push_back(p2Mask);
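
The two helpers deleted at the top of this file recovered a light direction from the specular highlight on the chrome sphere: threshold the calibration image, take the centroid of the highlight inside the sphere's bounding box, normalise it by the sphere radius, and complete the z component from the unit-length constraint. A self-contained sketch of that calculation, assuming an 8-bit single-channel image and a roughly square bounding box (names are illustrative, not from the patch):

#include <opencv2/opencv.hpp>
#include <algorithm>
#include <cmath>

// Estimate a unit light direction from the bright spot on a chrome sphere.
// Sketch only: `image` is assumed CV_8UC1 and `boundingBox` tightly encloses the sphere.
cv::Vec3f lightFromChromeSphere(const cv::Mat& image, cv::Rect boundingBox) {
    const double thresh = 254.0;                        // keep only near-saturated pixels
    const float radius = boundingBox.width / 2.0f;

    cv::Mat binary;
    cv::threshold(image, binary, thresh, 255, cv::THRESH_BINARY);
    cv::Mat highlight(binary, boundingBox);             // restrict to the sphere region

    cv::Moments m = cv::moments(highlight, true);       // centroid of the highlight blob
    if (m.m00 == 0) return cv::Vec3f(0.f, 0.f, 1.f);    // no highlight found
    float cx = static_cast<float>(m.m10 / m.m00);
    float cy = static_cast<float>(m.m01 / m.m00);

    // Normalise to [-1, 1] across the sphere; the deleted code swapped x and y here.
    float x = (cy - radius) / radius;
    float y = (cx - radius) / radius;
    float z = std::sqrt(std::max(0.0f, 1.0f - x * x - y * y)); // unit-length constraint

    return cv::Vec3f(x, y, z);
}
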
@@ -436,6 +396,22 @@ void photometryStero::getLightInformation(const int metalIndex, cv::Rect boundin
     m_metalSphere = getCircle(metalContour[mostPointIdx]);*/

     m_metalSphere = phoSte::circle(boundingBox.x + (boundingBox.width/2), boundingBox.y + (boundingBox.height/2), (boundingBox.width/2));
+    m_light.push_back(phoSte::light(0.447712, 0.138562, 0.883377));
+    m_light.push_back(phoSte::light(0.228758, -0.106536, 0.967636));
+    m_light.push_back(phoSte::light(0.1, 0.0705882, 0.99248));
+    m_light.push_back(phoSte::light(0.000653595, -0.0718954, 0.997412));
+    m_light.push_back(phoSte::light(-0.139216, -0.12549, 0.982279));
+    m_light.push_back(phoSte::light(-0.494771, 0.115033, 0.861376));
+
+    /*
+    Light 1 position: x = 0.138562, y = 0.447712, z = 0.883377
+    Light 2 position: x = -0.106536, y = 0.228758, z = 0.967636
+    Light 3 position: x = 0.0705882, y = 0.1, z = 0.99248
+    Light 4 position: x = -0.0718954, y = 0.000653595, z = 0.997412
+    Light 5 position: x = -0.12549, y = -0.139216, z = 0.982279
+    Light 6 position: x = 0.115033, y = -0.494771, z = 0.861376
+    */
+
     for(int i = 0; i < mp2CalibrationImages.size(); i++) {

         //cv::Point metalMaxPoint;
@@ -443,7 +419,7 @@ void photometryStero::getLightInformation(const int metalIndex, cv::Rect boundin
         //cv::minMaxLoc(mp2CalibrationImages[i], NULL, NULL, NULL, &metalMaxPoint);
         //m_light.push_back(getLightDirection(m_metalSphere, metalMaxPoint));
-
+        /*
         const int THRESH = 254;
         const float radius = boundingBox.width / 2.0f;
@@ -455,26 +431,28 @@ void photometryStero::getLightInformation(const int metalIndex, cv::Rect boundin
         // calculate center of pixels
         cv::Moments m = moments(SubImage, false);
         cv::Point center(int(m.m10 / m.m00), int(m.m01 / m.m00));
-        /*
+
         // x,y are swapped here // TODO check if necessary
         float x = (center.y - radius) / radius;
         float y = (center.x - radius) / radius;
         float z = sqrt(1.0f - pow(x, 2.0f) - pow(y, 2.0f));

-        m_light.push_back(phoSte::light(x, y, z));
-        */
-        m_light.push_back(getLightDirection(m_metalSphere, center));
+        m_light.push_back(phoSte::light(x, y, z));*/
+
+        //m_light.push_back(getLightDirection(m_metalSphere, center));

-        //double lightIntensity;
+        double lightIntensity;
         //detectMacbeth(mMacbethNames[0]);
-        //cv::minMaxLoc(mp2CalibrationImages[i], NULL, &lightIntensity, NULL, NULL, mp2Mask[lambIndex]);
-        m_light[i].mIntensity = macbethIntensity[i];
+        cv::minMaxLoc(mp2MacbethImages[i], NULL, &lightIntensity, NULL, NULL);
+        m_light[i].mIntensity = lightIntensity;
+        //m_light[i].mIntensity = macbethIntensity[i];
+        //std::cout << "Light intensity: " << m_light[i].mIntensity << std::endl;
     }
 }

 // calculate the norm and albedo for every pixel
 // here, threshold is for the dark discard
-void photometryStero::getPixelNormAndAlbedo(const int objectIndex) {
+void photometryStero::getPixelNormAndAlbedo(const int objectIndex, cv::Mat lightDirections) {
     // calculate the pixel num
     //std::vector<double> pixelThreshold;
     for(int i = 0; i < mp2Mask[objectIndex].rows; i++) {
@@ -519,7 +497,7 @@ void photometryStero::getPixelNormAndAlbedo(const int objectIndex) {

     cv::Mat I(imageNum, objectPixelNum, CV_64F);
     cv::Mat L(imageNum, 3, CV_64F);
-
+    /*
     // Determine the light direction from each image of the chrome sphere
     for (int i = 0; i < imageNum; i++) {
         // Write the coordinates of the light directions into the lightDirections Mat
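
The hunk that follows contains the core of the photometric-stereo solve: the per-image light directions are packed into an imageNum x 3 matrix L, its SVD pseudo-inverse is taken once, and for each masked pixel the albedo-scaled normal is obtained as N = L+ * I, with the albedo recovered as the norm of N. A minimal per-pixel sketch of that least-squares step, using illustrative names rather than the patch's own buffers:

#include <opencv2/opencv.hpp>
#include <vector>
#include <cmath>

// Least-squares photometric stereo for a single pixel.
// `lights` holds one unit light direction per image, `intensities` the
// corresponding intensity-normalised pixel values. Illustrative sketch only.
cv::Vec3d solvePixelNormal(const std::vector<cv::Vec3d>& lights,
                           const std::vector<double>& intensities,
                           double& albedo) {
    const int n = static_cast<int>(lights.size());
    cv::Mat L(n, 3, CV_64F);
    cv::Mat I(n, 1, CV_64F);
    for (int i = 0; i < n; i++) {
        L.at<double>(i, 0) = lights[i][0];
        L.at<double>(i, 1) = lights[i][1];
        L.at<double>(i, 2) = lights[i][2];
        I.at<double>(i, 0) = intensities[i];
    }

    cv::Mat Linv;
    cv::invert(L, Linv, cv::DECOMP_SVD);   // 3 x n pseudo-inverse, as in the patch
    cv::Mat N = Linv * I;                  // 3 x 1 albedo-scaled normal

    double nx = N.at<double>(0, 0), ny = N.at<double>(1, 0), nz = N.at<double>(2, 0);
    albedo = std::sqrt(nx * nx + ny * ny + nz * nz);
    return cv::Vec3d(nx, ny, nz);
}

With fewer than three usable (non-shadowed) lights the system is underdetermined, which is why the code below zeroes the corresponding rows of a per-pixel copy of L and marks the pixel invalid once imageNum - inValidNum drops below 3.
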
@@ -534,6 +512,118 @@ void photometryStero::getPixelNormAndAlbedo(const int objectIndex) {

     std::cout << "Inverted\n";

+    */
+    double *pL = (double *)L.data;
+    for(int i = 0; i < imageNum; i++) {
+        *pL = m_light[i].mx;
+        *(pL + 1) = m_light[i].my;
+        *(pL + 2) = m_light[i].mz;
+        pL += 3;
+    }
+    cv::Mat LPseudoInvert;
+    cv::invert(L, LPseudoInvert, cv::DECOMP_SVD); //std::cout << "2.1\n";
+    //lightDirections.convertTo(LPseudoInvert, CV_64F);
+    //LPseudoInvert = lightDirections;
+    double *pI = (double *)I.data;
+    double *pN = (double *)mN.data;
+
+
+    for(int i = 0; i < objectPixelNum; i++) {
+        int inValidNum = 0;
+        cv::Mat specificL;
+        for(int j = 0; j < imageNum; j++) {
+            if(allPixelValue[j][i] < allThreshold[j]) {
+                inValidNum ++;
+                if(inValidNum == 1) {
+                    L.copyTo(specificL);
+                }
+                double *p2LRow = specificL.ptr<double>(j);
+                *(p2LRow) = 0;
+                *(p2LRow + 1) = 0;
+                *(p2LRow + 2) = 0;
+                *(pI + j * objectPixelNum + i) = 0;
+            } else {
+                *(pI + j * objectPixelNum + i) = allPixelValue[j][i] / m_light[j].mIntensity;
+            }
+            if(imageNum - inValidNum < 3) {
+                break;
+            }
+        }
+        //std::cout << "3\n";
+        if(inValidNum == 0) {
+            //std::cout << "4.0 inValidNum == 0\n";
+            mN.col(i) = LPseudoInvert * I.col(i); //std::cout << "4.1\n";
+            double nx = mN.col(i).at<double>(0, 0); //std::cout << "4.2\n";
+            double ny = mN.col(i).at<double>(1, 0); //std::cout << "4.3\n";
+            double nz = mN.col(i).at<double>(2, 0); //std::cout << "4.4\n";
+            mAlbedo.at<double>(0, i) = sqrt(nx * nx + ny * ny + nz * nz); //std::cout << "4.5\n";
+        } else if(imageNum - inValidNum >= 3) {
+            //std::cout << "5.0 imageNum - inValidNum >= 3\n";
+            cv::Mat specificLPseudoInvert; //std::cout << "5.1\n";
+            cv::invert(specificL, specificLPseudoInvert, cv::DECOMP_SVD); //std::cout << "5.2\n";
+            mN.col(i) = specificLPseudoInvert * I.col(i); //std::cout << "5.2\n";
+            double nx = mN.col(i).at<double>(0, 0); //std::cout << "5.3\n";
+            double ny = mN.col(i).at<double>(1, 0); //std::cout << "5.4\n";
+            double nz = mN.col(i).at<double>(2, 0); //std::cout << "5.5\n";
+            mAlbedo.at<double>(0, i) = sqrt(nx * nx + ny * ny + nz * nz); //std::cout << "5.6\n";
+        } else {
+            //std::cout << "6.0 else\n";
+            mN.at<double>(0, i) = 0; //std::cout << "6.1\n";
+            mN.at<double>(1, i) = 0; //std::cout << "6.2\n";
+            mN.at<double>(2, i) = 0; //std::cout << "6.3\n";
+            mAlbedo.at<double>(0, i) = 0; //std::cout << "6.4\n";
+            mInvalidIndex.push_back(i); //std::cout << "6.5\n";
+        }
+    }
+}
+
+// calculate the norm and albedo for every pixel
+// here, threshold is for the dark discard
+void photometryStero::getPixelNormAndAlbedo(const int objectIndex) {
+    // calculate the pixel num
+    //std::vector<double> pixelThreshold;
+    for(int i = 0; i < mp2Mask[objectIndex].rows; i++) {
+        for(int j = 0; j < mp2Mask[objectIndex].cols; j++) {
+            if(mp2Mask[objectIndex].at<unsigned char>(i, j) >= 255) {
+                mObjectX.push_back(j);
+                mObjectY.push_back(i);
+            }
+        }
+    }
+
+    //std::cout << "Calculated the pixel coordinates within the mask\n";
+
+    std::vector<std::vector<double>> allPixelValue;
+    allPixelValue.reserve(imageNum);
+    int objectPixelNum = mObjectX.size();
+    for(int i = 0; i < imageNum; i++) {
+        std::vector<double> pixelValue;
+        allPixelValue.push_back(pixelValue);
+        allPixelValue[i].reserve(objectPixelNum);
+        for(int j = 0; j < objectPixelNum; j++) {
+            allPixelValue[i].push_back(mp2Images[i].at<float>(mObjectY[j], mObjectX[j]));
+        }
+    }
+
+    //std::cout << "Store all pixel values in a vector\n";
+
+    // threshold for every image;
+    std::vector<double> allThreshold;
+    allThreshold.reserve(imageNum);
+    int thresholdIndex = mDiscardRatio * objectPixelNum;
+    for(int i = 0; i < imageNum; i++) {
+        std::vector<double> tmpPixelValue = allPixelValue[i];
+        allThreshold.push_back(
+            quickSelect(tmpPixelValue, 0, objectPixelNum - 1, thresholdIndex));
+    }
+
+    //std::cout << "Threshold for every image\n";
+
+    mN.create(3, objectPixelNum, CV_64F);
+    mAlbedo.create(1, objectPixelNum, CV_64F);
+    cv::Mat I(imageNum, objectPixelNum, CV_64F);
+    cv::Mat L(imageNum, 3, CV_64F);
+
     double *pL = (double *)L.data;
     for(int i = 0; i < imageNum; i++) {
         *pL = m_light[i].mx;
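
The re-added single-argument getPixelNormAndAlbedo above discards shadowed pixels per image by taking the value at rank mDiscardRatio * objectPixelNum (via quickSelect) as a darkness cutoff. std::nth_element yields the same rank statistic; a small, purely illustrative sketch:

#include <algorithm>
#include <vector>

// Per-image dark-pixel cutoff: the value at rank `discardRatio * values.size()`.
// Pixels below this threshold are treated as shadowed and excluded from the solve.
// Equivalent to the quickSelect call in the patch; sketch only.
double darkThreshold(std::vector<double> values, double discardRatio) {
    if (values.empty()) return 0.0;
    size_t k = static_cast<size_t>(discardRatio * values.size());
    if (k >= values.size()) k = values.size() - 1;
    std::nth_element(values.begin(), values.begin() + k, values.end());
    return values[k];
}
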
@@ -542,10 +632,10 @@ void photometryStero::getPixelNormAndAlbedo(const int objectIndex) {
         pL += 3;
     }

-    cv::Mat LPseudoInvert; std::cout << "2\n";
-    cv::invert(L, LPseudoInvert, cv::DECOMP_SVD); std::cout << "2.1\n";
-    double *pI = (double *)I.data; std::cout << "2.2\n";
-    double *pN = (double *)mN.data; std::cout << "2.3\n";
+    cv::Mat LPseudoInvert;
+    cv::invert(L, LPseudoInvert, cv::DECOMP_SVD);
+    double *pI = (double *)I.data;
+    double *pN = (double *)mN.data;


     for(int i = 0; i < objectPixelNum; i++) {
@@ -569,14 +659,12 @@ void photometryStero::getPixelNormAndAlbedo(const int objectIndex) {
                 break;
             }
         }
-        std::cout << "3\n";
         if(inValidNum == 0) {
             mN.col(i) = LPseudoInvert * I.col(i);
             double nx = mN.col(i).at<double>(0, 0);
             double ny = mN.col(i).at<double>(1, 0);
             double nz = mN.col(i).at<double>(2, 0);
             mAlbedo.at<double>(0, i) = sqrt(nx * nx + ny * ny + nz * nz);
-            std::cout << "4\n";
         } else if(imageNum - inValidNum >= 3) {
             cv::Mat specificLPseudoInvert;
             cv::invert(specificL, specificLPseudoInvert, cv::DECOMP_SVD);
@@ -585,14 +673,12 @@ void photometryStero::getPixelNormAndAlbedo(const int objectIndex) {
             double ny = mN.col(i).at<double>(1, 0);
             double nz = mN.col(i).at<double>(2, 0);
             mAlbedo.at<double>(0, i) = sqrt(nx * nx + ny * ny + nz * nz);
-            std::cout << "5\n";
         } else {
             mN.at<double>(0, i) = 0;
             mN.at<double>(1, i) = 0;
             mN.at<double>(2, i) = 0;
             mAlbedo.at<double>(0, i) = 0;
             mInvalidIndex.push_back(i);
-            std::cout << "6\n";
         }
     }
 }
@@ -749,7 +835,8 @@ cv::Mat photometryStero::getHeightMap(int mode, double parameter) {
     // I use the 3*3 window to get average normal
     cv::Mat NForHeight;
     mN.copyTo(NForHeight);
-    cv::Mat heightMap = cv::Mat::zeros(mp2Images[0].rows, mp2Images[0].rows, CV_32F);
+    //cv::Mat heightMap = cv::Mat::zeros(mp2Images[0].rows, mp2Images[0].rows, CV_32F);
+    cv::Mat heightMap = cv::Mat::zeros(mp2Images[0].rows, mp2Images[0].cols, CV_32F);
     int nInvalid = mInvalidIndex.size();
     int iterateTime = 0;
     while (nInvalid > 0) {
diff --git a/apps/specular_estimation/src/PhotometricStereo.h b/apps/specular_estimation/src/PhotometricStereo.h
index e37a2faf1ba73b2e04fbbb7548441f15fa9b9505..65158231a54a99b053a26cc6255ec0973fc79bd7 100755
--- a/apps/specular_estimation/src/PhotometricStereo.h
+++ b/apps/specular_estimation/src/PhotometricStereo.h
@@ -55,6 +55,7 @@ class photometryStero {
     void getLightInformation(const int metalIndex, const int lambIndex);
     void getLightInformation(const int metalIndex, cv::Rect boundingBox, std::vector<double> macbethIntensity);
     void getPixelNormAndAlbedo(const int objectIndex);
+    void getPixelNormAndAlbedo(const int objectIndex, cv::Mat lightDirections);
     cv::Mat outputNormalImage(int objectIndex);
     cv::Mat outputAlbedoImage(int objectIndex);
     cv::Mat outputNormalWithAlbedo(int objectIndex);
diff --git a/apps/specular_estimation/src/specular_estimation.cc b/apps/specular_estimation/src/specular_estimation.cc
index 38be5f8e48d689282bf301fe093e99435d8bca81..2e9284fc80b1c60c3a1616bb44c859979718e6ba 100644
--- a/apps/specular_estimation/src/specular_estimation.cc
+++ b/apps/specular_estimation/src/specular_estimation.cc
@@ -50,7 +50,8 @@ int main(int argc, char** argv) {
     const std::string imagesPath = "/home/thomas/Documents/";
     const std::string folderPath = "2018-08-31";
     const std::string modelPath = imagesPath + folderPath + "/" + imageName + "/" + imageName + ".";
-    const std::string calibrationPath = imagesPath + folderPath + "/" + calibration + "/" + calibration + ".";
+    //const std::string calibrationPath = imagesPath + folderPath + "/" + calibration + "/" + calibration + ".";
+    const std::string calibrationPath = imagesPath + "2017-12-04" + "/" + calibration + "/" + calibration + ".";
     const std::string macbethPath = imagesPath + folderPath + "/macbeth/macbeth.";
     const std::string texturePath = modelPath + "texture.png";

@@ -68,7 +69,7 @@ int main(int argc, char** argv) {
         macbethIntensity.push_back(brightness);
         //std::cout << "Brightness of white Macbeth square: " << brightness << std::endl;
     }
-
+
     // A vector of Mats allow multiple images to be allocated to the same object
     // textureImages are the full colour photographs that are used to create the texture
     // modelImages are the greyscale version of textureImages which are used to calculate the surface normals
@@ -78,12 +79,13 @@ int main(int argc, char** argv) {
     cv::Mat heightMap, normalMap, texture, lightDirections;
     cv::Rect calibrationBoundingBox;
     int width, height;
-/*
+
     // Load the textureImages, modelImages, calibrationImages and charucoImages, then determine the bounding box around the chrome sphere
     loadImages(imageName, calibration, width, height, imageScale, textureImages, calibrationPath, calibrationImages, modelPath, modelImages, calibrationBoundingBox);

     // The number of lights is equal to the number of calibration images
     int numberOfLights = int(calibrationImages.size());
+
     // The rotation matrix is created from the rotation and translation vectors, and describes the rotation of the ChArUco board
     // The inverse of this transformation is applied to the light directions
     // The perspective transform warps the model images to align them with the ChArUco board
@@ -92,7 +94,7 @@ int main(int argc, char** argv) {

     cv::Ptr<cv::aruco::CharucoBoard> charucoBoard;
     charucoCalibration(width, height, charucoBoard, charucoBoardImage, cameraMatrix, distortionCoefficients, imagesPath, folderPath);
-
+
     //charucoAlignment(charucoImages, textureImages, numberOfLights, width, height, rotationMatrix, rotationVector, perspectiveTransform, lightDirections, lightDirectionsPerspective, rvecs, tvecs, charucoBoard, charucoBoardImage, cameraMatrix, distortionCoefficients);
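
The comments in the hunk above describe the intended geometry: the rotation matrix built from the ChArUco rotation vector gives the board's orientation, and the inverse of that transformation is applied to the calibrated light directions before the perspective transform warps the images onto the board. A sketch of that light-direction step under stated assumptions (rvec from pose estimation, one CV_64F unit direction per row; names are illustrative, not from the patch):

#include <opencv2/opencv.hpp>

// Rotate calibrated light directions into the ChArUco board frame by applying
// the inverse of the board rotation described by `rvec`. Sketch only.
cv::Mat alignLightsToBoard(const cv::Vec3d& rvec, const cv::Mat& lightDirections) {
    cv::Mat R;
    cv::Rodrigues(rvec, R);        // 3x3 rotation of the board
    cv::Mat Rinv = R.t();          // inverse of a rotation matrix is its transpose

    cv::Mat aligned(lightDirections.size(), CV_64F);
    for (int i = 0; i < lightDirections.rows; i++) {
        cv::Mat d = lightDirections.row(i).t();   // 3x1 direction
        cv::Mat rotated = Rinv * d;               // direction in the board frame
        cv::Mat rowOut = rotated.t();             // back to 1x3
        rowOut.copyTo(aligned.row(i));
    }
    return aligned;
}
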
@@ -105,167 +107,75 @@ int main(int argc, char** argv) {

     photometricStereo(imageName, calibration, modelPath, imageScale, heightMap, normalMap, texture, lightDirections, lightDirectionsPerspective, textureImages, modelImages, calibrationImages, calibrationBoundingBox, width, height, rvecs, tvecs);

-*/
+
     //phoSte::photometryStero B(21, 2, 22, "/home/thomas/Documents/Photometric-Stereo/Assignment_1/Apple/", "mask_dir_1.png", "mask_dir_2.png", "mask_I.png", "applemask.png");
     //B.readImage();
-
-    phoSte::photometryStero A(0, modelPath, calibrationPath, macbethPath, imageName, calibration, 0.1);
-    A.readImage(modelPath, calibrationPath, macbethPath);
-    cv::Mat calibrationMask = loadCalibrationMask(calibrationPath, height, width);
+    const std::string calibrationPath2 = imagesPath + folderPath + "/" + calibration + "/" + calibration + ".";
+
+    phoSte::photometryStero A(numberOfLights, modelPath, calibrationPath2, macbethPath, imageName, calibration, 0);
+    A.readImage(modelPath, calibrationPath2, macbethPath);
+    cv::Mat calibrationMask = loadCalibrationMask(calibrationPath2, height, width);
     calibrationBoundingBox = getBoundingBox(calibrationMask);
     A.getLightInformation(0, calibrationBoundingBox, macbethIntensity);
     std::cout << "Got light information\n";
     A.getPixelNormAndAlbedo(1);
     std::cout << "Got Normal and Albedo\n";
-    cv::Mat result = A.outputNormalImage(3);
-    cv::imshow("apple normal image", result);
-    cv::imwrite(modelPath + "normal", result);
+
+    normalMap = A.outputNormalImage(1);
+    cv::imshow("Normal Map", normalMap);
+    texture = A.outputAlbedoImage(1);
+    cv::imshow("Albedo", texture);
+    cv::Mat normalAlbedo = A.outputNormalWithAlbedo(1);
+    cv::imshow("Normal with Albedo", normalAlbedo);
+    //heightMap = A.getHeightMap(2, -0.1);
+    //cv::imshow("Height Map", heightMap);
+    //cv::imwrite(modelPath + "normal", result);
     cv::waitKey(0);

-    int numberOfLights = 6;
+    //normalMap.convertTo(normalMap, CV_8UC1);
+    texture.convertTo(texture, CV_8UC1);
+    cv::cvtColor(texture, texture, CV_GRAY2BGR);
+    //texture.convertTo(texture, CV_GRAY2BGR);
+    //normalAlbedo.convertTo(normalAlbedo, CV_8UC3);
+
+    //int numberOfLights = 6;

-/*
+    /*
     int imageNum = numberOfLights;
     int objectPixelNum = width;
     std::vector<phoSte::light> m_light;
     for (int i = 0; i < numberOfLights; i++) {
         m_light.push_back(phoSte::light(lightDirections.at<float>(i, 0), lightDirections.at<float>(i, 1), lightDirections.at<float>(i, 2)));
     }
-
-    cv::Mat I(imageNum, objectPixelNum, CV_64F);
-    cv::Mat L(imageNum, 3, CV_64F);
-    cv::Mat mN(3, objectPixelNum, CV_64F);
-    cv::Mat mAlbedo(1, objectPixelNum, CV_64F);
-    double *pL = (double *)L.data;
-    for(int i = 0; i < imageNum; i++) {
-        *pL = m_light[i].mx;
-        *(pL + 1) = m_light[i].my;
-        *(pL + 2) = m_light[i].mz;
-        pL += 3;
-    }
-    cv::Mat LPseudoInvert;
-    cv::invert(L, LPseudoInvert, cv::DECOMP_SVD);
-    double *pI = (double *)I.data;
-    double *pN = (double *)mN.data;
-
-    for(int i = 0; i < objectPixelNum; i++) {
-        int inValidNum = 0;
-        cv::Mat specificL;
-        for(int j = 0; j < imageNum; j++) {
-            //if(allPixelValue[j][i] < allThreshold[j]) {
-                inValidNum ++;
-                if(inValidNum == 1) {
-                    L.copyTo(specificL);
-                }
-                double *p2LRow = specificL.ptr<double>(j);
-                *(p2LRow) = 0;
-                *(p2LRow + 1) = 0;
-                *(p2LRow + 2) = 0;
-                *(pI + j * objectPixelNum + i) = 0;
-            } else {
-                *(pI + j * objectPixelNum + i) = allPixelValue[j][i] / m_light[j].mIntensity;
-            }
-            if(imageNum - inValidNum < 3) {
-                break;
-            }
-        }
-        if(inValidNum == 0) {
-            mN.col(i) = LPseudoInvert * I.col(i);
-            double nx = mN.col(i).at<double>(0, 0);
-            double ny = mN.col(i).at<double>(1, 0);
-            double nz = mN.col(i).at<double>(2, 0);
-            mAlbedo.at<double>(0, i) = sqrt(nx * nx + ny * ny + nz * nz);
-        } else if(imageNum - inValidNum >= 3) {
-            cv::Mat specificLPseudoInvert;
-            cv::invert(specificL, specificLPseudoInvert, cv::DECOMP_SVD);
-            mN.col(i) = specificLPseudoInvert * I.col(i);
-            double nx = mN.col(i).at<double>(0, 0);
-            double ny = mN.col(i).at<double>(1, 0);
-            double nz = mN.col(i).at<double>(2, 0);
-            mAlbedo.at<double>(0, i) = sqrt(nx * nx + ny * ny + nz * nz);
-        } else {
-            mN.at<double>(0, i) = 0;
-            mN.at<double>(1, i) = 0;
-            mN.at<double>(2, i) = 0;
-            mAlbedo.at<double>(0, i) = 0;
-            //mInvalidIndex.push_back(i);
-        }
-    }
-
-    //cv::Mat normal = A.outputNormalImage(3);
-    cv::Mat normal = cv::Mat::zeros(height, width, CV_32FC3);
-    //int nextInvalid = mInvalidIndex.empty() ? INT_MAX : mInvalidIndex[0];
-    int nextInvalidIndex = 0;
-    int invalidPixelNum = 0;
-    for (int i = 0; i < width; i++) {
-        for (int j = 0; j < height; j++) {
-            double nx = mN.at<double>(0, i);
-            double ny = mN.at<double>(1, i);
-            double nz = mN.at<double>(2, i);
-            if (i == nextInvalid) {
-                invalidPixelNum++;
-                normal.at<cv::Vec3f>(j, i)[0] = 0;
-                normal.at<cv::Vec3f>(j, i)[1] = 0;
-                normal.at<cv::Vec3f>(j, i)[2] = 0;
-                if (nextInvalidIndex + 1 == mInvalidIndex.size()) {
-                    nextInvalid = INT_MAX;
-                } else {
-                    nextInvalidIndex++;
-                    nextInvalid = mInvalidIndex[nextInvalidIndex];
-                }
-            } else {
-                double rootsquareSum = sqrt(nx * nx + ny * ny + nz * nz);
-                float nxf = (1 + nx / rootsquareSum) / 2;
-                float nyf = (1 + ny / rootsquareSum) / 2;
-                float nzf = (1 + nz / rootsquareSum) / 2;
-                normal.at<cv::Vec3f>(j, i)[0] = nxf;
-                normal.at<cv::Vec3f>(j, i)[1] = nyf;
-                normal.at<cv::Vec3f>(j, i)[2] = nzf;
-                //}
-            }
-        }
-
-    cv::imshow("Surface Normals", normal);
-    //cv::imwrite("/home/thomas/Documents/Photometric-Stereo/Assignment_1/Apple/normal.jpg", normal);
-    cv::waitKey(0);*/
-/*
-    cv::Mat albedo = A.outputAlbedoImage(3);
-    cv::imshow("Albedo", albedo);
-    cv::waitKey(0);
-
-    cv::Mat normalWithAlbedo = A.outputNormalWithAlbedo(3);
-    cv::imshow("Albedo with Surface Normals", normalWithAlbedo);
-    cv::waitKey(0);
-
-    cv::Mat heights = A.getHeightMap(2, -0.1);
-    //std::string fileName = "appleDepth.png";
-    //cv::imwrite(path + fileName, heightMap);
-    cv::imshow("Height Map", heights);
-    cv::waitKey(0);
-*/
-    /*phoSte::photometryStero A(21, 2, 22, "/home/thomas/Documents/Photometric-Stereo/Assignment_1/Apple/", "mask_dir_1.png", "mask_dir_2.png", "mask_I.png", "applemask.png");
-    A.readImage();
-    A.getLightInformation(1, 2);
-    A.getPixelNormAndAlbedo(3);
-    cv::Mat normal = A.outputNormalImage(3);
-    cv::imshow("Surface Normals", normal);
+    */
+    /*
+    phoSte::photometryStero B(21, 2, 22, "/home/thomas/Documents/Photometric-Stereo/Assignment_1/Apple/", "mask_dir_1.png", "mask_dir_2.png", "mask_I.png", "applemask.png");
+    std::cout << "1\n";
+    B.readImage();
+    std::cout << "2\n";
+    B.getLightInformation(1, 2);
+    std::cout << "3\n";
+    B.getPixelNormAndAlbedo(3);
+    std::cout << "4\n";
+    cv::Mat applenormal = B.outputNormalImage(3);
+    cv::imshow("Surface Normals", applenormal);
     //cv::imwrite("/home/thomas/Documents/Photometric-Stereo/Assignment_1/Apple/normal.jpg", normal);
-    cv::waitKey(0);
+    //cv::waitKey(0);

-    cv::Mat albedo = A.outputAlbedoImage(3);
-    cv::imshow("Albedo", albedo);
-    cv::waitKey(0);
+    cv::Mat applealbedo = B.outputAlbedoImage(3);
+    cv::imshow("Albedo", applealbedo);
+    //cv::waitKey(0);

-    cv::Mat normalWithAlbedo = A.outputNormalWithAlbedo(3);
-    cv::imshow("Albedo with Surface Normals", normalWithAlbedo);
-    cv::waitKey(0);
+    cv::Mat applenormalWithAlbedo = B.outputNormalWithAlbedo(3);
+    cv::imshow("Albedo with Surface Normals", applenormalWithAlbedo);
+    //cv::waitKey(0);

-    cv::Mat heights = A.getHeightMap(2, -0.1);
+    cv::Mat appleheights = B.getHeightMap(2, -0.1);
     //std::string fileName = "appleDepth.png";
     //cv::imwrite(path + fileName, heightMap);
-    cv::imshow("Height Map", heights);
+    cv::imshow("Height Map", appleheights);
     cv::waitKey(0);
     */

@@ -287,6 +197,16 @@ int main(int argc, char** argv) {
     bool calculateResidual = false, perspectiveProjection, shadowControl;
     double totalResidual, residualValue, SpecularIntensity = 0.5, SpecularPower = 2.0;
     cv::Mat residualImage;
+
+    /*lightDirections = (Mat_<double>(3,3) << 0.447712 << 0.138562 << 0.883377
+    cv::invert(L, lightDirectionsInverted, cv::DECOMP_SVD);
+
+    lightInvDirs.push_back(phoSte::light(0.447712, 0.138562, 0.883377));
+    lightInvDirs.push_back(phoSte::light(0.228758, -0.106536, 0.967636));
+    lightInvDirs.push_back(phoSte::light(0.1, 0.0705882, 0.99248));
+    lightInvDirs.push_back(phoSte::light(0.000653595, -0.0718954, 0.997412));
+    lightInvDirs.push_back(phoSte::light(-0.139216, -0.12549, 0.982279));
+    lightInvDirs.push_back(phoSte::light(-0.494771, 0.115033, 0.861376));*/

     std::vector<glm::vec3> lightInvDirs;
     for (int i = 0; i < numberOfLights; i++)
diff --git a/bin/specular_estimation b/bin/specular_estimation
index 9a5925c7eb19cd512a79b34dbf193684672cd910..efa94b177119278adadeafefb949cb6cb69f1a91 100755
Binary files a/bin/specular_estimation and b/bin/specular_estimation differ
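
The diff context cuts off the loop that fills lightInvDirs from lightDirections; purely as an illustration (not the elided code), converting rows of an Nx3 CV_32F light-direction matrix into glm::vec3 entries could look like this:

#include <opencv2/core.hpp>
#include <glm/glm.hpp>
#include <vector>

// Copy one light direction per row of an Nx3 CV_32F matrix into glm vectors.
// Illustrative sketch only; the loop body truncated by the diff context is not shown above.
std::vector<glm::vec3> toGlmDirections(const cv::Mat& lightDirections) {
    std::vector<glm::vec3> dirs;
    dirs.reserve(lightDirections.rows);
    for (int i = 0; i < lightDirections.rows; i++) {
        dirs.push_back(glm::vec3(lightDirections.at<float>(i, 0),
                                 lightDirections.at<float>(i, 1),
                                 lightDirections.at<float>(i, 2)));
    }
    return dirs;
}
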