Commit 9f75b1d4 authored by Matt Clarkson

Issue #68: Query replace all cvCreateMat with cv::Mat::zeros

parent 8ad570de
......@@ -63,8 +63,8 @@ double CalculateRMSOnUndistortedPoints(const niftk::Model3D& model,
cv::Mat greyLeft;
cv::cvtColor(colourLeftImages[i], greyLeft, CV_BGR2GRAY);
cv::Mat mapX = cvCreateMat(colourLeftImages[i].rows, colourLeftImages[i].cols, CV_32FC1);
cv::Mat mapY = cvCreateMat(colourLeftImages[i].rows, colourLeftImages[i].cols, CV_32FC1);
cv::Mat mapX = cv::Mat::zeros(colourLeftImages[i].rows, colourLeftImages[i].cols, CV_32FC1);
cv::Mat mapY = cv::Mat::zeros(colourLeftImages[i].rows, colourLeftImages[i].cols, CV_32FC1);
cv::initUndistortRectifyMap(intrinsicLeft, distortionLeft, cv::noArray(), intrinsicLeft, colourLeftImages[i].size(), CV_32FC1, mapX, mapY);
cv::Mat undistortedLeft;
......@@ -78,8 +78,8 @@ double CalculateRMSOnUndistortedPoints(const niftk::Model3D& model,
cv::Mat greyRight;
cv::cvtColor(colourRightImages[i], greyRight, CV_BGR2GRAY);
mapX = cvCreateMat(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
mapY = cvCreateMat(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
mapX = cv::Mat::zeros(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
mapY = cv::Mat::zeros(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
cv::initUndistortRectifyMap(intrinsicRight, distortionRight, cv::noArray(), intrinsicRight, colourRightImages[i].size(), CV_32FC1, mapX, mapY);
cv::Mat undistortedRight;
......@@ -340,8 +340,8 @@ int main(int argc, char ** argv)
std::ostringstream videoFileName;
videoFileName << "/tmp/matt.rhs.normal.video." << i << ".png";
cv::Mat mapX = cvCreateMat(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
cv::Mat mapY = cvCreateMat(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
cv::Mat mapX = cv::Mat::zeros(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
cv::Mat mapY = cv::Mat::zeros(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
cv::initUndistortRectifyMap(intrinsicRight, distortionRight, cv::noArray(), intrinsicRight, imageSize, CV_32FC1, mapX, mapY);
cv::Mat undistorted;
......@@ -495,8 +495,8 @@ int main(int argc, char ** argv)
std::ostringstream videoFileName;
videoFileName << "/tmp/matt.rhs.optimised.video." << i << ".png";
cv::Mat mapX = cvCreateMat(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
cv::Mat mapY = cvCreateMat(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
cv::Mat mapX = cv::Mat::zeros(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
cv::Mat mapY = cv::Mat::zeros(colourRightImages[i].rows, colourRightImages[i].cols, CV_32FC1);
cv::initUndistortRectifyMap(intrinsicRight, distortionRight, cv::noArray(), intrinsicRight, imageSize, CV_32FC1, mapX, mapY);
cv::Mat undistorted;
......
......@@ -313,7 +313,7 @@ int main(int argc, char ** argv)
for (int s = 0; s < numberOfSteps; s++)
{
cv::Mat tmpLeftToRightTrans = cvCreateMat(3, 1, CV_64FC1);
cv::Mat tmpLeftToRightTrans = cv::Mat::zeros(3, 1, CV_64FC1);
tmpLeftToRightTrans.at<double>(0, 0) = parameters[0];
tmpLeftToRightTrans.at<double>(1, 0) = parameters[1];
tmpLeftToRightTrans.at<double>(2, 0) = parameters[2];
......@@ -322,7 +322,7 @@ int main(int argc, char ** argv)
tmpAxisAngle(0, 3) = parameters[3];
cv::Mat tmpLeftToRightRotVec = niftk::AxisAngleToRodrigues(tmpAxisAngle);
cv::Mat tmpLeftToRightRotMat = cvCreateMat(3, 3, CV_64FC1);
cv::Mat tmpLeftToRightRotMat = cv::Mat::zeros(3, 3, CV_64FC1);
cv::Rodrigues(tmpLeftToRightRotVec, tmpLeftToRightRotMat);
cv::Point3d rmsPerAxis;
......
......@@ -60,22 +60,22 @@ NonLinearMaltiHandEyeCostFunction::InternalGetValue(const ParametersType& parame
internalParameters[i] = parameters[i];
}
cv::Mat handEyeRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
handEyeRotationVector.at<double>(0, 0) = parameters[9];
handEyeRotationVector.at<double>(0, 1) = parameters[10];
handEyeRotationVector.at<double>(0, 2) = parameters[11];
cv::Mat handEyeTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
handEyeTranslationVector.at<double>(0, 0) = parameters[12];
handEyeTranslationVector.at<double>(0, 1) = parameters[13];
handEyeTranslationVector.at<double>(0, 2) = parameters[14];
cv::Mat modelToWorldRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
modelToWorldRotationVector.at<double>(0, 0) = parameters[15];
modelToWorldRotationVector.at<double>(0, 1) = parameters[16];
modelToWorldRotationVector.at<double>(0, 2) = parameters[17];
cv::Mat modelToWorldTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
modelToWorldTranslationVector.at<double>(0, 0) = parameters[18];
modelToWorldTranslationVector.at<double>(0, 1) = parameters[19];
modelToWorldTranslationVector.at<double>(0, 2) = parameters[20];
......@@ -95,8 +95,8 @@ NonLinearMaltiHandEyeCostFunction::InternalGetValue(const ParametersType& parame
cv::Matx44d worldToHand = handToWorld.inv();
cv::Matx44d cameraMatrix = handEye * worldToHand * modelToWorld;
cv::Mat rvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat tvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat rvec = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat tvec = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(cameraMatrix, rvec, tvec);
internalParameters[parameterCounter++] = rvec.at<double>(0, 0);
......
......@@ -77,12 +77,12 @@ double NonLinearMaltiHandEyeOptimiser::Optimise(cv::Matx44d& modelToWorld,
<< distortion.rows << ", " << distortion.cols << ")";
}
cv::Mat modelToWorldRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(modelToWorld, modelToWorldRotationVector, modelToWorldTranslationVector);
cv::Mat handEyeRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(handEye, handEyeRotationVector, handEyeTranslationVector);
niftk::NonLinearMaltiHandEyeCostFunction::ParametersType initialParameters;
......
......@@ -65,22 +65,22 @@ NonLinearNDOFHandEyeCostFunction::InternalGetValue(const ParametersType& paramet
internalParameters[7] = (*m_Distortion).at<double>(0, 3);
internalParameters[8] = (*m_Distortion).at<double>(0, 4);
cv::Mat handEyeRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
handEyeRotationVector.at<double>(0, 0) = parameters[0];
handEyeRotationVector.at<double>(0, 1) = parameters[1];
handEyeRotationVector.at<double>(0, 2) = parameters[2];
cv::Mat handEyeTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
handEyeTranslationVector.at<double>(0, 0) = parameters[3];
handEyeTranslationVector.at<double>(0, 1) = parameters[4];
handEyeTranslationVector.at<double>(0, 2) = parameters[5];
cv::Mat modelToWorldRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
modelToWorldRotationVector.at<double>(0, 0) = parameters[6];
modelToWorldRotationVector.at<double>(0, 1) = parameters[7];
modelToWorldRotationVector.at<double>(0, 2) = parameters[8];
cv::Mat modelToWorldTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
modelToWorldTranslationVector.at<double>(0, 0) = parameters[9];
modelToWorldTranslationVector.at<double>(0, 1) = parameters[10];
modelToWorldTranslationVector.at<double>(0, 2) = parameters[11];
......@@ -97,12 +97,12 @@ NonLinearNDOFHandEyeCostFunction::InternalGetValue(const ParametersType& paramet
++matrixIter
)
{
cv::Mat rvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat rvec = cv::Mat::zeros(1, 3, CV_64FC1);
rvec.at<double>(0, 0) = parameters[parameterCounter++];
rvec.at<double>(0, 1) = parameters[parameterCounter++];
rvec.at<double>(0, 2) = parameters[parameterCounter++];
cv::Mat tvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat tvec = cv::Mat::zeros(1, 3, CV_64FC1);
tvec.at<double>(0, 0) = parameters[parameterCounter++];
tvec.at<double>(0, 1) = parameters[parameterCounter++];
tvec.at<double>(0, 2) = parameters[parameterCounter++];
......
......@@ -85,12 +85,12 @@ double NonLinearNDOFHandEyeOptimiser::Optimise(cv::Matx44d& modelToWorld,
niftkNiftyCalThrow() << "Hand (tracking) matrices are null.";
}
cv::Mat modelToWorldRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(modelToWorld, modelToWorldRotationVector, modelToWorldTranslationVector);
cv::Mat handEyeRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(handEye, handEyeRotationVector, handEyeTranslationVector);
niftk::NonLinearNDOFHandEyeCostFunction::ParametersType initialParameters;
......@@ -120,8 +120,8 @@ double NonLinearNDOFHandEyeOptimiser::Optimise(cv::Matx44d& modelToWorld,
++iter
)
{
cv::Mat trackingRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat trackingTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(*iter, trackingRotationVector, trackingTranslationVector);
initialParameters[counter++] = trackingRotationVector.at<double>(0, 0);
......
......@@ -100,22 +100,22 @@ NonLinearStereoHandEye2DCostFunction::InternalGetValue(const ParametersType& par
internalParameters[22] = leftToRightTranslationVector.at<double>(0, 1);
internalParameters[23] = leftToRightTranslationVector.at<double>(0, 2);
cv::Mat handEyeRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
handEyeRotationVector.at<double>(0, 0) = parameters[0];
handEyeRotationVector.at<double>(0, 1) = parameters[1];
handEyeRotationVector.at<double>(0, 2) = parameters[2];
cv::Mat handEyeTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
handEyeTranslationVector.at<double>(0, 0) = parameters[3];
handEyeTranslationVector.at<double>(0, 1) = parameters[4];
handEyeTranslationVector.at<double>(0, 2) = parameters[5];
cv::Mat modelToWorldRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
modelToWorldRotationVector.at<double>(0, 0) = parameters[6];
modelToWorldRotationVector.at<double>(0, 1) = parameters[7];
modelToWorldRotationVector.at<double>(0, 2) = parameters[8];
cv::Mat modelToWorldTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
modelToWorldTranslationVector.at<double>(0, 0) = parameters[9];
modelToWorldTranslationVector.at<double>(0, 1) = parameters[10];
modelToWorldTranslationVector.at<double>(0, 2) = parameters[11];
......@@ -132,12 +132,12 @@ NonLinearStereoHandEye2DCostFunction::InternalGetValue(const ParametersType& par
++matrixIter
)
{
cv::Mat trackingRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
trackingRotationVector.at<double>(0, 0) = parameters[parameterCounter++];
trackingRotationVector.at<double>(0, 1) = parameters[parameterCounter++];
trackingRotationVector.at<double>(0, 2) = parameters[parameterCounter++];
cv::Mat trackingTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
trackingTranslationVector.at<double>(0, 0) = parameters[parameterCounter++];
trackingTranslationVector.at<double>(0, 1) = parameters[parameterCounter++];
trackingTranslationVector.at<double>(0, 2) = parameters[parameterCounter++];
......@@ -147,8 +147,8 @@ NonLinearStereoHandEye2DCostFunction::InternalGetValue(const ParametersType& par
cv::Matx44d leftCameraMatrix = handEye * worldToHand * modelToWorld;
cv::Mat rvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat tvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat rvec = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat tvec = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(leftCameraMatrix, rvec, tvec);
internalParameters[internalParameterCounter++] = rvec.at<double>(0, 0);
......
......@@ -116,12 +116,12 @@ double NonLinearStereoHandEye2DOptimiser::Optimise(cv::Matx44d& modelToWorld,
niftkNiftyCalThrow() << "Hand (tracking) matrices are null.";
}
cv::Mat modelToWorldRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(modelToWorld, modelToWorldRotationVector, modelToWorldTranslationVector);
cv::Mat handEyeRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(handEye, handEyeRotationVector, handEyeTranslationVector);
niftk::NonLinearStereoHandEye2DCostFunction::ParametersType initialParameters;
......@@ -151,8 +151,8 @@ double NonLinearStereoHandEye2DOptimiser::Optimise(cv::Matx44d& modelToWorld,
++iter
)
{
cv::Mat trackingRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat trackingTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(*iter, trackingRotationVector, trackingTranslationVector);
initialParameters[counter++] = trackingRotationVector.at<double>(0, 0);
......
......@@ -100,22 +100,22 @@ NonLinearStereoHandEye3DCostFunction::InternalGetValue(const ParametersType& par
internalParameters[22] = leftToRightTranslationVector.at<double>(0, 1);
internalParameters[23] = leftToRightTranslationVector.at<double>(0, 2);
cv::Mat handEyeRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
handEyeRotationVector.at<double>(0, 0) = parameters[0];
handEyeRotationVector.at<double>(0, 1) = parameters[1];
handEyeRotationVector.at<double>(0, 2) = parameters[2];
cv::Mat handEyeTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
handEyeTranslationVector.at<double>(0, 0) = parameters[3];
handEyeTranslationVector.at<double>(0, 1) = parameters[4];
handEyeTranslationVector.at<double>(0, 2) = parameters[5];
cv::Mat modelToWorldRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
modelToWorldRotationVector.at<double>(0, 0) = parameters[6];
modelToWorldRotationVector.at<double>(0, 1) = parameters[7];
modelToWorldRotationVector.at<double>(0, 2) = parameters[8];
cv::Mat modelToWorldTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
modelToWorldTranslationVector.at<double>(0, 0) = parameters[9];
modelToWorldTranslationVector.at<double>(0, 1) = parameters[10];
modelToWorldTranslationVector.at<double>(0, 2) = parameters[11];
......@@ -132,12 +132,12 @@ NonLinearStereoHandEye3DCostFunction::InternalGetValue(const ParametersType& par
++matrixIter
)
{
cv::Mat trackingRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
trackingRotationVector.at<double>(0, 0) = parameters[parameterCounter++];
trackingRotationVector.at<double>(0, 1) = parameters[parameterCounter++];
trackingRotationVector.at<double>(0, 2) = parameters[parameterCounter++];
cv::Mat trackingTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
trackingTranslationVector.at<double>(0, 0) = parameters[parameterCounter++];
trackingTranslationVector.at<double>(0, 1) = parameters[parameterCounter++];
trackingTranslationVector.at<double>(0, 2) = parameters[parameterCounter++];
......@@ -147,8 +147,8 @@ NonLinearStereoHandEye3DCostFunction::InternalGetValue(const ParametersType& par
cv::Matx44d leftCameraMatrix = handEye * worldToHand * modelToWorld;
cv::Mat rvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat tvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat rvec = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat tvec = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(leftCameraMatrix, rvec, tvec);
internalParameters[internalParameterCounter++] = rvec.at<double>(0, 0);
......
......@@ -116,12 +116,12 @@ double NonLinearStereoHandEye3DOptimiser::Optimise(cv::Matx44d& modelToWorld,
niftkNiftyCalThrow() << "Hand (tracking) matrices are null.";
}
cv::Mat modelToWorldRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat modelToWorldRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat modelToWorldTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(modelToWorld, modelToWorldRotationVector, modelToWorldTranslationVector);
cv::Mat handEyeRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat handEyeRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat handEyeTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(handEye, handEyeRotationVector, handEyeTranslationVector);
niftk::NonLinearStereoHandEye3DCostFunction::ParametersType initialParameters;
......@@ -151,8 +151,8 @@ double NonLinearStereoHandEye3DOptimiser::Optimise(cv::Matx44d& modelToWorld,
++iter
)
{
cv::Mat trackingRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat trackingRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat trackingTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(*iter, trackingRotationVector, trackingTranslationVector);
initialParameters[counter++] = trackingRotationVector.at<double>(0, 0);
......
......@@ -167,8 +167,8 @@ double CeresMonoCameraCalibration(const std::vector<std::vector<cv::Vec3f> >& mo
// So the parameters for the first camera that are optimised, start at the identity transform.
cv::Matx44d identity = cv::Matx44d::eye();
cv::Mat firstCameraRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat firstCameraTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat firstCameraRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat firstCameraTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(identity, firstCameraRotationVector, firstCameraTranslationVector);
parameters[parameterCounter++] = firstCameraRotationVector.at<double>(0, 0);
parameters[parameterCounter++] = firstCameraRotationVector.at<double>(0, 1);
......@@ -184,8 +184,8 @@ double CeresMonoCameraCalibration(const std::vector<std::vector<cv::Vec3f> >& mo
cv::Matx44d currentCamera = niftk::RodriguesToMatrix(rvecs[i], tvecs[i]);
cv::Matx44d previousCameraToCurrentCamera = currentCamera * previousCamera.inv();
cv::Mat previousCameraToCurrentCameraRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat previousCameraToCurrentCameraTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat previousCameraToCurrentCameraRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat previousCameraToCurrentCameraTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::MatrixToRodrigues(previousCameraToCurrentCamera,
previousCameraToCurrentCameraRotationVector,
previousCameraToCurrentCameraTranslationVector);
......@@ -256,8 +256,8 @@ double CeresMonoCameraCalibration(const std::vector<std::vector<cv::Vec3f> >& mo
for (unsigned int i = 0; i < rvecs.size(); i++)
{
cv::Matx44d cumulativeTransform = firstCamera;
cv::Mat currentCameraRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat currentCameraTranslationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat currentCameraRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat currentCameraTranslationVector = cv::Mat::zeros(1, 3, CV_64FC1);
unsigned int parameterCounterInsideLoop = parameterCounter;
......
......@@ -502,8 +502,8 @@ double CeresStereoCameraCalibration(const std::vector<std::vector<cv::Vec3f> >&
std::list<cv::Matx44d> toLeftCameraExtrinsics;
std::list<cv::Matx44d> toRightCameraExtrinsics;
cv::Mat rvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat tvec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat rvec = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat tvec = cv::Mat::zeros(1, 3, CV_64FC1);
// Base frame rotation (4) and translation (3), in that order.
for (unsigned int i = 0; i < rvecsLeft.size(); i++)
......@@ -587,7 +587,7 @@ double CeresStereoCameraCalibration(const std::vector<std::vector<cv::Vec3f> >&
parameters[parameterCounter++] = quaternion[3];
// Right camera position
cv::Mat leftToRightRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat leftToRightRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Rodrigues(leftToRightRotationMatrix, leftToRightRotationVector);
cv::Matx44d leftToRight = niftk::RodriguesToMatrix(leftToRightRotationVector, leftToRightTranslationVector);
cv::Matx44d rightToLeft = leftToRight.inv();
......@@ -980,7 +980,7 @@ double CeresStereoCameraCalibration(const std::vector<std::vector<cv::Vec3f> >&
quaternion[2] = parameters[parameterCounter++];
quaternion[3] = parameters[parameterCounter++];
ceres::QuaternionToAngleAxis(quaternion, axisAngle);
cv::Mat leftRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat leftRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
leftRotationVector.at<double>(0, 0) = axisAngle[0];
leftRotationVector.at<double>(0, 1) = axisAngle[1];
leftRotationVector.at<double>(0, 2) = axisAngle[2];
......@@ -1031,12 +1031,12 @@ double CeresStereoCameraCalibration(const std::vector<std::vector<cv::Vec3f> >&
}
// Compute final left hand translation and rotation.
cv::Mat tmpRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat tmpRotationMatrix = cvCreateMat(3, 3, CV_64FC1);
cv::Mat tmpTranslationVector = cvCreateMat(3, 1, CV_64FC1);
cv::Mat transformedTranslationVector = cvCreateMat(3, 1, CV_64FC1);
cv::Mat leftRotation = cvCreateMat(3, 3, CV_64FC1);
cv::Mat baselineRotation = cvCreateMat(3, 3, CV_64FC1);
cv::Mat tmpRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat tmpRotationMatrix = cv::Mat::zeros(3, 3, CV_64FC1);
cv::Mat tmpTranslationVector = cv::Mat::zeros(3, 1, CV_64FC1);
cv::Mat transformedTranslationVector = cv::Mat::zeros(3, 1, CV_64FC1);
cv::Mat leftRotation = cv::Mat::zeros(3, 3, CV_64FC1);
cv::Mat baselineRotation = cv::Mat::zeros(3, 3, CV_64FC1);
cv::Mat baselineTransform = cv::Mat::eye(4, 4, CV_64FC1);
cv::Mat leftTransform = cv::Mat::eye(4, 4, CV_64FC1);
cv::Mat combinedTransform = cv::Mat::eye(4, 4, CV_64FC1);
......
......@@ -60,7 +60,7 @@ NonLinearMonoCameraCalibration3DCostFunction::InternalGetValue(const ParametersT
rightIntrinsic.at<double>(0, 2) = m_Intrinsic.at<double>(0, 2);
rightIntrinsic.at<double>(1, 2) = m_Intrinsic.at<double>(1, 2);
cv::Mat rightDistortion = cvCreateMat(1, 5, CV_64FC1);
cv::Mat rightDistortion = cv::Mat::zeros(1, 5, CV_64FC1);
rightDistortion.at<double>(0, 0) = m_Distortion.at<double>(0, 0);
rightDistortion.at<double>(0, 1) = m_Distortion.at<double>(0, 1);
rightDistortion.at<double>(0, 2) = m_Distortion.at<double>(0, 2);
......@@ -84,24 +84,24 @@ NonLinearMonoCameraCalibration3DCostFunction::InternalGetValue(const ParametersT
++ rightPointsIter
)
{
cv::Mat leftExtrinsicRVec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat leftExtrinsicRVec = cv::Mat::zeros(1, 3, CV_64FC1);
leftExtrinsicRVec.at<double>(0, 0) = parameters[leftCounter*6 + 0 + parameterOffset];
leftExtrinsicRVec.at<double>(0, 1) = parameters[leftCounter*6 + 1 + parameterOffset];
leftExtrinsicRVec.at<double>(0, 2) = parameters[leftCounter*6 + 2 + parameterOffset];
cv::Mat leftExtrinsicTVec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat leftExtrinsicTVec = cv::Mat::zeros(1, 3, CV_64FC1);
leftExtrinsicTVec.at<double>(0, 0) = parameters[leftCounter*6 + 3 + parameterOffset];
leftExtrinsicTVec.at<double>(0, 1) = parameters[leftCounter*6 + 4 + parameterOffset];
leftExtrinsicTVec.at<double>(0, 2) = parameters[leftCounter*6 + 5 + parameterOffset];
cv::Matx44d leftExtrinsic = niftk::RodriguesToMatrix(leftExtrinsicRVec, leftExtrinsicTVec);
cv::Mat rightExtrinsicRVec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat rightExtrinsicRVec = cv::Mat::zeros(1, 3, CV_64FC1);
rightExtrinsicRVec.at<double>(0, 0) = parameters[rightCounter*6 + 0 + parameterOffset];
rightExtrinsicRVec.at<double>(0, 1) = parameters[rightCounter*6 + 1 + parameterOffset];
rightExtrinsicRVec.at<double>(0, 2) = parameters[rightCounter*6 + 2 + parameterOffset];
cv::Mat rightExtrinsicTVec = cvCreateMat(1, 3, CV_64FC1);
cv::Mat rightExtrinsicTVec = cv::Mat::zeros(1, 3, CV_64FC1);
rightExtrinsicTVec.at<double>(0, 0) = parameters[rightCounter*6 + 3 + parameterOffset];
rightExtrinsicTVec.at<double>(0, 1) = parameters[rightCounter*6 + 4 + parameterOffset];
rightExtrinsicTVec.at<double>(0, 2) = parameters[rightCounter*6 + 5 + parameterOffset];
......@@ -110,9 +110,9 @@ NonLinearMonoCameraCalibration3DCostFunction::InternalGetValue(const ParametersT
cv::Matx44d leftToRight = rightExtrinsic * leftExtrinsic.inv();
cv::Mat leftToRightRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat leftToRightTranslationVector = cvCreateMat(3, 1, CV_64FC1);
cv::Mat leftToRightRotationMatrix = cvCreateMat(3, 3, CV_64FC1);
cv::Mat leftToRightRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Mat leftToRightTranslationVector = cv::Mat::zeros(3, 1, CV_64FC1);
cv::Mat leftToRightRotationMatrix = cv::Mat::zeros(3, 3, CV_64FC1);
niftk::MatrixToRodrigues(leftToRight, leftToRightRotationVector, leftToRightTranslationVector);
cv::Rodrigues(leftToRightRotationVector, leftToRightRotationMatrix);
......
......@@ -181,7 +181,7 @@ double NonLinearStereoCameraCalibration2DOptimiser::Optimise(cv::Mat& leftIntrin
niftkNiftyCalThrow() << "ForceUnitVectorAxes and OptimiseR2L are mutually exclusive";
}
cv::Mat leftToRightRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat leftToRightRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
niftk::SafeRodrigues(leftToRightRotationMatrix, leftToRightRotationVector);
int numberOfParameters = 0;
......
......@@ -111,7 +111,7 @@ double NonLinearStereoExtrinsicsCalibration3DOptimiser::Optimise(std::vector<cv:
<< ", versus " << tvecsLeft.size();
}
cv::Mat leftToRightRotationVector = cvCreateMat(1, 3, CV_64FC1);
cv::Mat leftToRightRotationVector = cv::Mat::zeros(1, 3, CV_64FC1);
cv::Rodrigues(leftToRightRotationMatrix, leftToRightRotationVector);
unsigned int numberOfParameters = 0;
......
......@@ -38,8 +38,8 @@ NonLinearTsai2ParamCostFunction::InternalGetValue(const ParametersType& paramete
MeasureType result;
result.SetSize(this->GetNumberOfValues());
cv::Mat rvec = cvCreateMat ( 1, 3, CV_64FC1 );
cv::Mat tvec = cvCreateMat ( 1, 3, CV_64FC1 );
cv::Mat rvec = cv::Mat::zeros ( 1, 3, CV_64FC1 );
cv::Mat tvec = cv::Mat::zeros ( 1, 3, CV_64FC1 );
niftk::MatrixToRodrigues(*m_Extrinsic, rvec, tvec);
cv::Point2d imageCentre;
......
......@@ -38,8 +38,8 @@ NonLinearTsai3ParamCostFunction::InternalGetValue(const ParametersType& paramete
MeasureType result;
result.SetSize(this->GetNumberOfValues());
cv::Mat rvec = cvCreateMat ( 1, 3, CV_64FC1 );
cv::Mat tvec = cvCreateMat ( 1, 3, CV_64FC1 );
cv::Mat rvec = cv::Mat::zeros ( 1, 3, CV_64FC1 );
cv::Mat tvec = cv::Mat::zeros ( 1, 3, CV_64FC1 );
niftk::MatrixToRodrigues(*m_Extrinsic, rvec, tvec);
ParametersType internalParameters;
......
......@@ -39,8 +39,8 @@ NonLinearTsai5ParamCostFunction::InternalGetValue(const ParametersType& paramete
MeasureType result;
result.SetSize(this->GetNumberOfValues());
cv::Mat rvec = cvCreateMat ( 1, 3, CV_64FC1 );