/*
* PeTrack - Software for tracking pedestrians movement in videos
* Copyright (C) 2022 Forschungszentrum Jülich GmbH, IAS-7
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
#include "pMessageBox.h"
#include <QFileDialog>
#include <QtWidgets>
ExtrCalibration::ExtrCalibration(PersonStorage &storage) : mPersonStorage(storage) {}
ExtrCalibration::~ExtrCalibration() {}
void ExtrCalibration::setMainWindow(Petrack *mw)
{
mMainWindow = mw;
mControlWidget = mw->getControlWidget();
init();
}
void ExtrCalibration::init()
{
rotation_matrix = new double[9];
translation_vector = new double[3];
distValues = new double[8];
isExtCalib = false;
}
bool ExtrCalibration::isEmptyExtrCalibFile()
{
return mExtrCalibFile.isEmpty();
}
void ExtrCalibration::setExtrCalibFile(const QString &f)
{
mExtrCalibFile = f;
}
QString ExtrCalibration::getExtrCalibFile()
{
return mExtrCalibFile;
}
bool ExtrCalibration::openExtrCalibFile()
{
if(mMainWindow)
{
// start the file dialog in the directory of the current calibration file
QString lastDir = QFileInfo(mExtrCalibFile).path();
QString extrCalibFile = QFileDialog::getOpenFileName(
mMainWindow,
Petrack::tr("Open extrinisc calibration file with point correspondences"),
lastDir,
"3D-Calibration-File (*.3dc);;Text (*.txt);;All supported types (*.3dc *.txt);;All files (*.*)");
if(!extrCalibFile.isEmpty())
{
mExtrCalibFile = extrCalibFile;
return loadExtrCalibFile();
}
}
return false;
}
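/*
 * Example (illustrative values only): a minimal point-correspondence file in the
 * format accepted by loadExtrCalibFile() below. 3D coordinates are given in cm,
 * pixel coordinates in px; lines starting with "#" are comments.
 *
 *   # four coplanar points with their pixel positions
 *   0    0    0   512.0 384.0
 *   100  0    0   812.3 390.1
 *   100  100  0   818.7 690.4
 *   0    100  0   505.2 684.9
 */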
// following function copied from OpenCV
static bool isPlanarObjectPoints(cv::InputArray _objectPoints, double threshold = 1e-3)
{
CV_CheckType(
_objectPoints.type(),
_objectPoints.type() == CV_32FC3 || _objectPoints.type() == CV_64FC3,
"Type of _objectPoints must be CV_32FC3 or CV_64FC3");
cv::Mat objectPoints;
if(_objectPoints.type() == CV_32FC3)
{
_objectPoints.getMat().convertTo(objectPoints, CV_64F);
}
else
{
objectPoints = _objectPoints.getMat();
}
cv::Scalar meanValues = mean(objectPoints);
int nbPts = objectPoints.checkVector(3, CV_64F);
cv::Mat objectPointsCentred = objectPoints - meanValues;
objectPointsCentred = objectPointsCentred.reshape(1, nbPts);
cv::Mat w, u, vt;
cv::Mat MM = objectPointsCentred.t() * objectPointsCentred;
SVDecomp(MM, w, u, vt);
return (w.at<double>(2) < w.at<double>(1) * threshold);
}
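/*
 * Sketch: how the planarity test above maps to the minimum number of point
 * correspondences required by cv::solvePnP (4 for a coplanar set, 6 otherwise),
 * matching the checks done in loadExtrCalibFile(). The helper name
 * minRequiredCalibPoints is illustrative only.
 */
static size_t minRequiredCalibPoints(const std::vector<cv::Point3f> &objectPoints)
{
    // coplanar targets can use the planar pose estimation (>= 4 points),
    // non-coplanar targets need the DLT initialisation (>= 6 points)
    return isPlanarObjectPoints(objectPoints) ? 4 : 6;
}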
/**
* @brief Loads the extrinsic calibration from mExtrCalibFile
*
* This method reads an extrinsic calibration in one of two formats:
* First: 3D coordinates followed by corresponding 2D coordinates
*
* x y z px py
*
* Second: Just 3D coordinates
*
* x y z
*
* It is possible to optionally start the file with the number of lines:
*
* 2
* x1 y1 z1
* x2 y2 z2
*
* The count line is simply ignored. Comments start with "#".
*
* @return true if the calibration file could be read successfully
*/
bool ExtrCalibration::loadExtrCalibFile()
{
bool all_ok = true;
if(mExtrCalibFile.right(4) == ".3dc" || mExtrCalibFile.right(4) == ".txt")
{
QFile file(mExtrCalibFile);
if(!file.open(QIODevice::ReadOnly | QIODevice::Text))
{
PCritical(
mMainWindow,
QObject::tr("Petrack"),
QObject::tr("Error: Cannot open %1:\n%2.").arg(mExtrCalibFile, file.errorString()));
return false;
}
SPDLOG_INFO("Reading 3D calibration data from {} ...", mExtrCalibFile);
QTextStream in(&file);
std::vector<cv::Point3f> points3D_tmp;
std::vector<cv::Point2f> points2D_tmp;
QString line;
int line_counter = 0, counter;
float x, y, z, px, py;
float zahl;
bool with_2D_data = false, with_3D_data = false, end_loop = false;
// loop until the end of the file is reached
while(!in.atEnd() && !end_loop)
{
// read the next line
line = in.readLine();
++line_counter;
// skip comment lines
if(line.startsWith("#", Qt::CaseInsensitive) || line.startsWith(";;", Qt::CaseInsensitive) ||
line.startsWith("//", Qt::CaseInsensitive) || line.startsWith("!", Qt::CaseInsensitive))
{
continue;
}
case 1:
x = zahl;
if(!with_3D_data)
{
points3D_tmp.clear();
with_3D_data = true;
}
break;
case 2:
y = zahl;
break;
case 3:
z = zahl;
break;
case 4:
px = zahl;
if(!with_2D_data)
{
points2D_tmp.clear();
with_2D_data = true;
}
break;
case 5:
py = zahl;
break;
default:
end_loop = true;
SPDLOG_INFO("Optional number of points in line {} ignored.", line_counter);
}
else if(counter != 3 && counter != 5)
SPDLOG_INFO("Something wrong in line {} ({})! Ignored. (counter={})", line_counter, line, counter);
if(with_3D_data && (counter == 3 || counter == 5))
{
points3D_tmp.push_back(cv::Point3f(x, y, z));
}
if(with_2D_data && counter == 5)
{
points2D_tmp.push_back(cv::Point2f(px, py));
}
}
}
// Check that at least 4 (coplanar) points for calibration are given in the file
if(points3D_tmp.size() < 4)
{
PCritical(
mMainWindow,
QObject::tr("PeTrack"),
QObject::tr("Error: Not enough points given: %1 (minimum 4 (coplanar) or 6 (not coplanar) "
"needed!). Please check your extrinsic "
"calibration file!")
.arg(points3D_tmp.size()));
all_ok = false;
}
else if(!isPlanarObjectPoints(points3D_tmp) && points3D_tmp.size() < 6)
{
// Non-planar points use DLT - we need at least 6 points; not only 4
PCritical(
mMainWindow,
QObject::tr("PeTrack"),
QObject::tr("Error: Not enough points given: %1 (minimum 4 (coplanar) or 6 (not coplanar) "
"needed!). Please check your extrinsic "
"calibration file!")
.arg(points3D_tmp.size()));
all_ok = false;
}
// Check if 2D points delivered and if the number of 2D and 3D points agree
else if(points2D_tmp.size() > 0 && points2D_tmp.size() != points3D_tmp.size())
{
PCritical(
mMainWindow,
QObject::tr("PeTrack"),
QObject::tr(
"Error: Unsupported File Format in: %1 (number of 3D (%2) and 2D (%3) points disagree!)")
.arg(mExtrCalibFile)
.arg(points3D_tmp.size())
.arg(points2D_tmp.size()));
all_ok = false;
}
// Check if number of loaded 3D points agree with stored 2D points
else if(!with_2D_data && points2D.size() > 0 && points3D_tmp.size() != points2D.size())
{
int result = PWarning(
mMainWindow,
QObject::tr("PeTrack"),
QObject::tr("Number of 3D points (%1) disagree with number of stored 2D points (%2)!<br />The 2D "
"points will be deleted! You have to fetch new ones from the image!")
.arg(points3D_tmp.size())
.arg(points2D.size()),
PMessageBox::StandardButton::Ok | PMessageBox::StandardButton::Abort);
if(result != PMessageBox::StandardButton::Ok)
{
all_ok = false;
}
}
}
else
{
SPDLOG_WARN("unsupported file extension (supported: .3dc, .txt)");
all_ok = false;
}
if(all_ok && !mMainWindow->isLoading())
{
calibExtrParams();
}
return all_ok;
}
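/*
 * Sketch (not the original parsing code, which is abridged above): how a single
 * "x y z px py" data line of the calibration file can be tokenized. Assumes
 * Qt >= 5.14 for Qt::SkipEmptyParts; the helper name parseCalibLine is illustrative.
 */
static bool parseCalibLine(const QString &line, cv::Point3f &p3d, cv::Point2f &p2d, bool &has2D)
{
    const QStringList tokens = line.simplified().split(' ', Qt::SkipEmptyParts);
    if(tokens.size() != 3 && tokens.size() != 5)
    {
        return false; // neither "x y z" nor "x y z px py"
    }
    p3d   = cv::Point3f(tokens[0].toFloat(), tokens[1].toFloat(), tokens[2].toFloat());
    has2D = (tokens.size() == 5);
    if(has2D)
    {
        p2d = cv::Point2f(tokens[3].toFloat(), tokens[4].toFloat());
    }
    return true;
}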
/**
* @brief Uses manually set TrackPoints as 2D points for extrinsic calibration
*
* @pre at least 4 3D points have been loaded
*
* @return true if calibration did take place
*/
bool ExtrCalibration::fetch2DPoints()
{
bool all_ok = true;
if(!mMainWindow->getTracker() || mPersonStorage.nbPersons() < 4)
{
PCritical(
mMainWindow,
QObject::tr("Petrack"),
QObject::tr("Error: At minimum four 3D calibration points needed for 3D calibration."));
all_ok = false;
}
else
{
size_t sz_2d = mPersonStorage.nbPersons();
if(points3D.size() > 0 && sz_2d != points3D.size())
{
PCritical(
mMainWindow,
QObject::tr("Petrack"),
QObject::tr("Count of 2D-Points (%1) and 3D-Points (%2) disagree").arg(sz_2d).arg(points3D.size()));
// debout << "Marked 2D-Image-Points: " << endl;
if(all_ok)
for(int i = 0; i < static_cast<int>(sz_2d); i++)
// debout << "[" << i << "]: (" << mMainWindow->getTracker()->at(i).at(0).x() << ", " <<
// mMainWindow->getTracker()->at(i).at(0).y() << ")" << endl;
// Info: Tracker->TrackPerson->TrackPoint->Vec2F
points2D.push_back(cv::Point2f(mPersonStorage.at(i).at(0).x(), mPersonStorage.at(i).at(0).y()));
}
}
}
return all_ok;
}
/**
* @brief Saves points used for extrinsic calibration
*
* Saves the points used for extrinsic calibration in the format:
*
* n
* x y z px py
*
* With n as number of points, x,y,z as 3D coordinates and px,py as 2D coordinates.
* @return true if the points were saved, false otherwise
*/
bool ExtrCalibration::saveExtrCalibPoints()
{
bool all_okay = false;
QString out_str;
QTextStream out(&out_str);
for(size_t i = 0; i < points3D.size(); ++i)
{
out << "[" << QString::number(i + 1) << "]: " << QString::number(points3D.at(i).x, 'f', 1) << " "
<< QString::number(points3D.at(i).y, 'f', 1) << " " << QString::number(points3D.at(i).z, 'f', 1) << " "
<< QString::number(points2D.at(i).x, 'f', 3) << " " << QString::number(points2D.at(i).y, 'f', 3)
<< Qt::endl;
}
QIcon icon = QApplication::style()->standardIcon(QStyle::SP_MessageBoxWarning);
PMessageBox msgBox{
nullptr,
"PeTrack",
"The corresponding calibration points have been changed.\n"
"Do you want to save your changes?",
icon,
out_str,
PMessageBox::StandardButton::Save | PMessageBox::StandardButton::Cancel,
PMessageBox::StandardButton::Save};
switch(msgBox.exec())
{
case PMessageBox::StandardButton::Save:
{
QFile file(mExtrCalibFile);
if(!file.open(QIODevice::WriteOnly | QIODevice::Text))
{
PCritical(
mMainWindow,
QObject::tr("Petrack"),
QObject::tr("Cannot open %1:\n%2.").arg(mExtrCalibFile).arg(file.errorString()));
return false;
}
QTextStream file_out(&file);
file_out << points3D.size() << Qt::endl;
for(size_t i = 0; i < points3D.size(); ++i)
file_out << points3D.at(i).x << " " << points3D.at(i).y << " " << points3D.at(i).z << " "
<< points2D.at(i).x << " " << points2D.at(i).y << Qt::endl;
all_okay = true;
break;
}
case PMessageBox::StandardButton::Discard:
// Don't Save was clicked
break;
case PMessageBox::StandardButton::Cancel:
// Cancel was clicked
break;
default:
// should never be reached
break;
}
return all_okay;
}
bool ExtrCalibration::isSetExtrCalib()
{
return isExtCalib;
}
/**
* @brief Extrinsic calibration with help of cv::solvePnP
*/
void ExtrCalibration::calibExtrParams()
{
if(!points3D.empty() && !points2D.empty() && points2D.size() == points3D.size())
{
int bS = mMainWindow->getImageBorderSize();
/* Create Camera-Matrix form Camera-Params in the Petrack-GUI */
// clang-format off
cv::Mat camMat = (cv::Mat_<double>(3,3) <<
mControlWidget->getCalibFx(), 0, mControlWidget->getCalibCx()-bS,
0, mControlWidget->getCalibFy(), mControlWidget->getCalibCy()-bS,
0, 0, 1);
// clang-format on
cv::Mat distMat =
(cv::Mat_<double>(8, 1) << 0, // mControlWidget->r2->value(),
0, // mControlWidget->r4->value(),
0, // mControlWidget->tx->value(),
0, // mControlWidget->ty->value(),
// r^>4 not supported
0, // mControlWidget->r6->value(),
0, // mControlWidget->k4->value(),
0, // mControlWidget->k5->value(),
0 // mControlWidget->k6->value()
);
cv::Mat op(points3D);
cv::Mat ip(points2D);
/* Mat-objects for result rotation and translation vectors */
cv::Mat rvec(3, 1, CV_64F), /*,0),*/ tvec(3, 1, CV_64F); //,0);
// Solve the PnP-Problem to calibrate the camera to its environment
cv::solvePnP(op, ip, camMat, distMat, rvec, tvec, false, cv::SOLVEPNP_ITERATIVE);
cv::Mat rot_mat(3, 3, CV_64F); //, 0);
// Transform the rotation vector into a rotation matrix with opencvs rodrigues method
Rodrigues(rvec, rot_mat);
rotation_matrix[0] = rot_mat.at<double>(0, 0);
rotation_matrix[1] = rot_mat.at<double>(0, 1);
rotation_matrix[2] = rot_mat.at<double>(0, 2);
rotation_matrix[3] = rot_mat.at<double>(1, 0);
rotation_matrix[4] = rot_mat.at<double>(1, 1);
rotation_matrix[5] = rot_mat.at<double>(1, 2);
rotation_matrix[6] = rot_mat.at<double>(2, 0);
rotation_matrix[7] = rot_mat.at<double>(2, 1);
rotation_matrix[8] = rot_mat.at<double>(2, 2);
translation_vector[0] = tvec.at<double>(0, 0);
translation_vector[1] = tvec.at<double>(0, 1);
translation_vector[2] = tvec.at<double>(0, 2);
translation_vector2[0] = rotation_matrix[0] * translation_vector[0] +
rotation_matrix[3] * translation_vector[1] +
rotation_matrix[6] * translation_vector[2];
translation_vector2[1] = rotation_matrix[1] * translation_vector[0] +
rotation_matrix[4] * translation_vector[1] +
rotation_matrix[7] * translation_vector[2];
translation_vector2[2] = rotation_matrix[2] * translation_vector[0] +
rotation_matrix[5] * translation_vector[1] +
rotation_matrix[8] * translation_vector[2];
SPDLOG_INFO("-.- ESTIMATED ROTATION -.-");
SPDLOG_INFO("{}, {}, {}", rotation_matrix[p * 3], rotation_matrix[p * 3 + 1], rotation_matrix[p * 3] + 2);
SPDLOG_INFO("-.- ESTIMATED TRANSLATION -.-");
SPDLOG_INFO("{}, {}, {}", translation_vector[0], translation_vector[1], translation_vector[2]);
SPDLOG_INFO("-.- Translation vector -.-");
SPDLOG_INFO("{}, {}, {}", translation_vector2[0], translation_vector2[1], translation_vector2[2]);
SPDLOG_INFO("-.- Rotation vector -.-");
SPDLOG_INFO("{}, {}, {}", rvec.at<double>(0, 0), rvec.at<double>(1, 0), rvec.at<double>(2, 0));
camHeight = translation_vector2[2] < 0 ? -translation_vector2[2] : translation_vector2[2];
mControlWidget->setCalibExtrRot1(rvec.at<double>(0, 0));
mControlWidget->setCalibExtrRot2(rvec.at<double>(1, 0));
mControlWidget->setCalibExtrRot3(rvec.at<double>(2, 0));
mControlWidget->setCalibExtrTrans1(translation_vector2[0]);
mControlWidget->setCalibExtrTrans2(translation_vector2[1]);
mControlWidget->setCalibExtrTrans3(translation_vector2[2]);
SPDLOG_WARN("Extrinsic calibration not possible! Please select other 2D/3D points!");
mControlWidget->setCalibExtrRot1(0);
mControlWidget->setCalibExtrRot2(0);
mControlWidget->setCalibExtrRot3(0);
translation_vector2[0] = 0;
translation_vector2[1] = 0;
translation_vector2[2] = 0;
rotation_matrix[0] = 0;
rotation_matrix[1] = 0;
rotation_matrix[2] = 0;
mControlWidget->setCalibExtrTrans1(translation_vector2[0]);
mControlWidget->setCalibExtrTrans2(translation_vector2[1]);
mControlWidget->setCalibExtrTrans3(translation_vector2[2]);
reprojectionError = ReprojectionError{};
PCritical(
mMainWindow,
QObject::tr("Petrack"),
QObject::tr("Error: Could not calculate extrinsic calibration. Please select other 2D/3D point "
"correspondences for extrinsic calibration!"));
isExtCalib = false;
return;
}
isExtCalib = true;
SPDLOG_INFO("End of extern calibration!");
std::cerr << "# Warning: invalid point correspondences for camera calibration." << std::endl;
std::cerr << "# 2D points:" << points2D.size() << ", 3D points: " << points3D.size() << std::endl;
/**
* @brief Calculates the reprojection Error
*
* This method calculates following errors and their variance:
* <ul>
* <li>2D Point to 3D Point against 3D Point - using calibration points</li>
* <li>3D to 2D to 3D against 2D to 3D - using default height for calib. points</li>
* <li>3D to 2D against 2D - using calib. points</li>
* </ul>
* @return false if the average reprojection error exceeds MAX_AV_ERROR, true otherwise
*/
bool ExtrCalibration::calcReprojectionError()
{
// error measurements
double val, max_px = -1.0, max_pH = -1.0, max_dH = -1.0, var_px = 0, sd_px = 0, var_pH = 0, sd_pH = 0, var_dH = 0,
sd_dH = 0, sum_px = 0, sum_pH = 0, sum_dH = 0;
size_t num_points = get2DList().size();
if(num_points == 0 || num_points != get3DList().size())
{
reprojectionError = ReprojectionError{};
return false;
}
for(size_t i = 0; i < num_points; i++)
{
cv::Point2f p2d = get2DList().at(i);
cv::Point3f p3d = get3DList().at(i);
p3d.x -= mControlWidget->getCalibCoord3DTransX();
p3d.y -= mControlWidget->getCalibCoord3DTransY();
p3d.z -= mControlWidget->getCalibCoord3DTransZ();
cv::Point2f p3dTo2d = getImagePoint(p3d);
cv::Point3f p2dTo3d = get3DPoint(p2d, p3d.z);
cv::Point3f p2dTo3dMapDefaultHeight = get3DPoint(p2d, mControlWidget->getDefaultHeight());
cv::Point3f p3dTo2dTo3dMapDefaultHeight = get3DPoint(p3dTo2d, mControlWidget->getDefaultHeight());
// 3D error at the point's own height
val = sqrt(pow(p3d.x - p2dTo3d.x, 2) + pow(p3d.y - p2dTo3d.y, 2));
if(val > max_pH)
max_pH = val;
sum_pH += val;
// 3D error with both points mapped to the default height
val = sqrt(
pow(p3dTo2dTo3dMapDefaultHeight.x - p2dTo3dMapDefaultHeight.x, 2) +
pow(p3dTo2dTo3dMapDefaultHeight.y - p2dTo3dMapDefaultHeight.y, 2));
if(val > max_dH)
max_dH = val;
sum_dH += val;
// pixel error between projected 3D point and measured 2D point
val = sqrt(pow(p3dTo2d.x - p2d.x, 2) + pow(p3dTo2d.y - p2d.y, 2));
if(val > max_px)
max_px = val;
sum_px += val;
}
for(size_t i = 0; i < num_points; i++)
{
cv::Point2f p2d = get2DList().at(i);
cv::Point3f p3d = get3DList().at(i);
p3d.x -= mControlWidget->getCalibCoord3DTransX();
p3d.y -= mControlWidget->getCalibCoord3DTransY();
p3d.z -= mControlWidget->getCalibCoord3DTransZ();
cv::Point2f p3d_to_2d = getImagePoint(p3d);
cv::Point3f p2d_to_3d = get3DPoint(p2d, p3d.z);
cv::Point3f p2d_to_3d_mapDefaultHeight =
get3DPoint(p2d, mControlWidget->getDefaultHeight()); // mStatusPosRealHeight->value()); ?
cv::Point3f p3d_to2d_to3d_mapDefaultHeight = get3DPoint(p3d_to_2d, mControlWidget->getDefaultHeight());
// squared deviations from the mean for the variance of each error measure
val = pow(sqrt(pow(p3d.x - p2d_to_3d.x, 2) + pow(p3d.y - p2d_to_3d.y, 2)) - (sum_pH / num_points), 2);
var_pH += val;
val =
pow(sqrt(
pow(p3d_to2d_to3d_mapDefaultHeight.x - p2d_to_3d_mapDefaultHeight.x, 2) +
pow(p3d_to2d_to3d_mapDefaultHeight.y - p2d_to_3d_mapDefaultHeight.y, 2)) -
(sum_dH / num_points),
2);
var_dH += val;
val = pow(sqrt(pow(p3d_to_2d.x - p2d.x, 2) + pow(p3d_to_2d.y - p2d.y, 2)) - (sum_px / num_points), 2);
var_px += val;
}
// average
sum_pH /= num_points;
var_pH /= num_points;
sd_pH = sqrt(var_pH);
SPDLOG_INFO(
"Reprojection error (pointHeight) average: {}cm (standard deviation: {}, variance: {}, max error: {}cm)",
sum_pH,
sd_pH,
var_pH,
max_pH);
// average
sum_dH /= num_points;
var_dH /= num_points;
sd_dH = sqrt(var_dH);
SPDLOG_INFO(
"Reprojection error (defaultHeight={}) average: {}cm (standard deviation: {}, variance: {}, max error: {}cm)",
mControlWidget->getDefaultHeight(),
sum_dH,
sd_dH,
var_dH,
max_dH);
// average
sum_px /= num_points;
var_px /= num_points;
sd_px = sqrt(var_px);
SPDLOG_INFO(
"Reprojection error (Pixel) average: {}px (standard deviation: {}, variance: {}, max error: {}px)",
sum_px,
sd_px,
var_px,
max_px);
reprojectionError = ReprojectionError{
sum_pH,
sd_pH,
var_pH,
max_pH,
sum_dH,
sd_dH,
var_dH,
max_dH,
sum_px,
sd_px,
var_px,
max_px,
mControlWidget->getDefaultHeight()};
// if the average point-height error exceeds MAX_AV_ERROR, the result is not acceptable
return reprojectionError.pointHeightAvg() <= MAX_AV_ERROR;
}
/**
* @brief Projects the 3D point to the image plane
*
* Projection is done by multiplying with the external camera matrix
* composed of the rotation and translation acquired in ExtrCalibration::calibExtrParams().
* After that, the internal camera matrix is applied.
*
* @return calculated 2D projection of p3d
*/
cv::Point2f ExtrCalibration::getImagePoint(cv::Point3f p3d)
{
p3d.x *= mControlWidget->getCalibCoord3DSwapX() ? -1 : 1;
p3d.y *= mControlWidget->getCalibCoord3DSwapY() ? -1 : 1;
p3d.z *= mControlWidget->getCalibCoord3DSwapZ() ? -1 : 1;
// Adding the coordsystem translation from petrack window
p3d.x += mControlWidget->getCalibCoord3DTransX();
p3d.y += mControlWidget->getCalibCoord3DTransY();
p3d.z += mControlWidget->getCalibCoord3DTransZ();
// ToDo: use projectPoints();
int bS = mMainWindow->getImage() ? mMainWindow->getImageBorderSize() : 0;
double rvec_array[3], translation_vector[3];
rvec_array[0] = mControlWidget->getCalibExtrRot1();
rvec_array[1] = mControlWidget->getCalibExtrRot2();
rvec_array[2] = mControlWidget->getCalibExtrRot3();
cv::Mat rvec(3, 1, CV_64F, rvec_array), rot_inv;
cv::Mat rot_mat(3, 3, CV_64F), e(3, 3, CV_64F);
// Transform the rotation vector into a rotation matrix with opencvs rodrigues method
Rodrigues(rvec, rot_mat);
rot_inv = rot_mat.inv(cv::DECOMP_SVD);
e = rot_inv * rot_mat;
translation_vector[0] = rot_mat.at<double>(0, 0) * mControlWidget->getCalibExtrTrans1() +
rot_mat.at<double>(0, 1) * mControlWidget->getCalibExtrTrans2() +
rot_mat.at<double>(0, 2) * mControlWidget->getCalibExtrTrans3();
translation_vector[1] = rot_mat.at<double>(1, 0) * mControlWidget->getCalibExtrTrans1() +
rot_mat.at<double>(1, 1) * mControlWidget->getCalibExtrTrans2() +
rot_mat.at<double>(1, 2) * mControlWidget->getCalibExtrTrans3();
translation_vector[2] = rot_mat.at<double>(2, 0) * mControlWidget->getCalibExtrTrans1() +
rot_mat.at<double>(2, 1) * mControlWidget->getCalibExtrTrans2() +
rot_mat.at<double>(2, 2) * mControlWidget->getCalibExtrTrans3();
cv::Point3f point3D;
point3D.x = rot_mat.at<double>(0, 0) * p3d.x + rot_mat.at<double>(0, 1) * p3d.y + rot_mat.at<double>(0, 2) * p3d.z +
translation_vector[0];
point3D.y = rot_mat.at<double>(1, 0) * p3d.x + rot_mat.at<double>(1, 1) * p3d.y + rot_mat.at<double>(1, 2) * p3d.z +
translation_vector[1];
point3D.z = rot_mat.at<double>(2, 0) * p3d.x + rot_mat.at<double>(2, 1) * p3d.y + rot_mat.at<double>(2, 2) * p3d.z +
translation_vector[2];
cv::Point2f point2D = cv::Point2f(0.0, 0.0);
if(point3D.z != 0)
{
point2D.x = (mControlWidget->getCalibFx() * point3D.x) / point3D.z + (mControlWidget->getCalibCx() - bS);
point2D.y = (mControlWidget->getCalibFy() * point3D.y) / point3D.z + (mControlWidget->getCalibCy() - bS);
}
// shift back by the border size (bS is 0 if no image is loaded)
{
point2D.x += bS;
point2D.y += bS;
}
return point2D;
}
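/*
 * Summary of the projection implemented above: after rotating and translating p3d
 * into camera coordinates (point3D), the standard pinhole model is applied:
 *
 *   px = fx * X / Z + cx - bS,   py = fy * Y / Z + cy - bS
 *
 * with the border size bS added back at the end for bordered images.
 */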

/**
* @brief Rotate a given vector from camera coordinate system to world coordinate system
*
* When the world coordinate system is not aligned to the camera-system,
* some direction dependent calculations (like head orientation) have to be rotated to be correctly exported.
*
* @param camVec the vector to be rotated, given in the camera coordinate system.
* @return the rotated vector.
*/
cv::Vec3d ExtrCalibration::camToWorldRotation(const cv::Vec3d &camVec) const
{
// Transform the rotation vector into a rotation matrix with OpenCV's Rodrigues method
cv::Matx<double, 3, 3> rotMat;
const auto rvec = cv::Vec3d(
mControlWidget->getCalibExtrRot1(), mControlWidget->getCalibExtrRot2(), mControlWidget->getCalibExtrRot3());
Rodrigues(rvec, rotMat);
auto rotInv = rotMat.inv(cv::DECOMP_LU);
cv::Vec3d worldVec = rotInv * camVec;
return worldVec;
}
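/*
 * Usage sketch for camToWorldRotation(): rotating the camera's optical axis
 * (0, 0, 1 in camera coordinates) into world coordinates, e.g. to export a viewing
 * direction. Assumes camToWorldRotation() is publicly accessible; the helper name
 * is illustrative only.
 */
static cv::Vec3d viewingDirectionInWorld(const ExtrCalibration &extrCalib)
{
    // the camera looks along +z in its own coordinate system
    return extrCalib.camToWorldRotation(cv::Vec3d{0., 0., 1.});
}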
/**
* @brief Transforms a 2D point into a 3D point with given height.
*
* @param p2d 2D pixel point (without border)
* @param h height i.e. distance to xy-plane in cm
* @return calculated 3D point in cm
*/
cv::Point3f ExtrCalibration::get3DPoint(const cv::Point2f &p2d, double h) const
{
int bS = mMainWindow->getImage() ? mMainWindow->getImageBorderSize() : 0;
cv::Point3f resultPoint, tmpPoint;
// Transform the rotation vector into a rotation matrix with opencvs rodrigues method
cv::Matx<double, 3, 3> rot_inv;
cv::Matx<double, 3, 3> rot_mat;
const cv::Mat rvec =
(cv::Mat_<double>(3, 1) << mControlWidget->getCalibExtrRot1(),
mControlWidget->getCalibExtrRot2(),
mControlWidget->getCalibExtrRot3());
Rodrigues(rvec, rot_mat);
rot_inv = rot_mat.inv(cv::DECOMP_LU, nullptr);
// Create translation vector
cv::Vec3d translation{
mControlWidget->getCalibExtrTrans1(),
mControlWidget->getCalibExtrTrans2(),
mControlWidget->getCalibExtrTrans3()};
// Subtract principal point and border, so we can assume pinhole camera
const cv::Vec2d centeredImagePoint{
p2d.x - (mControlWidget->getCalibCx() - bS), p2d.y - (mControlWidget->getCalibCy() - bS)};
/* Basic Idea:
* All points projecting onto a point on the image plane lie on the same
* line (cf. pinhole camera model). We can determine this line in the form:
*
* g: x = lambda * v
*
* This line exists in camera coordinates. Let v be the projection with
* depth 1 (i.e. v_3 = 1). Then lambda is the depth of the resulting point.
* We'll continue to call lambda z instead, to show this.
* We now want to determine the depth at which the resulting point has height h
* in world coordinates. The transformation from cam to world is:
*
* W = R * C - T
* W := Point in World Coords
* C := Point in Cam Coords
* R,T := Rotation and Translation of Cam
*
* By putting in our x = z * v, we get:
* W = R * (z * v) - T
* <=> W = z * Rv - T
* <=> W + T = z * Rv
* <=> (W + T)/Rv = z
* We select the third row of this to solve for z. Finally g(z) is transformed
* into World Coords.
*/
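/*
 * Worked illustration of the formula above (made-up numbers): with
 * (Rv)_3 = 0.5, T_3 = 400 and a requested height h = 150, the depth becomes
 *
 *   z = (h + T_3) / (Rv)_3 = (150 + 400) / 0.5 = 1100,
 *
 * i.e. the reconstructed point lies at depth 1100 (in cm, the unit used here)
 * along the viewing ray before it is transformed back into world coordinates.
 */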
// calc Rv, (R = rot_inv)
// rotatedProj = Rv
const cv::Vec2d focalLength{mControlWidget->getCalibFx(), mControlWidget->getCalibFy()};
const cv::Vec2d pinholeProjectionXY = cv::Mat(centeredImagePoint.div(focalLength));
const cv::Vec3d pinholeProjectionXY1{pinholeProjectionXY[0], pinholeProjectionXY[1], 1};
const cv::Vec3d rotatedProj = rot_inv * pinholeProjectionXY1;
// determine z via formula from comment above; using 3rd row
double z = (h + translation[2]) / rotatedProj[2];
// Evaluate line at depth z; calc point in camera coords
// written this way instead of z * pinholeProjectionXY1 (i.e. z * v) to not change test results due to floating
// point precision diff
resultPoint.x = (p2d.x - (mControlWidget->getCalibCx() - bS));
resultPoint.y = (p2d.y - (mControlWidget->getCalibCy() - bS));
resultPoint.z = z;
resultPoint.x = resultPoint.x * z / mControlWidget->getCalibFx();
resultPoint.y = resultPoint.y * z / mControlWidget->getCalibFy();
// We transform from cam coords to world coords with W = R * C - T
// we now calc: W = R * (C - R^-1*T), which is equivalent
translation = rot_mat * translation;
tmpPoint.x = resultPoint.x - translation[0];
tmpPoint.y = resultPoint.y - translation[1];
tmpPoint.z = resultPoint.z - translation[2];
resultPoint.x = rot_inv(0, 0) * (tmpPoint.x) + rot_inv(0, 1) * (tmpPoint.y) + rot_inv(0, 2) * (tmpPoint.z);
resultPoint.y = rot_inv(1, 0) * (tmpPoint.x) + rot_inv(1, 1) * (tmpPoint.y) + rot_inv(1, 2) * (tmpPoint.z);
resultPoint.z = rot_inv(2, 0) * (tmpPoint.x) + rot_inv(2, 1) * (tmpPoint.y) + rot_inv(2, 2) * (tmpPoint.z);
// Coordinate Transformations
resultPoint.x -= mControlWidget->getCalibCoord3DTransX();
resultPoint.y -= mControlWidget->getCalibCoord3DTransY();
resultPoint.z -= mControlWidget->getCalibCoord3DTransZ();
resultPoint.x *= mControlWidget->getCalibCoord3DSwapX() ? -1 : 1;
resultPoint.y *= mControlWidget->getCalibCoord3DSwapY() ? -1 : 1;
resultPoint.z *= mControlWidget->getCalibCoord3DSwapZ() ? -1 : 1;
return resultPoint;
}
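/*
 * Sketch: consistency check between get3DPoint() and getImagePoint(). Projecting a
 * pixel onto a plane of known height and back should reproduce the pixel up to
 * numerical error. Assumes both accessors are public and <cmath> is available;
 * the 0.5 px tolerance is an arbitrary illustrative choice.
 */
static bool roundTripIsConsistent(ExtrCalibration &extrCalib, const cv::Point2f &pixel, double height)
{
    const cv::Point3f world       = extrCalib.get3DPoint(pixel, height);
    const cv::Point2f reprojected = extrCalib.getImagePoint(world);
    return std::hypot(reprojected.x - pixel.x, reprojected.y - pixel.y) < 0.5;
}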
bool ExtrCalibration::isOutsideImage(cv::Point2f p2d)
{
int bS = mMainWindow->getImage() ? mMainWindow->getImageBorderSize() : 0;
if(mMainWindow->getImage())
{
// reject non-finite coordinates before the range check
if(isnan(p2d.x) || isnan(p2d.y) || isinf(p2d.x) || isinf(p2d.y))
{
return true;
}
return p2d.x < -bS || p2d.x > mMainWindow->getImage()->width() - bS || p2d.y < -bS ||
p2d.y > mMainWindow->getImage()->height() - bS;
}
else
{
return false;
}
}
void ExtrCalibration::setXml(QDomElement &elem)
{
reprojectionError.setXml(elem);
}
void ExtrCalibration::getXml(QDomElement &elem)
{
QDomElement subElem;