Skip to content

Commit

Permalink
improved classes and features
Browse files Browse the repository at this point in the history
- WebARKitPatternTrackingInfo became a class
- new invertPose, setScale, and getScale methods to manage the scale of the pattern
- m_camMatrix is now a cv::Matx33d
  • Loading branch information
kalwalt committed Nov 4, 2023
1 parent d13feef commit bfcebc9
Show file tree
Hide file tree
Showing 3 changed files with 69 additions and 63 deletions.
26 changes: 22 additions & 4 deletions WebARKit/WebARKitPattern.cpp
Original file line number Diff line number Diff line change
@@ -1,9 +1,14 @@
#include <WebARKitPattern.h>
#include <iostream>
#include <opencv2/calib3d.hpp>

void WebARKitPatternTrackingInfo::computePose(std::vector<cv::Point3f>& treeDPoints, std::vector<cv::Point2f>& imgPoints,
cv::Mat& caMatrix, cv::Mat& distCoeffs) {
// Default-construct the tracking info with a zeroed 3x4 pose matrix and a
// unit pattern scale (members listed in declaration order: pose3d, m_scale).
WebARKitPatternTrackingInfo::WebARKitPatternTrackingInfo()
    : pose3d(cv::Mat::zeros(3, 4, CV_64FC1)), m_scale(1.0f) {}

void WebARKitPatternTrackingInfo::computePose(std::vector<cv::Point3f>& treeDPoints,
std::vector<cv::Point2f>& imgPoints, cv::Mat& caMatrix,
cv::Mat& distCoeffs) {
cv::Mat rvec = cv::Mat::zeros(3, 1, CV_64FC1); // output rotation vector
cv::Mat tvec = cv::Mat::zeros(3, 1, CV_64FC1); // output translation vector

Expand All @@ -13,5 +18,18 @@ void WebARKitPatternTrackingInfo::computePose(std::vector<cv::Point3f>& treeDPoi
cv::Rodrigues(rvec, rMat);
cv::hconcat(rMat, tvec, pose3d);

std::cout << "pose3d: " << pose3d.rows << " x " << pose3d.cols << std::endl;
invertPose();
}

void WebARKitPatternTrackingInfo::invertPose() {

cv::Mat invertPose(3, 4, CV_64FC1);
for (auto j = 0; j < 3; j++) {
invertPose.at<double>(j, 0) = pose3d.at<double>(j, 0);
invertPose.at<double>(j, 1) = -pose3d.at<double>(j, 1);
invertPose.at<double>(j, 2) = -pose3d.at<double>(j, 2);
invertPose.at<double>(j, 3) = pose3d.at<double>(j, 3) * m_scale * 0.001f * 1.64f;
}

pose3d = invertPose;
}
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,8 @@ class WebARKitTracker::WebARKitTrackerImpl {
WebARKitTrackerImpl()
: corners(4), initialized(false), output(17, 0.0), _valid(false), _isDetected(false), numMatches(0),
minNumMatches(MIN_NUM_MATCHES), _nn_match_ratio(0.7f) {
m_camMatrix = cv::Mat(3,3, cv::DataType<double>::type);
//m_distortionCoeff = cv::Mat();
//_patternTrackingInfo.pose3d = cv::Mat::zeros(3, 4, CV_64FC1);
//m_distortionCoeff = cv::Mat::zeros(6, 1, CV_64FC1);
m_distortionCoeff = cv::Mat::zeros(4,1,cv::DataType<double>::type);
m_camMatrix = cv::Matx33d::zeros();
m_distortionCoeff = cv::Mat::zeros(4, 1, cv::DataType<double>::type);
};

~WebARKitTrackerImpl() = default;
Expand All @@ -30,41 +27,25 @@ class WebARKitTracker::WebARKitTrackerImpl {
}
_camera->setupCamera(frameWidth, frameHeight);
_camera->printSettings();
//m_camMatrix = cv::Mat(3, 3, CV_64FC1, _camera->getCameraData().data());
//m_camMatrix = cv::Mat(3, 3, CV_64FC1);

std::array<double, 9> camData = _camera->getCameraData();
for(auto i = 0; i < 3; i++) {
for(auto j = 0; j < 3; j++) {
//WEBARKIT_LOGi("Camera Matrix: %d\n", camData[i*3+j]);
m_camMatrix.at<double>(i, j) = camData[i*3+j];
for (auto i = 0; i < 3; i++) {
for (auto j = 0; j < 3; j++) {
m_camMatrix(i, j) = camData[i * 3 + j];
}
}

/*m_camMatrix.at<double>(0,0) = 833.63;
m_camMatrix.at<double>(0,1) = 0.0;
m_camMatrix.at<double>(0,2) = 353.50;
m_camMatrix.at<double>(1,0) = 0.0;
m_camMatrix.at<double>(1,1) = 833.63;
m_camMatrix.at<double>(1,2) = 464.50;
m_camMatrix.at<double>(2,0) = 0.0;
m_camMatrix.at<double>(2,1) = 0.0;
m_camMatrix.at<double>(2,2) = 1.0;*/


for(auto i = 0; i < 3; i++) {
for(auto j = 0; j < 3; j++) {
WEBARKIT_LOGi("Camera Matrix: %.2f\n", m_camMatrix.at<double>(i, j));
for (auto i = 0; i < 3; i++) {
for (auto j = 0; j < 3; j++) {
WEBARKIT_LOGi("Camera Matrix: %.2f\n", m_camMatrix(i, j));
}
}

for(auto i = 0; i < 6; i++) {
for(auto j = 0; j < 1; j++) {
for (auto i = 0; i < 6; i++) {
for (auto j = 0; j < 1; j++) {
WEBARKIT_LOGi("Distortion coefficients: %.2f\n", m_distortionCoeff.at<double>(i, j));
}
}

//m_distortionCoeff = cv::Mat(6, 1, CV_64FC1, _camera->getDistortionCoefficients().data());
//m_distortionCoeff = cv::Mat::zeros(6, 1, CV_64FC1);
}

void initTracker(uchar* refData, size_t refCols, size_t refRows) {
Expand Down Expand Up @@ -247,17 +228,14 @@ class WebARKitTracker::WebARKitTrackerImpl {
// set old points to new points
framePts = goodPtsCurr;
std::vector<cv::Point2f> warpedCorners;
//std::vector<cv::Point3f> treeDPoints;

if ((valid = homographyValid(m_H))) {
fill_output(m_H);

warpedCorners = getSelectedFeaturesWarped(m_H);
//treeDPoints = getSelectedFeatures3D(m_H);
if(m_camMatrix.empty()) {
WEBARKIT_LOGi("Camera Matrix is empty!\n");
}else {
WEBARKIT_LOGi("Camera Matrix: %d\n", m_camMatrix.at<double>(0, 0));
}
_patternTrackingInfo.computePose(_pattern.points3d, warpedCorners, m_camMatrix, m_distortionCoeff);
auto camM = cv::Mat(m_camMatrix);
_patternTrackingInfo.computePose(_pattern.points3d, warpedCorners, camM, m_distortionCoeff);

_isDetected = true;
} else {
_isDetected = false;
Expand Down Expand Up @@ -348,13 +326,12 @@ class WebARKitTracker::WebARKitTrackerImpl {
return featureMask;
}

std::vector<cv::Point2f> getSelectedFeaturesWarped(cv::Mat& H)
{
std::vector<cv::Point2f> warpedPoints;
perspectiveTransform(_pattern.points2d, warpedPoints, H);
WEBARKIT_LOGi("warpedPoint(0,0): %.2f, %.2f\n", warpedPoints[0].x, warpedPoints[0].y);
return warpedPoints;
}
// Project the pattern's reference 2D feature points into the current frame
// through the homography H and return the warped points.
std::vector<cv::Point2f> getSelectedFeaturesWarped(cv::Mat& H) {
    std::vector<cv::Point2f> projected;
    cv::perspectiveTransform(_pattern.points2d, projected, H);
    WEBARKIT_LOGi("warpedPoint(0,0): %.2f, %.2f\n", projected[0].x, projected[0].y);
    return projected;
}

bool _valid;

Expand All @@ -380,7 +357,7 @@ class WebARKitTracker::WebARKitTrackerImpl {

WebARKitPatternTrackingInfo _patternTrackingInfo;

cv::Mat m_camMatrix;
cv::Matx33d m_camMatrix;
cv::Mat m_distortionCoeff;

private:
Expand Down
37 changes: 24 additions & 13 deletions WebARKit/include/WebARKitPattern.h
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,10 @@
struct WebARKitPattern {
cv::Size size;

//cv::Mat grayImg;
// cv::Mat grayImg;

//std::vector<cv::KeyPoint> keypoints;
//cv::Mat descriptors;
// std::vector<cv::KeyPoint> keypoints;
// cv::Mat descriptors;

std::vector<cv::Point2f> points2d;
std::vector<cv::Point3f> points3d;
Expand All @@ -18,16 +18,27 @@ struct WebARKitPattern {
/**
* Intermediate pattern tracking info structure
*/
struct WebARKitPatternTrackingInfo
{
cv::Mat homography;
std::vector<cv::Point2f> points2d;
cv::Mat pose3d;

/**
* Compute pattern pose using PnP algorithm
*/
void computePose(std::vector<cv::Point3f>& treeDPoints, std::vector<cv::Point2f>& imgPoints, cv::Mat& caMatrix, cv::Mat& distCoeffs);
/**
 * Tracking state for a detected pattern: the current homography, the tracked
 * 2D points, and the estimated 3x4 [R|t] camera pose (see computePose()).
 */
class WebARKitPatternTrackingInfo {
  public:
    WebARKitPatternTrackingInfo();

    cv::Mat homography;                // latest pattern-to-frame homography
    std::vector<cv::Point2f> points2d; // tracked pattern points in the frame
    cv::Mat pose3d;                    // 3x4 [R|t] pose matrix (CV_64FC1)

    /// Set the pattern scale applied to the pose translation.
    void setScale(const float scale) { m_scale = scale; }

    /// Current pattern scale factor.
    // Fixed: accessor is now const so it can be called on const instances.
    float getScale() const { return m_scale; }

    /**
     * Compute pattern pose using PnP algorithm
     */
    void computePose(std::vector<cv::Point3f>& treeDPoints, std::vector<cv::Point2f>& imgPoints, cv::Mat& caMatrix,
                     cv::Mat& distCoeffs);

  private:
    float m_scale;     // pattern scale; multiplies the pose translation
    void invertPose(); // flips y/z rows and rescales pose3d after computePose()
};

#endif // WEBARKITPATTERN_H

0 comments on commit bfcebc9

Please sign in to comment.