Added marker pose estimation. Not tested yet.

This commit is contained in:
2014-05-13 18:30:32 -04:30
parent ea33f1e725
commit b0151e85e9
4 changed files with 166 additions and 63 deletions

View File

@@ -21,6 +21,9 @@
#include "marker.hpp"
//#define LOG_ENABLED
#define MAX_MARKERS 5
#define TRANSLATION_VECTOR_POINTS 3
#define ROTATION_MATRIX_SIZE 9
#define POINTS_PER_CALIBRATION_SAMPLE 54
#define CALIBRATION_SAMPLES 10
@@ -37,17 +40,21 @@ extern "C"{
/**
* JNI wrapper around the nxtar::getAllMarkers() method.
*/
JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes){
char codeMsg[128];
std::vector<int> vCodes;
cv::Mat temp;
log(TAG, "getMarkerCodesAndLocations(): Requesting native data.");
JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes, jlong camMat, jlong distMat, jfloatArray translations, jfloatArray rotations){
char codeMsg[128];
std::vector<int> vCodes;
nxtar::markers_vector vMarkers;
cv::Mat temp;
// Get the native object addresses.
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
log(TAG, "getMarkerCodesAndLocations(): Requesting native data.");
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
cv::Mat& mCam = *(cv::Mat*)camMat;
cv::Mat& mDist = *(cv::Mat*)distMat;
jint * _codes = env->GetIntArrayElements(codes, 0);
jfloat * _tr = env->GetFloatArrayElements(translations, 0);
jfloat * _rt = env->GetFloatArrayElements(rotations, 0);
// Convert the input image to the BGR color space.
log(TAG, "getMarkerCodesAndLocations(): Converting color space before processing.");
@@ -55,14 +62,32 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
// Find all markers in the input image.
log(TAG, "getMarkerCodesAndLocations(): Finding markers.");
nxtar::getAllMarkers(vCodes, temp);
nxtar::getAllMarkers(vMarkers, temp);
nxtar::estimateMarkerPosition(vMarkers, mCam, mDist);
// Copy the marker codes to the output vector.
log(TAG, "getMarkerCodesAndLocations(): Copying marker codes.");
for(int i = 0; i < vCodes.size() && i < 15; i++){
_codes[i] = (jint)vCodes[i];
for(size_t i = 0; i < vMarkers.size() && i < MAX_MARKERS; i++){
_codes[i] = (jint)vMarkers[i].code;
}
vCodes.clear();
// Copy the geometric transformations to the output vectors.
for(int i = 0, p = 0; i < vMarkers.size(); i++, p += 3){
_tr[p ] = vMarkers[i].translation.at<jfloat>(0);
_tr[p + 1] = vMarkers[i].translation.at<jfloat>(1);
_tr[p + 2] = vMarkers[i].translation.at<jfloat>(2);
}
for(int k = 0; k < vMarkers.size(); k++){
for(int row = 0; row < 3; row++){
for(int col = 0; col < 3; col++){
_rt[col + (row * 3) + (9 * k)] = vMarkers[k].rotation.at<jfloat>(row, col);
}
}
}
// Clear marker memory.
vMarkers.clear();
// Convert the output image back to the RGB color space.
cv::cvtColor(temp, mbgr, CV_BGR2RGB);
@@ -70,6 +95,8 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
// Release native data.
log(TAG, "getMarkerCodesAndLocations(): Releasing native data.");
env->ReleaseIntArrayElements(codes, _codes, 0);
env->ReleaseFloatArrayElements(translations, _tr, 0);
env->ReleaseFloatArrayElements(rotations, _rt, 0);
}
/**
@@ -77,14 +104,13 @@ JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAn
*/
JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrationPattern(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jfloatArray points){
nxtar::points_vector v_points;
bool found;
cv::Mat temp;
log(TAG, "findCalibrationPattern(): Requesting native data.");
bool found;
cv::Mat temp;
// Get the native object addresses.
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
log(TAG, "findCalibrationPattern(): Requesting native data.");
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
jfloat * _points = env->GetFloatArrayElements(points, 0);
// Convert the input image to the BGR color space.
@@ -118,13 +144,13 @@ JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrat
* JNI wrapper around the nxtar::getCameraParameters() method.
*/
JNIEXPORT jdouble JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_calibrateCameraParameters(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jlong addrMatFrame, jfloatArray points){
double error;
double error;
std::vector<nxtar::points_vector> imagePoints;
// Get native object addresses.
log(TAG, "calibrateCameraParameters(): Requesting native data.");
cv::Mat& mIn = *(cv::Mat*)addrMatIn;
cv::Mat& mOut = *(cv::Mat*)addrMatOut;
cv::Mat& mIn = *(cv::Mat*)addrMatIn;
cv::Mat& mOut = *(cv::Mat*)addrMatOut;
cv::Mat& mFrame = *(cv::Mat*)addrMatFrame;
jfloat * _points = env->GetFloatArrayElements(points, 0);

View File

@@ -27,7 +27,6 @@ namespace nxtar{
typedef std::vector<cv::Point3f> points_vector_3D;
typedef std::vector<std::vector<cv::Point> > contours_vector;
typedef std::vector<Marker> markers_vector;
/******************************************************************************
* PRIVATE CONSTANTS *
@@ -92,16 +91,15 @@ void warpMarker(Marker &, cv::Mat &, cv::Mat &);
* PUBLIC API *
******************************************************************************/
void getAllMarkers(std::vector<int> & codes, cv::Mat & img){
void getAllMarkers(markers_vector & valid_markers, cv::Mat & img){
cv::Mat gray, thresh, cont, mark;
contours_vector contours;
markers_vector markers;
markers_vector valid_markers;
#ifdef DESKTOP
std::ostringstream oss;
#endif
codes.clear();
valid_markers.clear();
// Find all marker candidates in the input image.
// 1) First, convert the image to grayscale.
@@ -147,7 +145,6 @@ void getAllMarkers(std::vector<int> & codes, cv::Mat & img){
// Fix the detected corners to better approximate the markers. And
// push their codes to the output vector.
cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), TERM_CRITERIA);
codes.push_back(valid_markers[i].code);
}
// Render the detected markers on top of the input image.
@@ -158,7 +155,6 @@ void getAllMarkers(std::vector<int> & codes, cv::Mat & img){
// Clear the local vectors.
markers.clear();
contours.clear();
valid_markers.clear();
}
bool findCalibrationPattern(points_vector & corners, cv::Mat & img){
@@ -204,6 +200,35 @@ double getCameraParameters(cv::Mat & camera_matrix, cv::Mat & dist_coeffs, std::
return cv::calibrateCamera(object_points, image_points, image_size, camera_matrix, dist_coeffs, rvecs, tvecs, 0, TERM_CRITERIA);
}
/**
 * Estimates the pose of every detected marker relative to the camera.
 *
 * For each marker, solvePnP maps a unit square (side 1.0, centered at the
 * origin, lying in the Z = 0 plane) onto the marker's detected image
 * corners, yielding a rotation vector and translation vector. These are
 * converted to float, the rotation vector is expanded into a 3x3 rotation
 * matrix via Rodrigues, and the results are stored in each marker's
 * rotation and translation members.
 *
 * @param markers   In/out: detected markers; their .points (image corners)
 *                  are read and their .rotation / .translation are written.
 * @param camMatrix Camera intrinsic matrix (from prior calibration).
 * @param distCoeff Lens distortion coefficients (from prior calibration).
 */
void estimateMarkerPosition(markers_vector & markers, cv::Mat & camMatrix, cv::Mat & distCoeff){
// rAux/tAux hold solvePnP's raw (double) output; rVec/tVec are the
// float-converted copies actually used below.
cv::Mat rVec, rAux, tAux;
cv::Mat_<float> tVec, rotation(3,3);
points_vector_3D objectPoints;
// Assemble a unitary CCW oriented reference polygon.
// Corner order must match the order getAllMarkers() stores in
// Marker::points for solvePnP's correspondences to be valid.
objectPoints.push_back(cv::Point3f(-0.5f, -0.5f, 0.0f));
objectPoints.push_back(cv::Point3f(-0.5f, 0.5f, 0.0f));
objectPoints.push_back(cv::Point3f( 0.5f, 0.5f, 0.0f));
objectPoints.push_back(cv::Point3f( 0.5f, -0.5f, 0.0f));
for(size_t i = 0; i < markers.size(); i++){
// Obtain the translation and rotation vectors.
cv::solvePnP(objectPoints, markers[i].points, camMatrix, distCoeff, rAux, tAux);
// Convert the obtained vectors to float.
rAux.convertTo(rVec, CV_32F);
tAux.convertTo(tVec, CV_32F);
// Convert the rotation vector to a rotation matrix.
cv::Rodrigues(rVec, rotation);
// Make the rotation and translation relative to the "camera" and save
// the results to the output marker.
// NOTE(review): this stores R^T and -t. A full rigid-transform inverse
// would be R^T and -R^T*t; confirm the renderer expects this convention
// (commit message says "Not tested yet").
markers[i].rotation = cv::Mat(rotation.t());
markers[i].translation = cv::Mat(-tVec);
}
}
/******************************************************************************
* PRIVATE HELPER FUNCTIONS *
******************************************************************************/
@@ -357,6 +382,7 @@ void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
/**
* Calculate the hamming distance of a 5x5 bit matrix.
* Function by Daniel Lelis Baggio for "Mastering OpenCV with Practical Computer Vision Projects".
*/
int hammDistMarker(cv::Mat bits){
int ids[4][5] = {
@@ -397,7 +423,7 @@ cv::Mat rotate(cv::Mat in){
in.copyTo(out);
for (int i = 0; i < in.rows; i++){
for (int j = 0; j < in.cols; j++){
out.at<uchar>(i, j)=in.at<uchar>(in.cols-j - 1, i);
out.at<uchar>(i, j) = in.at<uchar>(in.cols-j - 1, i);
}
}

View File

@@ -22,20 +22,30 @@
namespace nxtar{
typedef std::vector<cv::Point2f> points_vector;
class Marker;
typedef std::vector<cv::Point2f> points_vector;
typedef std::vector<Marker> markers_vector;
/**
* A class representing a marker with the geometric transformations needed to
* render something on top of it.
*/
class Marker{
public:
~Marker();
int code;
points_vector points;
int code;
cv::Mat translation;
cv::Mat rotation;
};
/**
* Detect all 5x5 markers in the input image and return their codes in the
* output vector.
*/
void getAllMarkers(std::vector<int> &, cv::Mat &);
void getAllMarkers(markers_vector &, cv::Mat &);
/**
* Find a chessboard calibration pattern in the input image. Returns true
@@ -51,6 +61,14 @@ bool findCalibrationPattern(points_vector &, cv::Mat &);
*/
double getCameraParameters(cv::Mat &, cv::Mat &, std::vector<points_vector> &, cv::Size);
/**
* Obtains the geometric transformations necessary to move a reference
* unitary polygon to the position and rotation of the markers passed as input.
* The obtained transformations are given relative to a camera centered in the
* origin and are saved inside the input markers.
*/
void estimateMarkerPosition(markers_vector &, cv::Mat &, cv::Mat &);
} // namespace nxtar
#endif // MARKER_HPP