Started porting the camera calibration functions.

This commit is contained in:
2014-04-10 17:55:31 -04:30
parent 453c3a36e5
commit 249e6e30a4
6 changed files with 493 additions and 380 deletions

View File

@@ -4,7 +4,7 @@ include $(CLEAR_VARS)
OPENCV_CAMERA_MODULES:=off OPENCV_CAMERA_MODULES:=off
OPENCV_LIB_TYPE:=STATIC OPENCV_LIB_TYPE:=STATIC
include /home/miky/Escritorio/OpenCV-2.4.7-android-sdk/sdk/native/jni/OpenCV.mk include C:\Users\miguel.astor\Documents\OpenCV-2.4.8-android-sdk\sdk\native\jni\OpenCV.mk
LOCAL_MODULE := cvproc LOCAL_MODULE := cvproc
LOCAL_SRC_FILES := cv_proc.cpp marker.cpp LOCAL_SRC_FILES := cv_proc.cpp marker.cpp

View File

@@ -21,8 +21,6 @@
//#define CAN_LOG //#define CAN_LOG
extern "C"{
#ifdef CAN_LOG #ifdef CAN_LOG
#define log(TAG, MSG) (__android_log_write(ANDROID_LOG_DEBUG, TAG, MSG)) #define log(TAG, MSG) (__android_log_write(ANDROID_LOG_DEBUG, TAG, MSG))
const char * TAG = "CVPROC_NATIVE"; const char * TAG = "CVPROC_NATIVE";
@@ -30,57 +28,61 @@ const char * TAG = "CVPROC_NATIVE";
#define log(TAG, MSG) (1 + 1) #define log(TAG, MSG) (1 + 1)
#endif #endif
JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations( extern "C"{
JNIEnv* env,
jobject jobj, JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes){
jlong addrMatIn,
jlong addrMatOut,
jintArray codes
){
char codeMsg[128]; char codeMsg[128];
std::vector<int> vCodes; std::vector<int> vCodes;
log(TAG, "Requesting native data."); log(TAG, "getMarkerCodesAndLocations(): Requesting native data.");
cv::Mat& myuv = *(cv::Mat*)addrMatIn; cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut; cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
jint * _codes = env->GetIntArrayElements(codes, 0); jint * _codes = env->GetIntArrayElements(codes, 0);
log(TAG, "Converting color space before processing."); log(TAG, "getMarkerCodesAndLocations(): Converting color space before processing.");
cv::cvtColor(myuv, mbgr, CV_RGB2BGR); cv::cvtColor(myuv, mbgr, CV_RGB2BGR);
log(TAG, "Finding markers."); log(TAG, "getMarkerCodesAndLocations(): Finding markers.");
nxtar::getAllMarkers(vCodes, mbgr); nxtar::getAllMarkers(vCodes, mbgr);
log(TAG, "Copying marker codes."); log(TAG, "getMarkerCodesAndLocations(): Copying marker codes.");
for(int i = 0; i < vCodes.size() && i < 15; i++){ for(int i = 0; i < vCodes.size() && i < 15; i++){
_codes[i] = vCodes[i]; _codes[i] = (jint)vCodes[i];
sprintf(codeMsg, "Code [%d] = %d", i, vCodes[i]);
log(TAG, codeMsg);
} }
vCodes.clear(); vCodes.clear();
log(TAG, "Releasing native data."); log(TAG, "getMarkerCodesAndLocations(): Releasing native data.");
env->ReleaseIntArrayElements(codes, _codes, 0); env->ReleaseIntArrayElements(codes, _codes, 0);
} }
JNIEXPORT void JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrationPattern( JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_ccg_nxtar_MainActivity_findCalibrationPattern(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jfloatArray points){
JNIEnv* env, nxtar::points_vector v_points;
jobject jobj, bool found;
jlong addrMatIn,
jlong addrMatOut log(TAG, "findCalibrationPattern(): Requesting native data.");
){
log(TAG, "Requesting native data.");
cv::Mat& myuv = *(cv::Mat*)addrMatIn; cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut; cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
jfloat * _points = env->GetFloatArrayElements(points, 0);
log(TAG, "Converting color space before processing."); log(TAG, "findCalibrationPattern(): Converting color space before processing.");
cv::cvtColor(myuv, mbgr, CV_RGB2BGR); cv::cvtColor(myuv, mbgr, CV_RGB2BGR);
log(TAG, "Finding markers."); log(TAG, "findCalibrationPattern(): Finding calibration pattern.");
nxtar::calibrateCamera(mbgr); found = nxtar::findCalibrationPattern(v_points, mbgr);
log(TAG, "findCalibrationPattern(): Copying calibration points.");
for(size_t i = 0, p = 0; i < v_points.size(); i++, p += 2){
_points[p] = (jfloat)v_points[i].x;
_points[p + 1] = (jfloat)v_points[i].y;
}
log(TAG, "findCalibrationPattern(): Releasing native data.");
env->ReleaseFloatArrayElements(points, _points, 0);
return (jboolean)found;
} }
} }

View File

@@ -16,29 +16,101 @@
#include <algorithm> #include <algorithm>
#include <utility> #include <utility>
#include <limits> #include <limits>
#ifdef DESKTOP
#include <iostream> #include <iostream>
#endif
#include "marker.hpp" #include "marker.hpp"
#define MIN_CONTOUR_LENGTH 0.1
namespace nxtar{ namespace nxtar{
static int PATTERN_DETECTION_PARAMS = cv::CALIB_CB_ADAPTIVE_THRESH + cv::CALIB_CB_NORMALIZE_IMAGE + cv::CALIB_CB_FAST_CHECK; typedef std::vector<cv::Point3f> points_vector_3D;
static const cv::TermCriteria termCriteria = cv::TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1); typedef std::vector<std::vector<cv::Point> > contours_vector;
static const cv::Scalar COLOR = cv::Scalar(255, 255, 255); typedef std::vector<Marker> markers_vector;
static const cv::Size checkersPatternSize(6, 9);
float perimeter (points_vector &); /******************************************************************************
int hammDistMarker (cv::Mat); * PRIVATE CONSTANTS *
cv::Mat rotate (cv::Mat); ******************************************************************************/
int decodeMarker (cv::Mat &);
void renderMarkers (markers_vector &, cv::Mat &);
void isolateMarkers (const contours_vector &, markers_vector &);
void findContours (cv::Mat &, contours_vector &, int);
void warpMarker (Marker &, cv::Mat &, cv::Mat &);
void getAllMarkers(std::vector<int> & codes, cv::Mat & img){ /**
* Size of a square cell in the calibration pattern.
*/
static const float SQUARE_SIZE = 1.0f;
/**
* Minimal length of a contour to be considered as a marker candidate.
*/
static const float MIN_CONTOUR_LENGTH = 0.1;
/**
* Flags for the calibration pattern detection function.
*/
static const int PATTERN_DETECTION_FLAGS = cv::CALIB_CB_ADAPTIVE_THRESH + cv::CALIB_CB_NORMALIZE_IMAGE + cv::CALIB_CB_FAST_CHECK;
/**
* Color for rendering the marker outlines.
*/
static const cv::Scalar COLOR = cv::Scalar(255, 255, 255);
/**
* Size of the chessboard pattern image (columns, rows).
*/
static const cv::Size CHESSBOARD_PATTERN_SIZE(6, 9);
/**
* Termination criteria for OpenCV's iterative algorithms.
*/
static const cv::TermCriteria TERM_CRITERIA = cv::TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1);
/******************************************************************************
* PRIVATE FUNCTION PROTOTYPES *
******************************************************************************/
/**
* Calculates the perimeter of a points vector defining a polygon.
*/
float perimeter(points_vector &);
/**
* Calculates the Hamming distance of a 5x5 marker.
*/
int hammDistMarker(cv::Mat);
/**
* Rotates an OpenCV matrix in place by 90 degrees clockwise.
*/
cv::Mat rotate(cv::Mat);
/**
* Returns the code of a 5x5 marker or -1 if the marker is not valid.
*/
int decodeMarker(cv::Mat &);
/**
* Renders the polygon defined in the input vector on the specified image.
*/
void renderMarkers(markers_vector &, cv::Mat &);
/**
* Identifies all possible marker candidates in a polygon vector.
*/
void isolateMarkers(const contours_vector &, markers_vector &);
/**
* Identifies all roughly 4 side figures in the input image.
*/
void findContours(cv::Mat &, contours_vector &, int);
/**
* Removes the perspective distortion from a marker candidate image.
*/
void warpMarker(Marker &, cv::Mat &, cv::Mat &);
/******************************************************************************
* PUBLIC API *
******************************************************************************/
void getAllMarkers(std::vector<int> & codes, cv::Mat & img){
cv::Mat gray, thresh, cont, mark; cv::Mat gray, thresh, cont, mark;
contours_vector contours; contours_vector contours;
markers_vector markers; markers_vector markers;
@@ -49,11 +121,18 @@ void getAllMarkers(std::vector<int> & codes, cv::Mat & img){
codes.clear(); codes.clear();
// Find all marker candidates in the input image.
// 1) First, convert the image to grayscale.
// 2) Then, binarize the grayscale image.
// 3) Finally, identify all 4-sided figures in the binarized image.
cv::cvtColor(img, gray, CV_BGR2GRAY); cv::cvtColor(img, gray, CV_BGR2GRAY);
cv::adaptiveThreshold(gray, thresh, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 7, 7); cv::adaptiveThreshold(gray, thresh, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 7, 7);
findContours(thresh, contours, 40); findContours(thresh, contours, 40);
isolateMarkers(contours, markers); isolateMarkers(contours, markers);
// Remove the perspective distortion from the detected marker candidates.
// Then attempt to decode them and push the valid ones into the valid
// markers vector.
for(int i = 0; i < markers.size(); i++){ for(int i = 0; i < markers.size(); i++){
warpMarker(markers[i], gray, mark); warpMarker(markers[i], gray, mark);
@@ -67,6 +146,8 @@ void getAllMarkers(std::vector<int> & codes, cv::Mat & img){
for(int i = 0; i < valid_markers.size(); i++){ for(int i = 0; i < valid_markers.size(); i++){
#ifdef DESKTOP #ifdef DESKTOP
// Render the detected valid markers with their codes for debugging
// purposes.
oss << valid_markers[i].code; oss << valid_markers[i].code;
cv::putText(mark, oss.str(), cv::Point(5, 250), cv::FONT_HERSHEY_PLAIN, 2, cv::Scalar::all(128), 3, 8); cv::putText(mark, oss.str(), cv::Point(5, 250), cv::FONT_HERSHEY_PLAIN, 2, cv::Scalar::all(128), 3, 8);
@@ -81,36 +162,72 @@ void getAllMarkers(std::vector<int> & codes, cv::Mat & img){
oss.str(""); oss.str("");
oss.clear(); oss.clear();
#endif #endif
cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), termCriteria); // Fix the detected corners to better approximate the markers. And
// push their codes to the output vector.
cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), TERM_CRITERIA);
codes.push_back(valid_markers[i].code); codes.push_back(valid_markers[i].code);
} }
// Render the detected markers on top of the input image.
cont = cv::Mat::zeros(img.size(), CV_8UC3); cont = cv::Mat::zeros(img.size(), CV_8UC3);
renderMarkers(valid_markers, cont); renderMarkers(valid_markers, cont);
img = img + cont; img = img + cont;
// Clear the local vectors.
markers.clear(); markers.clear();
contours.clear(); contours.clear();
valid_markers.clear(); valid_markers.clear();
} }
void calibrateCamera(cv::Mat & img){ bool findCalibrationPattern(points_vector & corners, cv::Mat & img){
bool patternfound; bool patternfound;
points_vector corners;
cv::Mat gray; cv::Mat gray;
// Convert the input image to grayscale and attempt to find the
// calibration pattern.
cv::cvtColor(img, gray, CV_BGR2GRAY); cv::cvtColor(img, gray, CV_BGR2GRAY);
patternfound = cv::findChessboardCorners(gray, checkersPatternSize, corners, PATTERN_DETECTION_PARAMS); patternfound = cv::findChessboardCorners(gray, CHESSBOARD_PATTERN_SIZE, corners, PATTERN_DETECTION_FLAGS);
// If the pattern was found then fix the detected points a bit.
if(patternfound) if(patternfound)
cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1), termCriteria); cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1), TERM_CRITERIA);
cv::drawChessboardCorners(img, checkersPatternSize, cv::Mat(corners), patternfound); // Render the detected pattern.
} cv::drawChessboardCorners(img, CHESSBOARD_PATTERN_SIZE, cv::Mat(corners), patternfound);
void findContours(cv::Mat & img, contours_vector & v, int minP){ return patternfound;
std::vector<std::vector<cv::Point> > c; }
double getCameraParameters(cv::Mat & camera_matrix, cv::Mat & dist_coeffs, std::vector<points_vector> & image_points, cv::Size image_size){
std::vector<cv::Mat> rvecs, tvecs;
std::vector<points_vector_3D> object_points;
points_vector_3D corner_points;
// Build the reference object points vector.
for(int i = 0; i < CHESSBOARD_PATTERN_SIZE.height; i++){
for(int j = 0; j < CHESSBOARD_PATTERN_SIZE.width; j++){
corner_points.push_back(cv::Point3f(float( j * SQUARE_SIZE ), float( i * SQUARE_SIZE ), 0));
}
}
object_points.push_back(corner_points);
object_points.resize(image_points.size(), object_points[0]);
// Build a camera matrix.
camera_matrix = cv::Mat::eye(3, 3, CV_64F);
// Build the distortion coefficients matrix.
dist_coeffs = cv::Mat::zeros(8, 1, CV_64F);
// Calibrate and return the reprojection error.
return cv::calibrateCamera(object_points, image_points, image_size, camera_matrix, dist_coeffs, rvecs, tvecs, 0, TERM_CRITERIA);
}
/******************************************************************************
* PRIVATE HELPER FUNCTIONS *
******************************************************************************/
void findContours(cv::Mat & img, contours_vector & v, int minP){
contours_vector c;
cv::findContours(img, c, CV_RETR_LIST, CV_CHAIN_APPROX_NONE); cv::findContours(img, c, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
v.clear(); v.clear();
@@ -119,9 +236,9 @@ void findContours(cv::Mat & img, contours_vector & v, int minP){
v.push_back(c[i]); v.push_back(c[i]);
} }
} }
} }
void renderMarkers(markers_vector & v, cv::Mat & dst){ void renderMarkers(markers_vector & v, cv::Mat & dst){
contours_vector cv; contours_vector cv;
for(size_t i = 0; i < v.size(); i++){ for(size_t i = 0; i < v.size(); i++){
@@ -132,9 +249,9 @@ void renderMarkers(markers_vector & v, cv::Mat & dst){
} }
cv::drawContours(dst, cv, -1, COLOR, 1, CV_AA); cv::drawContours(dst, cv, -1, COLOR, 1, CV_AA);
} }
void isolateMarkers(const contours_vector & vc, markers_vector & vm){ void isolateMarkers(const contours_vector & vc, markers_vector & vm){
std::vector<cv::Point> appCurve; std::vector<cv::Point> appCurve;
markers_vector posMarkers; markers_vector posMarkers;
@@ -205,9 +322,9 @@ void isolateMarkers(const contours_vector & vc, markers_vector & vm){
for(size_t i = 0; i < posMarkers.size(); ++i){ for(size_t i = 0; i < posMarkers.size(); ++i){
if(remMask[i]) vm.push_back(posMarkers[i]); if(remMask[i]) vm.push_back(posMarkers[i]);
} }
} }
void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){ void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
cv::Mat bin; cv::Mat bin;
cv::Size markerSize(350, 350); cv::Size markerSize(350, 350);
points_vector v; points_vector v;
@@ -220,9 +337,9 @@ void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
cv::warpPerspective(in, bin, M, markerSize); cv::warpPerspective(in, bin, M, markerSize);
cv::threshold(bin, out, 128, 255, cv::THRESH_BINARY | cv::THRESH_OTSU); cv::threshold(bin, out, 128, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
} }
int hammDistMarker(cv::Mat bits){ int hammDistMarker(cv::Mat bits){
int ids[4][5] = { int ids[4][5] = {
{1,0,0,0,0}, {1,0,0,0,0},
{1,0,1,1,1}, {1,0,1,1,1},
@@ -250,9 +367,9 @@ int hammDistMarker(cv::Mat bits){
} }
return dist; return dist;
} }
cv::Mat rotate(cv::Mat in){ cv::Mat rotate(cv::Mat in){
cv::Mat out; cv::Mat out;
in.copyTo(out); in.copyTo(out);
for (int i=0;i<in.rows;i++){ for (int i=0;i<in.rows;i++){
@@ -262,9 +379,9 @@ cv::Mat rotate(cv::Mat in){
} }
return out; return out;
} }
int decodeMarker(cv::Mat & marker){ int decodeMarker(cv::Mat & marker){
bool found = false; bool found = false;
int code = 0; int code = 0;
cv::Mat bits; cv::Mat bits;
@@ -325,9 +442,9 @@ int decodeMarker(cv::Mat & marker){
return code; return code;
}else }else
return -1; return -1;
} }
float perimeter(points_vector & p){ float perimeter(points_vector & p){
float per = 0.0f, dx, dy; float per = 0.0f, dx, dy;
for(size_t i; i < p.size(); ++i){ for(size_t i; i < p.size(); ++i){
@@ -337,10 +454,13 @@ float perimeter(points_vector & p){
} }
return per; return per;
} }
Marker::~Marker(){ /******************************************************************************
* CLASS METHODS *
******************************************************************************/
Marker::~Marker(){
points.clear(); points.clear();
} }
} }

View File

@@ -17,15 +17,12 @@
#define MARKER_HPP #define MARKER_HPP
#include <vector> #include <vector>
#include <opencv2/opencv.hpp> #include <opencv2/opencv.hpp>
namespace nxtar{ namespace nxtar{
class Marker;
typedef std::vector<std::vector<cv::Point> > contours_vector;
typedef std::vector<cv::Point2f> points_vector; typedef std::vector<cv::Point2f> points_vector;
typedef std::vector<Marker> markers_vector;
class Marker{ class Marker{
public: public:
@@ -34,8 +31,26 @@ public:
int code; int code;
}; };
/**
* Detect all 5x5 markers in the input image and return their codes in the
* output vector.
*/
void getAllMarkers(std::vector<int> &, cv::Mat &); void getAllMarkers(std::vector<int> &, cv::Mat &);
void calibrateCamera(cv::Mat &);
/**
* Find a chessboard calibration pattern in the input image. Returns true
* if the pattern was found, false otherwise. The calibration points
* detected on the image are saved in the output vector.
*/
bool findCalibrationPattern(points_vector &, cv::Mat &);
/**
* Sets the camera matrix and the distortion coefficients for the camera
* that captured the input image points into the output matrices. Returns
* the reprojection error as returned by cv::calibrateCamera.
*/
double getCameraParameters(cv::Mat &, cv::Mat &, std::vector<points_vector> &, cv::Size);
} }
#endif #endif

View File

@@ -9,4 +9,4 @@
# Project target. # Project target.
target=android-19 target=android-19
android.library.reference.1=../../../../../NVPACK/OpenCV-2.4.5-Tegra-sdk-r2/sdk/java android.library.reference.1=../../../../../Documents/OpenCV-2.4.8-android-sdk/sdk/java

View File

@@ -17,16 +17,13 @@ package ve.ucv.ciens.ccg.nxtar;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader; import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils; import org.opencv.android.Utils;
import org.opencv.core.Mat; import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc; import org.opencv.imgproc.Imgproc;
import ve.ucv.ciens.ccg.nxtar.interfaces.CVProcessor; import ve.ucv.ciens.ccg.nxtar.interfaces.CVProcessor;
import ve.ucv.ciens.ccg.nxtar.interfaces.MulticastEnabler; import ve.ucv.ciens.ccg.nxtar.interfaces.OSFunctionalityProvider;
import ve.ucv.ciens.ccg.nxtar.interfaces.Toaster;
import android.content.Context; import android.content.Context;
import android.content.pm.ActivityInfo; import android.content.pm.ActivityInfo;
import android.graphics.Bitmap; import android.graphics.Bitmap;
@@ -43,39 +40,37 @@ import com.badlogic.gdx.backends.android.AndroidApplication;
import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration; import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration;
import com.badlogic.gdx.controllers.mappings.Ouya; import com.badlogic.gdx.controllers.mappings.Ouya;
public class MainActivity extends AndroidApplication implements Toaster, MulticastEnabler, CVProcessor{ public class MainActivity extends AndroidApplication implements OSFunctionalityProvider, CVProcessor{
private static final String TAG = "NXTAR_ANDROID_MAIN"; private static final String TAG = "NXTAR_ANDROID_MAIN";
private static final String CLASS_NAME = MainActivity.class.getSimpleName(); private static final String CLASS_NAME = MainActivity.class.getSimpleName();
private static boolean ocvOn = false;
private WifiManager wifiManager; private WifiManager wifiManager;
private MulticastLock multicastLock; private MulticastLock multicastLock;
private Handler uiHandler; private Handler uiHandler;
private Context uiContext; private Context uiContext;
private static boolean ocvOn = false;
private BaseLoaderCallback loaderCallback;
private final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); private final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
public native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
public native boolean findCalibrationPattern(long inMat, long outMat, float[] points);
static{ static{
if(!OpenCVLoader.initDebug()){ if(!OpenCVLoader.initDebug())
System.exit(1); ocvOn = false;
}
try{ try{
System.loadLibrary("cvproc"); System.loadLibrary("cvproc");
ocvOn = true; ocvOn = true;
}catch(UnsatisfiedLinkError u){ }catch(UnsatisfiedLinkError u){
System.exit(1); ocvOn = false;
} }
} }
public native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes);
public native void findCalibrationPattern(long inMat, long outMat);
@Override @Override
public void onCreate(Bundle savedInstanceState){ public void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
//ocvOn = false;
if(!Ouya.runningOnOuya){ if(!Ouya.runningOnOuya){
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}else{ }else{
@@ -92,30 +87,12 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
cfg.useCompass = false; cfg.useCompass = false;
cfg.useWakelock = true; cfg.useWakelock = true;
loaderCallback = new BaseLoaderCallback(this){
@Override
public void onManagerConnected(int status){
switch(status){
case LoaderCallbackInterface.SUCCESS:
System.loadLibrary("cvproc");
ocvOn = true;
Toast.makeText(uiContext, R.string.ocv_success, Toast.LENGTH_LONG).show();
break;
default:
Toast.makeText(uiContext, R.string.ocv_failed, Toast.LENGTH_LONG).show();
Gdx.app.exit();
break;
}
}
};
//OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_7, this, loaderCallback);
initialize(new NxtARCore(this), cfg); initialize(new NxtARCore(this), cfg);
} }
//////////////////////////////// ////////////////////////////////////////////////
// Toaster interface methods. // // OSFunctionalityProvider interface methods. //
//////////////////////////////// ////////////////////////////////////////////////
@Override @Override
public void showShortToast(final String msg){ public void showShortToast(final String msg){
uiHandler.post(new Runnable(){ uiHandler.post(new Runnable(){
@@ -136,9 +113,6 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
}); });
} }
/////////////////////////////////////////
// MulticastEnabler interface methods. //
/////////////////////////////////////////
@Override @Override
public void enableMulticast(){ public void enableMulticast(){
Gdx.app.log(TAG, CLASS_NAME + ".enableMulticast() :: Requesting multicast lock."); Gdx.app.log(TAG, CLASS_NAME + ".enableMulticast() :: Requesting multicast lock.");
@@ -156,8 +130,11 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
} }
} }
////////////////////////////////////
// CVProcessor interface methods. //
////////////////////////////////////
@Override @Override
public CVData processFrame(byte[] frame, int w, int h) { public CVData findMarkersInFrame(byte[] frame, int w, int h) {
if(ocvOn){ if(ocvOn){
int codes[] = new int[15]; int codes[] = new int[15];
Bitmap tFrame, mFrame; Bitmap tFrame, mFrame;
@@ -168,8 +145,7 @@ public class MainActivity extends AndroidApplication implements Toaster, Multica
Mat outImg = new Mat(); Mat outImg = new Mat();
Utils.bitmapToMat(tFrame, inImg); Utils.bitmapToMat(tFrame, inImg);
//getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes); getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes);
findCalibrationPattern(inImg.getNativeObjAddr(), outImg.getNativeObjAddr());
Mat temp = new Mat(); Mat temp = new Mat();
Imgproc.cvtColor(outImg, temp, Imgproc.COLOR_BGR2RGB); Imgproc.cvtColor(outImg, temp, Imgproc.COLOR_BGR2RGB);