First commit.

2015-10-15 17:50:22 -04:30
commit 7328307104
27 changed files with 1808 additions and 0 deletions

71
.gitignore vendored Normal file

@@ -0,0 +1,71 @@
## Java
*.class
*.war
*.ear
hs_err_pid*
## GWT
war/
html/war/gwt_bree/
html/gwt-unitCache/
.apt_generated/
html/war/WEB-INF/deploy/
html/war/WEB-INF/classes/
.gwt/
gwt-unitCache/
www-test/
.gwt-tmp/
## Android Studio and Intellij and Android in general
android/libs/armeabi/
android/libs/armeabi-v7a/
android/libs/x86/
android/gen/
.idea/
*.ipr
*.iws
*.iml
out/
com_crashlytics_export_strings.xml
## Eclipse
.classpath
.project
.metadata
**/bin/
tmp/
*.tmp
*.bak
*.swp
*~.nib
local.properties
.settings/
.loadpath
.externalToolBuilders/
*.launch
## NetBeans
**/nbproject/private/
build/
nbbuild/
dist/
nbdist/
nbactions.xml
nb-configuration.xml
## Gradle
.gradle
gradle-app.setting
build/
## OS Specific
.DS_Store
## C++
*.so
*.o
## Emacs backup files
*~

22
LICENSE Normal file

@@ -0,0 +1,22 @@
Copyright (c) 2015, Miguel Angel Astor Romero
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

9
README.md Executable file

@@ -0,0 +1,9 @@
EVI - AR Demo
=============
A simple application for testing augmented reality algorithms using OpenCV. The
algorithms are based on those presented by Daniel Lelis Baggio in the book
"Mastering OpenCV with Practical Computer Vision Projects". The original code for
the book can be found in [this GitHub repository][1].
[1]: https://github.com/MasteringOpenCV/code

54
build.gradle Normal file

@@ -0,0 +1,54 @@
buildscript {
repositories {
mavenCentral()
maven { url "https://oss.sonatype.org/content/repositories/snapshots/" }
}
dependencies {
}
}
allprojects {
apply plugin: "eclipse"
apply plugin: "idea"
version = '1.0'
ext {
appName = "EVI07 - Augmented Reality"
gdxVersion = '1.7.0'
roboVMVersion = '1.8.0'
box2DLightsVersion = '1.4'
ashleyVersion = '1.6.0'
aiVersion = '1.6.0'
}
repositories {
mavenCentral()
maven { url "https://oss.sonatype.org/content/repositories/snapshots/" }
maven { url "https://oss.sonatype.org/content/repositories/releases/" }
}
}
project(":desktop") {
apply plugin: "java"
dependencies {
compile project(":core")
compile "com.badlogicgames.gdx:gdx-backend-lwjgl:$gdxVersion"
compile "com.badlogicgames.gdx:gdx-platform:$gdxVersion:natives-desktop"
compile fileTree(dir: 'libs', include: '*.jar')
}
}
project(":core") {
apply plugin: "java"
dependencies {
compile "com.badlogicgames.gdx:gdx:$gdxVersion"
}
}
tasks.eclipse.doLast {
delete ".project"
}

BIN
core/assets/badlogic.jpg Normal file

Binary file not shown.


BIN
core/assets/monkey.g3db Normal file

Binary file not shown.

11
core/build.gradle Normal file

@@ -0,0 +1,11 @@
apply plugin: "java"
sourceCompatibility = 1.6
[compileJava, compileTestJava]*.options*.encoding = 'UTF-8'
sourceSets.main.java.srcDirs = [ "src/" ]
eclipse.project {
name = appName + "-core"
}

54
core/src/ve/ucv/ciens/icaro/ardemo/CustomPerspectiveCamera.java Normal file

@@ -0,0 +1,54 @@
package ve.ucv.ciens.icaro.ardemo;
import com.badlogic.gdx.graphics.PerspectiveCamera;
import com.badlogic.gdx.math.Matrix4;
import com.badlogic.gdx.math.Vector3;
public class CustomPerspectiveCamera extends PerspectiveCamera{
private final Vector3 tmp = new Vector3();
public CustomPerspectiveCamera(float fieldOfView, float viewportWidth, float viewportHeight){
super(fieldOfView, viewportWidth, viewportHeight);
update();
}
public void update(Matrix4 customProjection){
this.update(customProjection, true);
}
public void update(Matrix4 customProjection, boolean updateFrustum){
projection.set(customProjection);
view.setToLookAt(position, tmp.set(position).add(direction), up);
combined.set(projection).mul(view);
if(updateFrustum){
invProjectionView.set(combined).inv();
frustum.update(invProjectionView);
}
}
public void setCustomARProjectionMatrix(final float focalPointX, final float focalPointY, final float cameraCenterX, final float cameraCenterY, final float near, final float far, final float w, final float h){
final float FAR_PLUS_NEAR = far + near;
final float FAR_LESS_NEAR = far - near;
projection.val[Matrix4.M00] = -2.0f * focalPointX / w;
projection.val[Matrix4.M10] = 0.0f;
projection.val[Matrix4.M20] = 0.0f;
projection.val[Matrix4.M30] = 0.0f;
projection.val[Matrix4.M01] = 0.0f;
projection.val[Matrix4.M11] = 2.0f * focalPointY / h;
projection.val[Matrix4.M21] = 0.0f;
projection.val[Matrix4.M31] = 0.0f;
projection.val[Matrix4.M02] = 2.0f * cameraCenterX / w - 1.0f;
projection.val[Matrix4.M12] = 2.0f * cameraCenterY / h - 1.0f;
projection.val[Matrix4.M22] = -FAR_PLUS_NEAR / FAR_LESS_NEAR;
projection.val[Matrix4.M32] = -1.0f;
projection.val[Matrix4.M03] = 0.0f;
projection.val[Matrix4.M13] = 0.0f;
projection.val[Matrix4.M23] = -2.0f * far * near / FAR_LESS_NEAR;
projection.val[Matrix4.M33] = 0.0f;
}
}

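For reference, setCustomARProjectionMatrix() above writes the standard pinhole-to-OpenGL projection into the Matrix4. With focal lengths $f_x, f_y$, principal point $(c_x, c_y)$, viewport $w \times h$ and near/far planes $n, f$, the assignments amount to the row-major matrix

$$P = \begin{pmatrix} -\frac{2 f_x}{w} & 0 & \frac{2 c_x}{w} - 1 & 0 \\ 0 & \frac{2 f_y}{h} & \frac{2 c_y}{h} - 1 & 0 \\ 0 & 0 & -\frac{f + n}{f - n} & -\frac{2 f n}{f - n} \\ 0 & 0 & -1 & 0 \end{pmatrix}$$

where the negated first entry mirrors the x axis, matching the sign used in the code.
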
102
core/src/ve/ucv/ciens/icaro/ardemo/EviDemo.java Normal file

@@ -0,0 +1,102 @@
package ve.ucv.ciens.icaro.ardemo;
import ve.ucv.ciens.icaro.ardemo.ImageProcessor.CalibrationData;
import ve.ucv.ciens.icaro.ardemo.ImageProcessor.MarkerData;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
public class EviDemo extends ApplicationAdapter {
private static final String TAG = "NXTAR_ANDROID_MAIN";
private static final String CLASS_NAME = EviDemo.class.getSimpleName();
private ImageProcessor cvProc;
private Texture tex;
private Pixmap frame;
private SpriteBatch batch;
private MarkerData data;
private CalibrationData calib;
private float[][] calibrationSamples;
private int lastSampleTaken;
public EviDemo(ImageProcessor proc) {
super();
tex = null;
cvProc = proc;
frame = null;
lastSampleTaken = 0;
calibrationSamples = new float[ProjectConstants.CALIBRATION_SAMPLES][];
for(int i = 0; i < calibrationSamples.length; i++){
calibrationSamples[i] = new float[ProjectConstants.CALIBRATION_PATTERN_POINTS * 2];
}
}
@Override
public void create () {
batch = new SpriteBatch();
}
@Override
public void render () {
Gdx.gl.glClearColor(1, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
if(cvProc.isCameraCalibrated()) {
data = cvProc.findMarkersInFrame();
if(data != null) {
frame = new Pixmap(data.outFrame, 0, data.outFrame.length);
tex = new Texture(frame);
batch.begin();
batch.draw(tex, 0, 0);
batch.end();
frame.dispose();
tex.dispose();
}
} else {
calib = cvProc.findCalibrationPattern();
if(calib != null){
if(!cvProc.isCameraCalibrated() && calib.calibrationPoints != null){
Gdx.app.log(TAG, CLASS_NAME + ".render(): Sample taken.");
// Save the calibration points to the samples array.
for(int i = 0; i < calib.calibrationPoints.length; i += 2){
Gdx.app.log(TAG, CLASS_NAME + ".render(): Value " + Integer.toString(i) + " = (" + Float.toString(calib.calibrationPoints[i]) + ", " + Float.toString(calib.calibrationPoints[i + 1]) + ")");
calibrationSamples[lastSampleTaken][i] = calib.calibrationPoints[i];
calibrationSamples[lastSampleTaken][i + 1] = calib.calibrationPoints[i + 1];
}
// Move to the next sample.
lastSampleTaken++;
// If enough samples have been taken then calibrate the camera.
if(lastSampleTaken == ProjectConstants.CALIBRATION_SAMPLES){
Gdx.app.log(TAG, CLASS_NAME + "render(): Last sample taken.");
cvProc.calibrateCamera(calibrationSamples);
}
}
frame = new Pixmap(calib.outFrame, 0, calib.outFrame.length);
tex = new Texture(frame);
batch.begin();
batch.draw(tex, 0, 0);
batch.end();
frame.dispose();
tex.dispose();
}
}
}
}

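Note that render() above decodes the JPEG frame into a brand-new Pixmap and Texture on every frame and disposes them right after drawing. A minimal sketch of a helper that instead reuses a single texture slot between frames (assumes only libGDX; the class name FrameRenderer is hypothetical and not part of this commit):

package ve.ucv.ciens.icaro.ardemo;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
public class FrameRenderer {
private Texture tex = null;
/** Decodes a JPEG-encoded frame and draws it, replacing the previous frame's texture. */
public void drawFrame(SpriteBatch batch, byte[] jpeg) {
Pixmap pixmap = new Pixmap(jpeg, 0, jpeg.length);
if (tex != null) tex.dispose(); // release the previous frame's GPU texture
tex = new Texture(pixmap);
pixmap.dispose(); // pixel data now lives on the GPU
batch.begin();
batch.draw(tex, 0, 0);
batch.end();
}
/** Call from ApplicationAdapter.dispose(). */
public void dispose() {
if (tex != null) tex.dispose();
}
}
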
60
core/src/ve/ucv/ciens/icaro/ardemo/ImageProcessor.java Normal file

@@ -0,0 +1,60 @@
package ve.ucv.ciens.icaro.ardemo;
import com.badlogic.gdx.math.Matrix3;
import com.badlogic.gdx.math.Vector3;
public interface ImageProcessor {
public class MarkerData{
public byte[] outFrame;
public int[] markerCodes;
public Vector3[] translationVectors;
public Matrix3[] rotationMatrices;
}
public class CalibrationData{
public byte[] outFrame;
public float[] calibrationPoints;
}
/**
* <p>Finds up to {@link ProjectConstants#MAXIMUM_NUMBER_OF_MARKERS} markers in the current
* input frame and returns their codes and pose estimations in a MarkerData structure. The
* markers are highlighted in the output image.</p>
*
* @return A data structure containing the processed output image, the
* detected marker codes and their respective locations.
*/
public MarkerData findMarkersInFrame();
/**
* <p>Attempts to detect a checkerboard calibration pattern in the current input frame.
* If the pattern is found, the method returns an image with the pattern
* highlighted and the spatial location of the calibration points in the
* output data structure.</p>
*
* @return A data structure containing the processed output image and the
* location of the calibration points. If the pattern was not found, the returned
* calibration points array is null.
*/
public CalibrationData findCalibrationPattern();
/**
* <p>Obtains the intrinsic camera parameters necessary for calibration.</p>
*/
public boolean calibrateCamera(float[][] calibrationSamples);
/**
* <p>Indicates whether OpenCV has been successfully initialized and used
* to obtain the camera parameters for calibration.</p>
*
* @return True if and only if OpenCV initialized successfully and calibrateCamera has been called previously.
*/
public boolean isCameraCalibrated();
public float getFocalPointX();
public float getFocalPointY();
public float getCameraCenterX();
public float getCameraCenterY();
}

24
core/src/ve/ucv/ciens/icaro/ardemo/ProjectConstants.java Normal file

@@ -0,0 +1,24 @@
package ve.ucv.ciens.icaro.ardemo;
public abstract class ProjectConstants{
public static final int SERVICE_DISCOVERY_PORT = 9988;
public static final int VIDEO_STREAMING_PORT = 9989;
public static final int MOTOR_CONTROL_PORT = 9990;
public static final int SENSOR_REPORT_PORT = 9991;
public static final int APP_CONTROL_PORT = 9992;
public static final String MULTICAST_ADDRESS = "230.0.0.1";
public static final int EXIT_SUCCESS = 0;
public static final int EXIT_FAILURE = 1;
public static final boolean DEBUG = false;
public static final int[] POWERS_OF_2 = {64, 128, 256, 512, 1024, 2048};
public static final float MAX_ABS_ROLL = 60.0f;
public static final String FONT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890:,";
public static final int MAXIMUM_NUMBER_OF_MARKERS = 5;
public static final int CALIBRATION_PATTERN_POINTS = 54;
public static final int CALIBRATION_SAMPLES = 10;
}

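Note that CALIBRATION_PATTERN_POINTS = 54 corresponds to the 6 × 9 inner-corner chessboard declared as CHESSBOARD_PATTERN_SIZE in desktop/jni/calib.cpp, and CALIBRATION_SAMPLES = 10 mirrors CALIBRATION_SAMPLES in desktop/jni/cv_proc.cpp; the Java and native sides size their sample arrays from these values, so the constants must stay in sync.
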
46
desktop/build.gradle Normal file

@@ -0,0 +1,46 @@
apply plugin: "java"
sourceCompatibility = 1.6
sourceSets.main.java.srcDirs = [ "src/" ]
project.ext.mainClassName = "ve.ucv.ciens.icaro.ardemo.desktop.DesktopLauncher"
project.ext.assetsDir = new File("../core/assets");
task run(dependsOn: classes, type: JavaExec) {
main = project.mainClassName
classpath = sourceSets.main.runtimeClasspath
standardInput = System.in
workingDir = project.assetsDir
ignoreExitValue = true
}
task dist(type: Jar) {
from files(sourceSets.main.output.classesDir)
from files(sourceSets.main.output.resourcesDir)
from {configurations.compile.collect {zipTree(it)}}
from files(project.assetsDir);
manifest {
attributes 'Main-Class': project.mainClassName
}
}
dist.dependsOn classes
eclipse {
project {
name = appName + "-desktop"
linkedResource name: 'assets', type: '2', location: 'PARENT-1-PROJECT_LOC/core/assets'
}
}
task afterEclipseImport(description: "Post processing after project generation", group: "IDE") {
doLast {
def classpath = new XmlParser().parse(file(".classpath"))
new Node(classpath, "classpathentry", [ kind: 'src', path: 'assets' ]);
def writer = new FileWriter(file(".classpath"))
def printer = new XmlNodePrinter(new PrintWriter(writer))
printer.setPreserveWhitespace(true)
printer.print(classpath)
}
}

15
desktop/jni/Makefile Executable file

@@ -0,0 +1,15 @@
TARGET = ../libevi_10.so
OBJECTS = cv_proc.o marker.o decode.o calib.o
JNIINC = /usr/lib/jvm/java-7-openjdk-amd64/include/
CXXFLAGS = -I$(JNIINC) `pkg-config --cflags opencv` -O3 -DDESKTOP -DNDEBUG -fPIC
LDLIBS = `pkg-config --libs opencv`
all: $(TARGET)
$(TARGET): $(OBJECTS)
g++ -o $(TARGET) -shared $(OBJECTS) $(CXXFLAGS) $(LDLIBS)
%.o: %.cpp
	g++ $(CXXFLAGS) -c -o $@ $<
clean:
$(RM) $(TARGET) $(OBJECTS)

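Note: the desktop launcher loads this library at startup via System.loadLibrary("evi_10") (see CVProcessor.java below), so the native code must be built with this Makefile before running the desktop project, and the resulting libevi_10.so, along with OpenCV's libopencv_java248, must be reachable through java.library.path.
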
76
desktop/jni/calib.cpp Normal file

@@ -0,0 +1,76 @@
#ifdef DESKTOP
#include <iostream>
#endif
#include "marker.hpp"
namespace nxtar{
/******************************************************************************
* PRIVATE CONSTANTS *
******************************************************************************/
/**
* Flags for the calibration pattern detection function.
*/
static const int PATTERN_DETECTION_FLAGS = cv::CALIB_CB_ADAPTIVE_THRESH +
cv::CALIB_CB_NORMALIZE_IMAGE +
cv::CALIB_CB_FAST_CHECK;
/**
* Size of the chessboard pattern image (columns, rows).
*/
static const cv::Size CHESSBOARD_PATTERN_SIZE(6, 9);
/**
* Size of a square cell in the calibration pattern.
*/
static const float SQUARE_SIZE = 1.0f;
/******************************************************************************
* PUBLIC API *
******************************************************************************/
bool findCalibrationPattern(points_vector & corners, cv::Mat & img){
bool patternfound;
cv::Mat gray;
// Convert the input image to grayscale and attempt to find the
// calibration pattern.
cv::cvtColor(img, gray, CV_BGR2GRAY);
patternfound = cv::findChessboardCorners(gray, CHESSBOARD_PATTERN_SIZE, corners, PATTERN_DETECTION_FLAGS);
// If the pattern was found then fix the detected points a bit.
if(patternfound)
cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1), TERM_CRITERIA);
// Render the detected pattern.
cv::drawChessboardCorners(img, CHESSBOARD_PATTERN_SIZE, cv::Mat(corners), patternfound);
return patternfound;
}
double getCameraParameters(cv::Mat & camera_matrix, cv::Mat & dist_coeffs, std::vector<points_vector> & image_points, cv::Size image_size){
std::vector<cv::Mat> rvecs, tvecs;
std::vector<points_vector_3D> object_points;
points_vector_3D corner_points;
// Build the reference object points vector.
for(int i = 0; i < CHESSBOARD_PATTERN_SIZE.height; i++){
for(int j = 0; j < CHESSBOARD_PATTERN_SIZE.width; j++){
corner_points.push_back(cv::Point3f(float( j * SQUARE_SIZE ), float( i * SQUARE_SIZE ), 0));
}
}
object_points.push_back(corner_points);
object_points.resize(image_points.size(), object_points[0]);
// Build a camera matrix.
camera_matrix = cv::Mat::eye(3, 3, CV_64F);
// Build the distortion coefficients matrix.
dist_coeffs = cv::Mat::zeros(8, 1, CV_64F);
// Calibrate and return the reprojection error.
return cv::calibrateCamera(object_points, image_points, image_size, camera_matrix, dist_coeffs, rvecs, tvecs, 0, TERM_CRITERIA);
}
}

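For reference, cv::calibrateCamera returns the root-mean-square reprojection error over all pattern views,

$$\text{RMS} = \sqrt{\frac{1}{N} \sum_{i=1}^{N} \lVert x_i - \hat{x}_i \rVert^2},$$

where the $x_i$ are the detected image points and the $\hat{x}_i$ their reprojections under the estimated camera model; as a common rule of thumb, values below roughly one pixel indicate a usable calibration.
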
185
desktop/jni/cv_proc.cpp Normal file

@@ -0,0 +1,185 @@
/*
* Copyright (C) 2014 Miguel Angel Astor Romero
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <stddef.h>
#include <cstdio>
#include "marker.hpp"
//#define LOG_ENABLED
#define MAX_MARKERS 5
#define TRANSLATION_VECTOR_POINTS 3
#define ROTATION_MATRIX_SIZE 9
#define POINTS_PER_CALIBRATION_SAMPLE 54
#define CALIBRATION_SAMPLES 10
#ifndef NDEBUG
#define log(TAG, MSG) (fprintf(stdout, "%s : %s\n", TAG, MSG))
#else
#define log(TAG, MSG) ;
#endif
const char * TAG = "CVPROC_NATIVE";
extern "C"{
/**
* JNI wrapper around the nxtar::getAllMarkers() method.
*/
JNIEXPORT void JNICALL Java_ve_ucv_ciens_icaro_ardemo_desktop_CVProcessor_getMarkerCodesAndLocations(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jintArray codes, jlong camMat, jlong distMat, jfloatArray translations, jfloatArray rotations){
char codeMsg[128];
std::vector<int> vCodes;
nxtar::markers_vector vMarkers;
cv::Mat temp;
// Get the native object addresses.
log(TAG, "getMarkerCodesAndLocations(): Requesting native data.");
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
cv::Mat& mCam = *(cv::Mat*)camMat;
cv::Mat& mDist = *(cv::Mat*)distMat;
jint * _codes = env->GetIntArrayElements(codes, 0);
jfloat * _tr = env->GetFloatArrayElements(translations, 0);
jfloat * _rt = env->GetFloatArrayElements(rotations, 0);
// Convert the input image to the BGR color space.
log(TAG, "getMarkerCodesAndLocations(): Converting color space before processing.");
cv::cvtColor(myuv, temp, CV_RGB2BGR);
// Find all markers in the input image.
log(TAG, "getMarkerCodesAndLocations(): Finding markers.");
nxtar::getAllMarkers(vMarkers, temp);
nxtar::estimateMarkerPosition(vMarkers, mCam, mDist);
// Copy the marker codes to the output vector.
log(TAG, "getMarkerCodesAndLocations(): Copying marker codes.");
for(size_t i = 0; i < vMarkers.size() && i < MAX_MARKERS; i++){
_codes[i] = (jint)vMarkers[i].code;
}
// Copy the geometric transformations to the output vectors.
for(size_t i = 0, p = 0; i < vMarkers.size() && i < MAX_MARKERS; i++, p += 3){
_tr[p ] = vMarkers[i].translation.at<jfloat>(0);
_tr[p + 1] = vMarkers[i].translation.at<jfloat>(1);
_tr[p + 2] = vMarkers[i].translation.at<jfloat>(2);
}
for(size_t k = 0; k < vMarkers.size() && k < MAX_MARKERS; k++){
for(int row = 0; row < 3; row++){
for(int col = 0; col < 3; col++){
_rt[col + (row * 3) + (9 * k)] = vMarkers[k].rotation.at<jfloat>(row, col);
}
}
}
// Clear marker memory.
vMarkers.clear();
// Convert the output image back to the RGB color space.
cv::cvtColor(temp, mbgr, CV_BGR2RGB);
// Release native data.
log(TAG, "getMarkerCodesAndLocations(): Releasing native data.");
env->ReleaseIntArrayElements(codes, _codes, 0);
env->ReleaseFloatArrayElements(translations, _tr, 0);
env->ReleaseFloatArrayElements(rotations, _rt, 0);
}
/**
* JNI wrapper around the nxtar::findCalibrationPattern() method.
*/
JNIEXPORT jboolean JNICALL Java_ve_ucv_ciens_icaro_ardemo_desktop_CVProcessor_findCalibrationPattern(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jfloatArray points){
nxtar::points_vector v_points;
bool found;
cv::Mat temp;
// Get the native object addresses.
log(TAG, "findCalibrationPattern(): Requesting native data.");
cv::Mat& myuv = *(cv::Mat*)addrMatIn;
cv::Mat& mbgr = *(cv::Mat*)addrMatOut;
jfloat * _points = env->GetFloatArrayElements(points, 0);
// Convert the input image to the BGR color space.
log(TAG, "findCalibrationPattern(): Converting color space before processing.");
cv::cvtColor(myuv, temp, CV_RGB2BGR);
// Find the calibration points in the input image.
log(TAG, "findCalibrationPattern(): Finding calibration pattern.");
found = nxtar::findCalibrationPattern(v_points, temp);
// If the points were found then save them to the output array.
if(found){
log(TAG, "findCalibrationPattern(): Copying calibration points.");
for(size_t i = 0, p = 0; i < v_points.size(); i++, p += 2){
_points[p ] = (jfloat)v_points[i].x;
_points[p + 1] = (jfloat)v_points[i].y;
}
}
// Convert the output image back to the RGB color space.
cv::cvtColor(temp, mbgr, CV_BGR2RGB);
// Release native data.
log(TAG, "findCalibrationPattern(): Releasing native data.");
env->ReleaseFloatArrayElements(points, _points, 0);
return (jboolean)found;
}
/**
* JNI wrapper around the nxtar::getCameraParameters() method.
*/
JNIEXPORT jdouble JNICALL Java_ve_ucv_ciens_icaro_ardemo_desktop_CVProcessor_calibrateCameraParameters(JNIEnv* env, jobject jobj, jlong addrMatIn, jlong addrMatOut, jlong addrMatFrame, jfloatArray points){
double error;
std::vector<nxtar::points_vector> imagePoints;
// Get native object addresses.
log(TAG, "calibrateCameraParameters(): Requesting native data.");
cv::Mat& mIn = *(cv::Mat*)addrMatIn;
cv::Mat& mOut = *(cv::Mat*)addrMatOut;
cv::Mat& mFrame = *(cv::Mat*)addrMatFrame;
jfloat * _points = env->GetFloatArrayElements(points, 0);
// Prepare the image points data structure.
log(TAG, "calibrateCameraParameters(): Preparing image points.");
for(int i = 0; i < CALIBRATION_SAMPLES; i++){
nxtar::points_vector tempVector;
for(int j = 0, p = 0; j < POINTS_PER_CALIBRATION_SAMPLE; j++, p += 2){
tempVector.push_back(cv::Point2f(_points[p], _points[p + 1]));
}
imagePoints.push_back(tempVector);
}
// Get the camera parameters.
log(TAG, "calibrateCameraParameters(): Getting camera parameters.");
error = nxtar::getCameraParameters(mIn, mOut, imagePoints, mFrame.size());
// Clear the image points.
log(TAG, "calibrateCameraParameters(): Clearing memory.");
for(int i = 0; i < imagePoints.size(); i++){
imagePoints[i].clear();
}
imagePoints.clear();
// Release native memory.
log(TAG, "calibrateCameraParameters(): Releasing native data.");
env->ReleaseFloatArrayElements(points, _points, 0);
// Return the calibration error as calculated by OpenCV.
return error;
}
} // extern "C"

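For marker $k$ (with $k < \texttt{MAX\_MARKERS}$), the wrappers above flatten the pose data so that translation component $j \in \{0, 1, 2\}$ lands at flat index $3k + j$ and the rotation entry at row $r$, column $c$ lands at $9k + 3r + c$; CVProcessor.java below unpacks both arrays with the same indexing.
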
463
desktop/jni/decode.cpp Executable file

@@ -0,0 +1,463 @@
#include <algorithm>
#include <utility>
#include <limits>
#ifdef DESKTOP
#include <iostream>
#endif
#include "marker.hpp"
namespace nxtar{
/******************************************************************************
* PRIVATE CONSTANTS *
******************************************************************************/
/**
* Minimal number of points in a contour.
*/
static const int MIN_POINTS = 40;
/**
* Minimal squared side length for a contour to be considered as a marker candidate.
*/
static const float MIN_CONTOUR_LENGTH = 0.1;
/**
* Color for rendering the marker outlines.
*/
static const cv::Scalar COLOR = cv::Scalar(255, 255, 255);
/******************************************************************************
* PRIVATE FUNCTION PROTOTYPES *
******************************************************************************/
static float perimeter(points_vector &);
static int hammDistMarker(cv::Mat);
static cv::Mat rotate(cv::Mat);
static int decodeMarker(cv::Mat &, int &);
static void renderMarkers(markers_vector &, cv::Mat &);
static void isolateMarkers(const contours_vector &, markers_vector &);
static void findContours(cv::Mat &, contours_vector &, int);
static void warpMarker(Marker &, cv::Mat &, cv::Mat &);
/******************************************************************************
* PUBLIC API *
******************************************************************************/
void getAllMarkers(markers_vector & valid_markers, cv::Mat & img){
int rotations = 0;
cv::Mat gray, thresh, cont, mark;
contours_vector contours;
markers_vector markers;
cv::Point2f point;
#ifdef DESKTOP
std::ostringstream oss;
#endif
valid_markers.clear();
// Find all marker candidates in the input image.
// 1) First, convert the image to grayscale.
// 2) Then, binarize the grayscale image.
// 3) Finally, identify all 4-sided figures in the binarized image.
cv::cvtColor(img, gray, CV_BGR2GRAY);
cv::adaptiveThreshold(gray, thresh, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 7, 7);
findContours(thresh, contours, MIN_POINTS);
isolateMarkers(contours, markers);
// Remove the perspective distortion from the detected marker candidates.
// Then attempt to decode them and push the valid ones into the valid
// markers vector.
for(int i = 0; i < markers.size(); i++){
warpMarker(markers[i], gray, mark);
int code = decodeMarker(mark, rotations);
if(code != -1){
markers[i].code = code;
// If the decoder detected the marker is rotated then reorder the points
// so that the orientation calculations always use the correct top of the marker.
while(rotations > 0){
point = markers[i].points.at(markers[i].points.size() - 1);
markers[i].points.pop_back();
markers[i].points.insert(markers[i].points.begin(), point);
rotations--;
}
// Rotate 180 degrees.
for(int r = 0; r < 2; r++){
point = markers[i].points.at(markers[i].points.size() - 1);
markers[i].points.pop_back();
markers[i].points.insert(markers[i].points.begin(), point);
}
valid_markers.push_back(markers[i]);
}
}
for(int i = 0; i < valid_markers.size(); i++){
#ifdef DESKTOP
// Render the detected valid markers with their codes for debugging
// purposes.
oss << valid_markers[i].code;
cv::putText(mark, oss.str(), cv::Point(5, 250), cv::FONT_HERSHEY_PLAIN, 2, cv::Scalar::all(128), 3, 8);
oss.str("");
oss.clear();
oss << "Marker[" << i << "]";
cv::imshow(oss.str(), mark);
oss.str("");
oss.clear();
#endif
// Refine the detected corners to subpixel accuracy to better
// approximate the markers.
cv::cornerSubPix(gray, valid_markers[i].points, cvSize(10, 10), cvSize(-1, -1), TERM_CRITERIA);
}
// Render the detected markers on top of the input image.
cont = cv::Mat::zeros(img.size(), CV_8UC3);
renderMarkers(valid_markers, cont);
img = img + cont;
// Clear the local vectors.
markers.clear();
contours.clear();
}
void estimateMarkerPosition(markers_vector & markers, cv::Mat & camMatrix, cv::Mat & distCoeff){
cv::Mat rVec, rAux, tAux;
cv::Mat_<float> tVec, rotation(3,3);
points_vector_3D objectPoints;
// Assemble a unitary CCW oriented reference polygon.
objectPoints.push_back(cv::Point3f(-0.5f, -0.5f, 0.0f));
objectPoints.push_back(cv::Point3f(-0.5f, 0.5f, 0.0f));
objectPoints.push_back(cv::Point3f( 0.5f, 0.5f, 0.0f));
objectPoints.push_back(cv::Point3f( 0.5f, -0.5f, 0.0f));
for(size_t i = 0; i < markers.size(); i++){
// Obtain the translation and rotation vectors.
cv::solvePnP(objectPoints, markers[i].points, camMatrix, distCoeff, rAux, tAux);
// Convert the obtained vectors to float.
rAux.convertTo(rVec, CV_32F);
tAux.convertTo(tVec, CV_32F);
// Convert the rotation vector to a rotation matrix.
cv::Rodrigues(rVec, rotation);
// Make the rotation and translation relative to the "camera" and save
// the results to the output marker.
markers[i].rotation = cv::Mat(rotation.t());
markers[i].translation = cv::Mat(-tVec);
}
}
/******************************************************************************
* PRIVATE HELPER FUNCTIONS *
******************************************************************************/
/**
* Find all contours in the input image and save them to the output
* vector.
*/
void findContours(cv::Mat & img, contours_vector & v, int minP){
contours_vector c;
// A contour is discarded if it possess less than the specified
// minimum number of points.
cv::findContours(img, c, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
v.clear();
for(size_t i = 0; i < c.size(); i++){
if(c[i].size() > minP){
v.push_back(c[i]);
}
}
}
/**
* Render the input marker vector onto the output image.
*/
void renderMarkers(markers_vector & v, cv::Mat & dst){
contours_vector cv;
// Extract the points that form every marker into a contours vector.
for(size_t i = 0; i < v.size(); i++){
std::vector<cv::Point> pv;
for(size_t j = 0; j < v[i].points.size(); ++j)
pv.push_back(cv::Point2f(v[i].points[j].x, v[i].points[j].y));
cv.push_back(pv);
}
// Render.
cv::drawContours(dst, cv, -1, COLOR, 1, CV_AA);
}
/**
* Identify and return all marker candidates.
*/
void isolateMarkers(const contours_vector & vc, markers_vector & vm){
std::vector<cv::Point> appCurve;
markers_vector posMarkers;
// For every detected contour.
for(size_t i = 0; i < vc.size(); ++i){
double eps = vc[i].size() * 0.05;
// Approximate the contour with a polygon.
cv::approxPolyDP(vc[i], appCurve, eps, true);
// If the polygon is not a quadrilateral then this is not a marker
// candidate.
if(appCurve.size() != 4 || !cv::isContourConvex(appCurve)) continue;
// Calculate the squared length of the shortest side of this
// candidate. If it is too short then discard it.
float minD = std::numeric_limits<float>::max();
for(int i = 0; i < 4; i++){
cv::Point side = appCurve[i] - appCurve[(i + 1) % 4];
float sqSideLen = side.dot(side);
minD = std::min(minD, sqSideLen);
}
if(minD < MIN_CONTOUR_LENGTH) continue;
// Save the marker and order its points counter-clockwise.
Marker m;
for(int i = 0; i < 4; i++)
m.points.push_back(cv::Point2f(appCurve[i].x, appCurve[i].y));
cv::Point v1 = m.points[1] - m.points[0];
cv::Point v2 = m.points[2] - m.points[0];
double o = (v1.x * v2.y) - (v1.y * v2.x);
if(o < 0.0) std::swap(m.points[1], m.points[3]);
posMarkers.push_back(m);
}
// Identify contours that are too close to each other to eliminate
// possible duplicates.
std::vector<std::pair<int, int> > tooNear;
for(size_t i = 0; i < posMarkers.size(); ++i){
const Marker & m1 = posMarkers[i];
for(size_t j = i + 1; j < posMarkers.size(); j++){
const Marker & m2 = posMarkers[j];
float dSq = 0.0f;
for(int c = 0; c < 4; c++){
cv::Point v = m1.points[c] - m2.points[c];
dSq += v.dot(v);
}
dSq /= 4.0f;
if(dSq < 100) tooNear.push_back(std::pair<int, int>(i, j));
}
}
// Mark one of every pair of duplicates to be discarded.
std::vector<bool> remMask(posMarkers.size(), false);
for(size_t i = 0; i < tooNear.size(); ++i){
float p1 = perimeter(posMarkers[tooNear[i].first].points);
float p2 = perimeter(posMarkers[tooNear[i].second].points);
size_t remInd;
if(p1 > p2) remInd = tooNear[i].second;
else remInd = tooNear[i].first;
remMask[remInd] = true;
}
// Save the candidates that survived the duplicates test.
vm.clear();
for(size_t i = 0; i < posMarkers.size(); ++i){
if(!remMask[i]) vm.push_back(posMarkers[i]);
}
}
/**
* Warp a marker image to remove its perspective distortion.
*/
void warpMarker(Marker & m, cv::Mat & in, cv::Mat & out){
cv::Mat bin;
cv::Size markerSize(350, 350);
points_vector v;
// Assemble a unitary reference polygon.
v.push_back(cv::Point2f(0,0));
v.push_back(cv::Point2f(markerSize.width-1,0));
v.push_back(cv::Point2f(markerSize.width-1,markerSize.height-1));
v.push_back(cv::Point2f(0,markerSize.height-1));
// Warp the input image's perspective to transform it into the reference
// polygon's perspective.
cv::Mat M = cv::getPerspectiveTransform(m.points, v);
cv::warpPerspective(in, bin, M, markerSize);
// Binarize the warped image into the output image.
cv::threshold(bin, out, 128, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
}
/**
* Calculate the hamming distance of a 5x5 bit matrix.
* Function by Daniel Lelis Baggio for "Mastering OpenCV with Practical Computer Vision Projects".
*/
int hammDistMarker(cv::Mat bits){
int ids[4][5] = {
{1,0,0,0,0},
{1,0,1,1,1},
{0,1,0,0,1},
{0,1,1,1,0}
};
int dist = 0;
for (int y = 0; y < 5; y++){
int minSum = 1e5;
for (int p = 0; p < 4; p++){
int sum = 0;
for (int x = 0; x < 5; x++){
sum += bits.at<uchar>(y, x) == ids[p][x] ? 0 : 1;
}
if(minSum > sum)
minSum = sum;
}
dist += minSum;
}
return dist;
}
/**
* Rotate a matrix by 90 degrees clockwise.
*/
cv::Mat rotate(cv::Mat in){
cv::Mat out;
in.copyTo(out);
for (int i = 0; i < in.rows; i++){
for (int j = 0; j < in.cols; j++){
out.at<uchar>(i, j) = in.at<uchar>(in.cols-j - 1, i);
}
}
return out;
}
/**
* Decode a marker image and return its code. Returns -1 if the image is
* not a valid marker.
*/
int decodeMarker(cv::Mat & marker, int & rotations){
bool found = false;
int code = 0;
cv::Mat bits;
rotations = 0;
// Verify that the outer rim of marker cells are all black.
for(int y = 0; y < 7; y++){
int inc = (y == 0 || y == 6) ? 1 : 6;
for(int x = 0; x < 7; x += inc){
int cX = x * 50;
int cY = y * 50;
cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
int nZ = cv::countNonZero(cell);
// If one of the rim cells is 50% white or more then this
// is not a valid marker.
if(nZ > (50 * 50) / 2) return -1;
}
}
// Create a 5x5 matrix to hold a simplified representation of this
// marker.
bits = cv::Mat::zeros(5, 5, CV_8UC1);
// For every cell in the marker flip its corresponding 'bit' in the
// bit matrix if it is at least 50% white.
for(int y = 0; y < 5; y++){
for(int x = 0; x < 5; x++){
int cX = (x + 1) * 50;
int cY = (y + 1) * 50;
cv::Mat cell = marker(cv::Rect(cX, cY, 50, 50));
int nZ = cv::countNonZero(cell);
if(nZ > (50 * 50) / 2) bits.at<uchar>(y, x) = 1;
}
}
// Calculate the hamming distance of the bit matrix and each of its
// 90 degree rotations to determine if this marker has a valid code.
if(hammDistMarker(bits) != 0){
for(int r = 1; r < 4; r++){
bits = rotate(bits);
rotations++;
if(hammDistMarker(bits) != 0) continue;
else{ found = true; break;}
}
}else found = true;
// If the code is valid then decode it to an int and return it.
if(found){
for(int y = 0; y < 5; y++){
code <<= 1;
if(bits.at<uchar>(y, 1))
code |= 1;
code <<= 1;
if(bits.at<uchar>(y, 3))
code |= 1;
}
return code;
}else
return -1;
}
/**
* Calculate the perimeter of a polygon defined as a vector of points.
*/
float perimeter(points_vector & p){
float per = 0.0f, dx, dy;
for(size_t i = 0; i < p.size(); ++i){
dx = p[i].x - p[(i + 1) % p.size()].x;
dy = p[i].y - p[(i + 1) % p.size()].y;
per += sqrt((dx * dx) + (dy * dy));
}
return per;
}
}

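Since decodeMarker() reads the data bits from columns 1 and 3 of the 5×5 bit matrix, each marker carries a 10-bit code (0–1023). A minimal sketch of the same bit extraction in Java (the class name MarkerCode is hypothetical, for illustration only):

public final class MarkerCode {
/** Reads the two data bits (columns 1 and 3) of each of the 5 rows,
* top row first, mirroring the final loop in decodeMarker(). */
static int decode(int[][] bits) {
int code = 0;
for (int y = 0; y < 5; y++) {
code = (code << 1) | bits[y][1];
code = (code << 1) | bits[y][3];
}
return code;
}
public static void main(String[] args) {
int[][] bits = {
{1, 0, 0, 0, 0},
{1, 0, 1, 1, 1},
{0, 1, 0, 0, 1},
{0, 1, 1, 1, 0},
{1, 0, 0, 0, 0},
};
System.out.println(decode(bits)); // rows yield 00 01 10 11 00 -> 108
}
}
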
15
desktop/jni/marker.cpp Normal file

@@ -0,0 +1,15 @@
#include "marker.hpp"
namespace nxtar{
/******************************************************************************
* MARKER CLASS METHODS. *
******************************************************************************/
/**
* Clear the points vector associated with this marker.
*/
Marker::~Marker(){
points.clear();
}
}

67
desktop/jni/marker.hpp Executable file

@@ -0,0 +1,67 @@
#ifndef MARKER_HPP
#define MARKER_HPP
#include <vector>
#include <opencv2/opencv.hpp>
namespace nxtar{
/**
* Termination criteria for OpenCV's iterative algorithms.
*/
const cv::TermCriteria TERM_CRITERIA = cv::TermCriteria(CV_TERMCRIT_EPS +
CV_TERMCRIT_ITER,
30,
0.1);
class Marker;
typedef std::vector<cv::Point2f> points_vector;
typedef std::vector<cv::Point3f> points_vector_3D;
typedef std::vector<std::vector<cv::Point> > contours_vector;
typedef std::vector<Marker> markers_vector;
class Marker{
public:
~Marker();
points_vector points;
cv::Mat translation;
cv::Mat rotation;
int code;
};
/**
* Detect all 5x5 markers in the input image and return their codes in the
* output vector.
*/
void getAllMarkers(markers_vector &, cv::Mat &);
/**
* Find a chessboard calibration pattern in the input image. Returns true
* if the pattern was found, false otherwise. The calibration points
* detected on the image are saved in the output vector.
*/
bool findCalibrationPattern(points_vector &, cv::Mat &);
/**
* Sets the camera matrix and the distortion coefficients for the camera
* that captured the input image points into the output matrices. Returns
* the reprojection error as returned by cv::calibrateCamera.
*/
double getCameraParameters(cv::Mat &,
cv::Mat &,
std::vector<points_vector> &,
cv::Size);
/**
* Obtains the geometric transformations necessary to move a reference
* unitary polygon to the position and rotation of the markers passed as input.
* The obtained transformations are given relative to a camera centered in the
* origin and are saved inside the input markers.
*/
void estimateMarkerPosition(markers_vector &, cv::Mat &, cv::Mat &);
}
#endif

BIN
desktop/libs/opencv-248.jar Normal file

Binary file not shown.

242
desktop/src/ve/ucv/ciens/icaro/ardemo/desktop/CVProcessor.java Normal file

@@ -0,0 +1,242 @@
package ve.ucv.ciens.icaro.ardemo.desktop;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import org.opencv.imgproc.Imgproc;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.math.Matrix3;
import com.badlogic.gdx.math.Vector3;
import ve.ucv.ciens.icaro.ardemo.ImageProcessor;
import ve.ucv.ciens.icaro.ardemo.ProjectConstants;
public class CVProcessor implements ImageProcessor {
private static final String TAG = "NXTAR_ANDROID_MAIN";
private static final String CLASS_NAME = CVProcessor.class.getSimpleName();
private static boolean ocvOn = false;
private Mat cameraMatrix;
private Mat distortionCoeffs;
private VideoCapture markerCap;
private VideoCapture calibCap;
private boolean cameraCalibrated;
private native void getMarkerCodesAndLocations(long inMat, long outMat, int[] codes, long camMat, long distMat, float[] translations, float[] rotations);
private native boolean findCalibrationPattern(long inMat, long outMat, float[] points);
private native double calibrateCameraParameters(long camMat, long distMat, long frame, float[] calibrationPoints);
static{
try{
System.loadLibrary("opencv_java248");
System.loadLibrary("evi_10");
ocvOn = true;
}catch(UnsatisfiedLinkError e){
e.printStackTrace();
ocvOn = false;
}
}
public static boolean isOcvOn() {
return ocvOn;
}
public CVProcessor(String[] arg) {
cameraCalibrated = false;
cameraMatrix = new Mat();
distortionCoeffs = new Mat();
markerCap = new VideoCapture(arg[0]);
calibCap = new VideoCapture(arg[1]);
}
@Override
public MarkerData findMarkersInFrame() {
if(ocvOn){
if(cameraCalibrated){
int[] codes = new int[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
float[] translations = new float[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS * 3];
float[] rotations = new float[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS * 9];
MarkerData data;
boolean success;
Mat inImg = new Mat();
Mat outImg = new Mat();
// Fill the codes array with -1 to indicate markers that were not found.
for(int i = 0; i < codes.length; i++)
codes[i] = -1;
success = markerCap.grab();
if(!success){
Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): Failed to fetch a frame.");
markerCap.set(1, 1);
return null;
}
markerCap.retrieve(inImg);
Imgproc.resize(inImg, inImg, new Size(), 0.5, 0.5, Imgproc.INTER_AREA);
// Find the markers in the input image.
getMarkerCodesAndLocations(
inImg.getNativeObjAddr(),
outImg.getNativeObjAddr(),
codes,
cameraMatrix.getNativeObjAddr(),
distortionCoeffs.getNativeObjAddr(),
translations,
rotations);
// Encode the output image as a JPEG image.
MatOfByte buf = new MatOfByte();
Highgui.imencode(".jpg", outImg, buf);
// Create and fill the output data structure.
data = new MarkerData();
data.outFrame = buf.toArray();
data.markerCodes = codes;
data.rotationMatrices = new Matrix3[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
data.translationVectors = new Vector3[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
for(int i = 0, p = 0; i < ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS; i++, p += 3){
data.translationVectors[i] = new Vector3(translations[p], translations[p + 1], translations[p + 2]);
}
for(int k = 0; k < ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS; k++){
data.rotationMatrices[k] = new Matrix3();
for(int row = 0; row < 3; row++){
for(int col = 0; col < 3; col++){
data.rotationMatrices[k].val[col + (row * 3)] = rotations[col + (row * 3) + (9 * k)];
}
}
}
return data;
}else{
Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): The camera has not been calibrated.");
return null;
}
}else{
Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): OpenCV is not initialized.");
return null;
}
}
@Override
public CalibrationData findCalibrationPattern() {
if(ocvOn){
boolean found;
float points[] = new float[ProjectConstants.CALIBRATION_PATTERN_POINTS * 2];
boolean success;
Mat inImg = new Mat(), outImg = new Mat();
CalibrationData data = new CalibrationData();
// Decode the input frame and convert it to an OpenCV Matrix.
success = calibCap.grab();
if(!success){
calibCap.set(1, 1);
return null;
}
calibCap.retrieve(inImg);
Imgproc.resize(inImg, inImg, new Size(), 0.5, 0.5, Imgproc.INTER_AREA);
// Attempt to find the calibration pattern in the input frame.
found = findCalibrationPattern(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), points);
// Encode the output image as a JPEG image.
MatOfByte buf = new MatOfByte();
Highgui.imencode(".jpg", outImg, buf);
// Prepare the output data structure.
data.outFrame = buf.toArray();
data.calibrationPoints = found ? points : null;
return data;
}else{
Gdx.app.debug(TAG, CLASS_NAME + ".findCalibrationPattern(): OpenCV is not initialized.");
return null;
}
}
@Override
public boolean calibrateCamera(float[][] calibrationSamples) {
if(ocvOn){
float[] calibrationPoints = new float[ProjectConstants.CALIBRATION_PATTERN_POINTS * 2 * ProjectConstants.CALIBRATION_SAMPLES];
int w = ProjectConstants.CALIBRATION_PATTERN_POINTS * 2;
boolean success;
Mat inImg = new Mat();
// Save the calibration points in a one-dimensional array for easier parameter passing
// to the native code.
for(int i = 0; i < ProjectConstants.CALIBRATION_SAMPLES; i++){
for(int j = 0, p = 0; j < ProjectConstants.CALIBRATION_PATTERN_POINTS; j++, p += 2){
calibrationPoints[p + (w * i)] = calibrationSamples[i][p];
calibrationPoints[(p + 1) + (w * i)] = calibrationSamples[i][p + 1];
}
}
// Decode the input image and convert it to an OpenCV matrix.
success = calibCap.grab();
if(!success){
calibCap.set(1, 1);
return false;
}
calibCap.retrieve(inImg);
Imgproc.resize(inImg, inImg, new Size(), 0.5, 0.5, Imgproc.INTER_AREA);
// Attempt to obtain the camera parameters.
double error = calibrateCameraParameters(
cameraMatrix.getNativeObjAddr(),
distortionCoeffs.getNativeObjAddr(),
inImg.getNativeObjAddr(),
calibrationPoints);
Gdx.app.log(TAG, CLASS_NAME + "calibrateCamera(): calibrateCameraParameters retured " + Double.toString(error));
cameraCalibrated = true;
return cameraCalibrated;
}else{
Gdx.app.debug(TAG, CLASS_NAME + ".calibrateCamera(): OpenCV is not ready or failed to load.");
return false;
}
}
@Override
public boolean isCameraCalibrated() {
return ocvOn && cameraCalibrated;
}
@Override
public float getFocalPointX() {
return ocvOn && cameraCalibrated ? (float)cameraMatrix.get(0, 0)[0] : 0.0f;
}
@Override
public float getFocalPointY() {
return ocvOn && cameraCalibrated ? (float)cameraMatrix.get(1, 1)[0] : 0.0f;
}
@Override
public float getCameraCenterX() {
return ocvOn && cameraCalibrated ? (float)cameraMatrix.get(0, 2)[0] : 0.0f;
}
@Override
public float getCameraCenterY() {
return ocvOn && cameraCalibrated ? (float)cameraMatrix.get(1, 2)[0] : 0.0f;
}
}

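Note: when grab() fails above, both capture paths call VideoCapture.set(1, 1); property id 1 is CV_CAP_PROP_POS_FRAMES in the OpenCV 2.4 Java bindings, so the capture is rewound to the start of the video file and the clip loops on the next call.
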
28
desktop/src/ve/ucv/ciens/icaro/ardemo/desktop/DesktopLauncher.java Normal file

@@ -0,0 +1,28 @@
package ve.ucv.ciens.icaro.ardemo.desktop;
import ve.ucv.ciens.icaro.ardemo.EviDemo;
import com.badlogic.gdx.backends.lwjgl.LwjglApplication;
import com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration;
public class DesktopLauncher{
private static final String TAG = "NXTAR_ANDROID_MAIN";
private static final String CLASS_NAME = DesktopLauncher.class.getSimpleName();
public static void main (String[] arg) {
if(!CVProcessor.isOcvOn()){
throw new RuntimeException(TAG + " : " + CLASS_NAME + ": OpenCV failed to load.");
}
if(arg.length != 2) {
System.err.println("Usage: EVI07 <markers video file> <calibration video file>");
System.exit(1);
}
LwjglApplicationConfiguration config = new LwjglApplicationConfiguration();
config.width = 640;
config.height = 360;
new LwjglApplication(new EviDemo(new CVProcessor(arg)), config);
}
}

3
gradle.properties Normal file

@@ -0,0 +1,3 @@
org.gradle.daemon=true
org.gradle.jvmargs=-Xms128m -Xmx512m
org.gradle.configureondemand=true

BIN
gradle/wrapper/gradle-wrapper.jar vendored Normal file

Binary file not shown.

6
gradle/wrapper/gradle-wrapper.properties vendored Normal file

@@ -0,0 +1,6 @@
#Sat Sep 21 13:08:26 CEST 2013
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=http\://services.gradle.org/distributions/gradle-2.4-all.zip

164
gradlew vendored Executable file

@@ -0,0 +1,164 @@
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# For Cygwin, ensure paths are in UNIX format before anything is touched.
if $cygwin ; then
[ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >&-
APP_HOME="`pwd -P`"
cd "$SAVED" >&-
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"

90
gradlew.bat vendored Normal file

@@ -0,0 +1,90 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windowz variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

1
settings.gradle Normal file

@@ -0,0 +1 @@
include 'desktop', 'core'