From 1146a260b6440f5b90c230b67d768ff0298d433e Mon Sep 17 00:00:00 2001 From: unknown Date: Mon, 5 May 2014 12:32:06 -0430 Subject: [PATCH] Added intradocumentation. --- .../nxtar/states/CameraCalibrationState.java | 11 +++++- .../ciens/ccg/nxtar/states/InGameState.java | 36 ++++++++++--------- .../ccg/nxtar/utils/ProjectConstants.java | 32 ++++++++--------- 3 files changed, 45 insertions(+), 34 deletions(-) diff --git a/src/ve/ucv/ciens/ccg/nxtar/states/CameraCalibrationState.java b/src/ve/ucv/ciens/ccg/nxtar/states/CameraCalibrationState.java index 3bd1f2a..081705e 100644 --- a/src/ve/ucv/ciens/ccg/nxtar/states/CameraCalibrationState.java +++ b/src/ve/ucv/ciens/ccg/nxtar/states/CameraCalibrationState.java @@ -205,33 +205,41 @@ public class CameraCalibrationState extends BaseState{ if(backgroundShader != null) core.batch.setShader(null); }core.batch.end(); - // Fetch the current video frame and find the calibration pattern in it. + // Fetch the current video frame. frame = frameMonitor.getCurrentFrame(); + // Apply the undistortion method if the camera has been calibrated already. if(core.cvProc.cameraIsCalibrated()){ frame = core.cvProc.undistortFrame(frame); } + // Find the calibration points in the video frame. CVCalibrationData data = core.cvProc.findCalibrationPattern(frame); + // Disable the sampling button if the calibration pattern was not found. if(data.calibrationPoints != null && !core.cvProc.cameraIsCalibrated()){ takeSampleButton.setDisabled(false); }else{ takeSampleButton.setDisabled(true); } + // If the user requested a sample be taken. if(takeSample && !core.cvProc.cameraIsCalibrated() && data.calibrationPoints != null){ + // Disable sample taking. takeSample = false; Gdx.app.log(TAG, CLASS_NAME + ".render(): Sample taken."); + // Save the calibration points to the samples array. 
for(int i = 0; i < data.calibrationPoints.length; i += 2){ Gdx.app.log(TAG, CLASS_NAME + ".render(): Value " + Integer.toString(i) + " = (" + Float.toString(data.calibrationPoints[i]) + ", " + Float.toString(data.calibrationPoints[i + 1]) + ")"); calibrationSamples[lastSampleTaken][i] = data.calibrationPoints[i]; calibrationSamples[lastSampleTaken][i + 1] = data.calibrationPoints[i + 1]; } + // Move to the next sample. lastSampleTaken++; + // If enough samples have been taken then calibrate the camera. if(lastSampleTaken == ProjectConstants.CALIBRATION_SAMPLES){ Gdx.app.log(TAG, CLASS_NAME + "render(): Last sample taken."); @@ -285,6 +293,7 @@ public class CameraCalibrationState extends BaseState{ takeSampleButton.draw(core.batch, 1.0f); }core.batch.end(); }else{ + // TODO: Render OUYA gui. } // Save this frame as previous to avoid processing the same frame twice when network latency is high. diff --git a/src/ve/ucv/ciens/ccg/nxtar/states/InGameState.java b/src/ve/ucv/ciens/ccg/nxtar/states/InGameState.java index 798d916..583fe95 100644 --- a/src/ve/ucv/ciens/ccg/nxtar/states/InGameState.java +++ b/src/ve/ucv/ciens/ccg/nxtar/states/InGameState.java @@ -129,6 +129,7 @@ public class InGameState extends BaseState{ motorGamepadButtonPressed[5] = false; motorGamepadButtonPressed[6] = false; + // Set up the background. backgroundTexture = new Texture(Gdx.files.internal("data/gfx/textures/tile_aqua.png")); backgroundTexture.setWrap(TextureWrap.Repeat, TextureWrap.Repeat); backgroundTexture.setFilter(TextureFilter.Linear, TextureFilter.Linear); @@ -136,6 +137,7 @@ public class InGameState extends BaseState{ background.setSize(Gdx.graphics.getWidth(), Gdx.graphics.getHeight()); background.setPosition(-(Gdx.graphics.getWidth() / 2), -(Gdx.graphics.getHeight() / 2)); + // Set up the shader. 
backgroundShader = new ShaderProgram(Gdx.files.internal(SHADER_PATH + ".vert"), Gdx.files.internal(SHADER_PATH + ".frag")); if(!backgroundShader.isCompiled()){ Gdx.app.error(TAG, CLASS_NAME + ".MainMenuStateBase() :: Failed to compile the background shader."); @@ -154,11 +156,13 @@ public class InGameState extends BaseState{ byte[] prevFrame = null; Size dimensions = null; CVMarkerData data; + TextureRegion region; + // Clear the screen. Gdx.gl.glClearColor(1, 1, 1, 1); Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT); - Gdx.app.log(TAG, CLASS_NAME + ".render(): Frame buffer cleared."); + // Render the background. core.batch.setProjectionMatrix(pixelPerfectCamera.combined); core.batch.begin();{ if(backgroundShader != null){ @@ -168,35 +172,33 @@ public class InGameState extends BaseState{ background.draw(core.batch); if(backgroundShader != null) core.batch.setShader(null); }core.batch.end(); - Gdx.app.log(TAG, CLASS_NAME + ".render(): Background drawn."); + // Fetch the current video frame. frame = frameMonitor.getCurrentFrame(); + // Apply the undistortion method if the camera has been calibrated already. if(core.cvProc.cameraIsCalibrated()){ frame = core.cvProc.undistortFrame(frame); } + // Attempt to find the markers in the current video frame. data = core.cvProc.findMarkersInFrame(frame); - Gdx.app.log(TAG, CLASS_NAME + ".render(): Frame processed."); - - /*if(data != null){ - for(int i = 0; i < data.markerCodes.length; i++){ - Gdx.app.log(TAG, CLASS_NAME + String.format(".render(): Marker code[%d] = %d", i, data.markerCodes[i])); - } - }*/ + // If a valid frame was fetched. if(data != null && data.outFrame != null && !Arrays.equals(frame, prevFrame)){ + // Decode the video frame. 
dimensions = frameMonitor.getFrameDimensions(); videoFrame = new Pixmap(data.outFrame, 0, dimensions.getWidth() * dimensions.getHeight()); videoFrameTexture = new Texture(videoFrame); videoFrameTexture.setFilter(TextureFilter.Linear, TextureFilter.Linear); videoFrame.dispose(); - Gdx.app.log(TAG, CLASS_NAME + ".render(): Texture created."); - - TextureRegion region = new TextureRegion(videoFrameTexture, 0, 0, dimensions.getWidth(), dimensions.getHeight()); + // Convert the decoded frame into a renderable texture. + region = new TextureRegion(videoFrameTexture, 0, 0, dimensions.getWidth(), dimensions.getHeight()); renderableVideoFrame = new Sprite(region); renderableVideoFrame.setOrigin(renderableVideoFrame.getWidth() / 2, renderableVideoFrame.getHeight() / 2); + + // Set the position and orientation of the renderable video frame. if(!Ouya.runningOnOuya){ renderableVideoFrame.setSize(1.0f, renderableVideoFrame.getHeight() / renderableVideoFrame.getWidth() ); renderableVideoFrame.rotate90(true); @@ -207,22 +209,24 @@ public class InGameState extends BaseState{ renderableVideoFrame.rotate90(true); renderableVideoFrame.translate(-renderableVideoFrame.getWidth() / 2, -renderableVideoFrame.getHeight() / 2); } - Gdx.app.log(TAG, CLASS_NAME + ".render(): Texture resized and positioned."); + // Set the correct camera for the device. if(!Ouya.runningOnOuya){ core.batch.setProjectionMatrix(camera.combined); }else{ core.batch.setProjectionMatrix(pixelPerfectCamera.combined); } + + // Render the video frame. core.batch.begin();{ renderableVideoFrame.draw(core.batch); }core.batch.end(); - Gdx.app.log(TAG, CLASS_NAME + ".render(): Texture drawn."); + // Clear the video frame from memory. videoFrameTexture.dispose(); - Gdx.app.log(TAG, CLASS_NAME + ".render(): Texture released."); } + // Render the interface buttons. 
if(!Ouya.runningOnOuya){ core.batch.setProjectionMatrix(pixelPerfectCamera.combined); core.batch.begin();{ @@ -236,8 +240,8 @@ public class InGameState extends BaseState{ }core.batch.end(); } + // Save this frame as previous to avoid processing the same frame twice when network latency is high. prevFrame = frame; - Gdx.app.log(TAG, CLASS_NAME + ".render(): Render complete."); } @Override diff --git a/src/ve/ucv/ciens/ccg/nxtar/utils/ProjectConstants.java b/src/ve/ucv/ciens/ccg/nxtar/utils/ProjectConstants.java index c2d7173..ed0090a 100644 --- a/src/ve/ucv/ciens/ccg/nxtar/utils/ProjectConstants.java +++ b/src/ve/ucv/ciens/ccg/nxtar/utils/ProjectConstants.java @@ -18,28 +18,26 @@ package ve.ucv.ciens.ccg.nxtar.utils; import com.badlogic.gdx.controllers.mappings.Ouya; public abstract class ProjectConstants{ - public static final int SERVICE_DISCOVERY_PORT = 9988; - public static final int VIDEO_STREAMING_PORT = 9989; - public static final int MOTOR_CONTROL_PORT = 9990; - public static final int SENSOR_REPORT_PORT = 9991; - public static final int APP_CONTROL_PORT = 9992; + public static final int SERVICE_DISCOVERY_PORT = 9988; + public static final int VIDEO_STREAMING_PORT = 9989; + public static final int MOTOR_CONTROL_PORT = 9990; + public static final int SENSOR_REPORT_PORT = 9991; + public static final int APP_CONTROL_PORT = 9992; + public static final String MULTICAST_ADDRESS = "230.0.0.1"; - public static final String MULTICAST_ADDRESS = "230.0.0.1"; + public static final int EXIT_SUCCESS = 0; + public static final int EXIT_FAILURE = 1; - public static final int EXIT_SUCCESS = 0; - public static final int EXIT_FAILURE = 1; + public static final boolean DEBUG = true; - public static final boolean DEBUG = true; + public static final int[] POWERS_OF_2 = {64, 128, 256, 512, 1024, 2048}; - public static final int[] POWERS_OF_2 = {64, 128, 256, 512, 1024, 2048}; + public static final float OVERSCAN; + public static final int MENU_BUTTON_FONT_SIZE; + public static 
final String FONT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"; - public static final float OVERSCAN; - public static final int MENU_BUTTON_FONT_SIZE; - - public static final String FONT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"; - - public static final int CALIBRATION_PATTERN_POINTS = 54; - public static final int CALIBRATION_SAMPLES = 10; + public static final int CALIBRATION_PATTERN_POINTS = 54; + public static final int CALIBRATION_SAMPLES = 10; static{ OVERSCAN = Ouya.runningOnOuya ? 0.9f : 1.0f;