diff --git a/.github/workflows/build_ci.yml b/.github/workflows/build_ci.yml
index 7d086724..561e7b6b 100644
--- a/.github/workflows/build_ci.yml
+++ b/.github/workflows/build_ci.yml
@@ -4,6 +4,7 @@ on: [push, pull_request]
jobs:
test_linux:
+ if: false
runs-on: ubuntu-latest
steps:
diff --git a/EOCV-Sim/build.gradle b/EOCV-Sim/build.gradle
index f206f3c2..ee8165d4 100644
--- a/EOCV-Sim/build.gradle
+++ b/EOCV-Sim/build.gradle
@@ -24,7 +24,7 @@ publishing {
}
}
-ext.kotest_version = '4.4.3'
+ext.kotest_version = '5.7.2'
test {
useJUnitPlatform()
@@ -51,13 +51,13 @@ dependencies {
implementation "com.github.deltacv.AprilTagDesktop:AprilTagDesktop:$apriltag_plugin_version"
implementation 'info.picocli:picocli:4.6.1'
- implementation 'com.google.code.gson:gson:2.8.7'
+ implementation 'com.google.code.gson:gson:2.8.9'
implementation "io.github.classgraph:classgraph:$classgraph_version"
implementation 'com.formdev:flatlaf:1.2'
implementation 'com.formdev:flatlaf-intellij-themes:1.2'
- implementation 'net.lingala.zip4j:zip4j:2.8.0'
+ implementation 'net.lingala.zip4j:zip4j:2.10.0'
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-core:$kotlinx_coroutines_version"
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-swing:$kotlinx_coroutines_version"
@@ -96,4 +96,4 @@ task(writeBuildClassJava) {
"}"
}
-build.dependsOn writeBuildClassJava
\ No newline at end of file
+build.dependsOn writeBuildClassJava
diff --git a/EOCV-Sim/src/main/resources/templates/default_workspace.zip b/EOCV-Sim/src/main/resources/templates/default_workspace.zip
index cb37d808..2afb2646 100644
Binary files a/EOCV-Sim/src/main/resources/templates/default_workspace.zip and b/EOCV-Sim/src/main/resources/templates/default_workspace.zip differ
diff --git a/EOCV-Sim/src/main/resources/templates/gradle_workspace.zip b/EOCV-Sim/src/main/resources/templates/gradle_workspace.zip
index e1d353c6..59480ce0 100644
Binary files a/EOCV-Sim/src/main/resources/templates/gradle_workspace.zip and b/EOCV-Sim/src/main/resources/templates/gradle_workspace.zip differ
diff --git a/EOCV-Sim/src/test/kotlin/com/github/serivesmejia/eocvsim/test/CoreTests.kt b/EOCV-Sim/src/test/kotlin/com/github/serivesmejia/eocvsim/test/CoreTests.kt
index d1916747..2b911300 100644
--- a/EOCV-Sim/src/test/kotlin/com/github/serivesmejia/eocvsim/test/CoreTests.kt
+++ b/EOCV-Sim/src/test/kotlin/com/github/serivesmejia/eocvsim/test/CoreTests.kt
@@ -27,6 +27,7 @@ package com.github.serivesmejia.eocvsim.test
import com.github.serivesmejia.eocvsim.EOCVSim
import io.kotest.core.spec.style.StringSpec
import org.opencv.core.Mat
+import org.openftc.apriltag.AprilTagDetectorJNI
class OpenCvTest : StringSpec({
"Loading native library" {
@@ -36,4 +37,15 @@ class OpenCvTest : StringSpec({
"Creating a Mat" {
Mat()
}
+})
+
+class AprilTagsTest : StringSpec({
+ "Create AprilTag detector" {
+ val detector = AprilTagDetectorJNI.createApriltagDetector(
+ AprilTagDetectorJNI.TagFamily.TAG_36h11.string,
+ 0f, 3
+ )
+
+ println("Created detector $detector")
+ }
})
\ No newline at end of file
diff --git a/README.md b/README.md
index 97a5e142..7fcf1639 100644
--- a/README.md
+++ b/README.md
@@ -72,6 +72,13 @@ For bug reporting or feature requesting, use the [issues tab](https://github.com
### Formerly, EOCV-Sim was hosted on a [personal account repo](https://github.com/serivesmejia/EOCV-Sim/). Released prior to 3.0.0 can be found there for historic purposes.
+### [v3.5.1 - FTC SDK 9.0](https://github.com/deltacv/EOCV-Sim/releases/tag/v3.5.1)
+ - This is the 18th release for EOCV-Sim
+ - Changelog
+ - Addresses the changes made in the FTC SDK 9.P for the 2023-2024 season:
+ - Fields in AprilTagDetection and AprilTagPose(ftc/raw) objects are now final
+ - VisionPortal builder method setCameraMonitorViewId() has been renamed to setLiveViewContainerId() and enableCameraMonitoring() has been renamed to enableLiveView()
+
### [v3.5.0 - New VisionPortal and VisionProcessor API](https://github.com/deltacv/EOCV-Sim/releases/tag/v3.5.0)
- This is the 18th release for EOCV-Sim
- Changelog
diff --git a/TeamCode/build.gradle b/TeamCode/build.gradle
index 805fd90b..a1e9743b 100644
--- a/TeamCode/build.gradle
+++ b/TeamCode/build.gradle
@@ -17,4 +17,4 @@ dependencies {
task(runSim, dependsOn: 'classes', type: JavaExec) {
main = 'com.github.serivesmejia.eocvsim.Main'
classpath = sourceSets.main.runtimeClasspath
-}
+}
\ No newline at end of file
diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/SimpleThresholdPipeline.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/processor/SimpleThresholdProcessor.java
similarity index 81%
rename from TeamCode/src/main/java/org/firstinspires/ftc/teamcode/SimpleThresholdPipeline.java
rename to TeamCode/src/main/java/org/firstinspires/ftc/teamcode/processor/SimpleThresholdProcessor.java
index c6e5fe20..55d72796 100644
--- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/SimpleThresholdPipeline.java
+++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/processor/SimpleThresholdProcessor.java
@@ -1,173 +1,181 @@
-/*
- * Copyright (c) 2021 Sebastian Erives
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- *
- */
-
-package org.firstinspires.ftc.teamcode;
-
-import com.qualcomm.robotcore.eventloop.opmode.Disabled;
-import org.firstinspires.ftc.robotcore.external.Telemetry;
-import org.opencv.core.Core;
-import org.opencv.core.Mat;
-import org.opencv.core.Scalar;
-import org.opencv.imgproc.Imgproc;
-import org.openftc.easyopencv.OpenCvPipeline;
-
-@Disabled
-public class SimpleThresholdPipeline extends OpenCvPipeline {
-
- /*
- * These are our variables that will be
- * modifiable from the variable tuner.
- *
- * Scalars in OpenCV are generally used to
- * represent color. So our values in the
- * lower and upper Scalars here represent
- * the Y, Cr and Cb values respectively.
- *
- * YCbCr, like most color spaces, range
- * from 0-255, so we default to those
- * min and max values here for now, meaning
- * that all pixels will be shown.
- */
- public Scalar lower = new Scalar(0, 0, 0);
- public Scalar upper = new Scalar(255, 255, 255);
-
- /**
- * This will allow us to choose the color
- * space we want to use on the live field
- * tuner instead of hardcoding it
- */
- public ColorSpace colorSpace = ColorSpace.YCrCb;
-
- /*
- * A good practice when typing EOCV pipelines is
- * declaring the Mats you will use here at the top
- * of your pipeline, to reuse the same buffers every
- * time. This removes the need to call mat.release()
- * with every Mat you create on the processFrame method,
- * and therefore, reducing the possibility of getting a
- * memory leak and causing the app to crash due to an
- * "Out of Memory" error.
- */
- private Mat ycrcbMat = new Mat();
- private Mat binaryMat = new Mat();
- private Mat maskedInputMat = new Mat();
-
- private Telemetry telemetry = null;
-
- /**
- * Enum to choose which color space to choose
- * with the live variable tuner isntead of
- * hardcoding it.
- */
- enum ColorSpace {
- /*
- * Define our "conversion codes" in the enum
- * so that we don't have to do a switch
- * statement in the processFrame method.
- */
- RGB(Imgproc.COLOR_RGBA2RGB),
- HSV(Imgproc.COLOR_RGB2HSV),
- YCrCb(Imgproc.COLOR_RGB2YCrCb),
- Lab(Imgproc.COLOR_RGB2Lab);
-
- //store cvtCode in a public var
- public int cvtCode = 0;
-
- //constructor to be used by enum declarations above
- ColorSpace(int cvtCode) {
- this.cvtCode = cvtCode;
- }
- }
-
- public SimpleThresholdPipeline(Telemetry telemetry) {
- this.telemetry = telemetry;
- }
-
- @Override
- public Mat processFrame(Mat input) {
- /*
- * Converts our input mat from RGB to
- * specified color space by the enum.
- * EOCV ALWAYS returns RGB mats, so you'd
- * always convert from RGB to the color
- * space you want to use.
- *
- * Takes our "input" mat as an input, and outputs
- * to a separate Mat buffer "ycrcbMat"
- */
- Imgproc.cvtColor(input, ycrcbMat, colorSpace.cvtCode);
-
- /*
- * This is where our thresholding actually happens.
- * Takes our "ycrcbMat" as input and outputs a "binary"
- * Mat to "binaryMat" of the same size as our input.
- * "Discards" all the pixels outside the bounds specified
- * by the scalars above (and modifiable with EOCV-Sim's
- * live variable tuner.)
- *
- * Binary meaning that we have either a 0 or 255 value
- * for every pixel.
- *
- * 0 represents our pixels that were outside the bounds
- * 255 represents our pixels that are inside the bounds
- */
- Core.inRange(ycrcbMat, lower, upper, binaryMat);
-
- /*
- * Release the reusable Mat so that old data doesn't
- * affect the next step in the current processing
- */
- maskedInputMat.release();
-
- /*
- * Now, with our binary Mat, we perform a "bitwise and"
- * to our input image, meaning that we will perform a mask
- * which will include the pixels from our input Mat which
- * are "255" in our binary Mat (meaning that they're inside
- * the range) and will discard any other pixel outside the
- * range (RGB 0, 0, 0. All discarded pixels will be black)
- */
- Core.bitwise_and(input, input, maskedInputMat, binaryMat);
-
- /**
- * Add some nice and informative telemetry messages
- */
- telemetry.addData("[>]", "Change these values in tuner menu");
- telemetry.addData("[Color Space]", colorSpace.name());
- telemetry.addData("[Lower Scalar]", lower);
- telemetry.addData("[Upper Scalar]", upper);
- telemetry.update();
-
- /*
- * The Mat returned from this method is the
- * one displayed on the viewport.
- *
- * To visualize our threshold, we'll return
- * the "masked input mat" which shows the
- * pixel from the input Mat that were inside
- * the threshold range.
- */
- return maskedInputMat;
- }
-
-}
\ No newline at end of file
+/*
+ * Copyright (c) 2023 Sebastian Erives
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ *
+ */
+
+package org.firstinspires.ftc.teamcode.processor;
+
+import android.graphics.Canvas;
+import com.qualcomm.robotcore.eventloop.opmode.Disabled;
+import org.firstinspires.ftc.robotcore.external.Telemetry;
+import org.firstinspires.ftc.robotcore.internal.camera.calibration.CameraCalibration;
+import org.firstinspires.ftc.vision.VisionProcessor;
+import org.opencv.core.Core;
+import org.opencv.core.Mat;
+import org.opencv.core.Scalar;
+import org.opencv.imgproc.Imgproc;
+
+@Disabled
+public class SimpleThresholdProcessor implements VisionProcessor {
+
+ /*
+ * These are our variables that will be
+ * modifiable from the variable tuner.
+ *
+ * Scalars in OpenCV are generally used to
+ * represent color. So our values in the
+ * lower and upper Scalars here represent
+ * the Y, Cr and Cb values respectively.
+ *
+ * YCbCr, like most color spaces, range
+ * from 0-255, so we default to those
+ * min and max values here for now, meaning
+ * that all pixels will be shown.
+ */
+ public Scalar lower = new Scalar(0, 0, 0);
+ public Scalar upper = new Scalar(255, 255, 255);
+
+ /**
+ * This will allow us to choose the color
+ * space we want to use on the live field
+ * tuner instead of hardcoding it
+ */
+ public ColorSpace colorSpace = ColorSpace.YCrCb;
+
+ /*
+ * A good practice when typing EOCV pipelines is
+ * declaring the Mats you will use here at the top
+ * of your pipeline, to reuse the same buffers every
+ * time. This removes the need to call mat.release()
+ * with every Mat you create on the processFrame method,
+ * and therefore, reducing the possibility of getting a
+ * memory leak and causing the app to crash due to an
+ * "Out of Memory" error.
+ */
+ private Mat ycrcbMat = new Mat();
+ private Mat binaryMat = new Mat();
+ private Mat maskedInputMat = new Mat();
+
+ private Telemetry telemetry = null;
+
+ /**
+ * Enum to choose which color space to choose
+ * with the live variable tuner isntead of
+ * hardcoding it.
+ */
+ enum ColorSpace {
+ /*
+ * Define our "conversion codes" in the enum
+ * so that we don't have to do a switch
+ * statement in the processFrame method.
+ */
+ RGB(Imgproc.COLOR_RGBA2RGB),
+ HSV(Imgproc.COLOR_RGB2HSV),
+ YCrCb(Imgproc.COLOR_RGB2YCrCb),
+ Lab(Imgproc.COLOR_RGB2Lab);
+
+ //store cvtCode in a public var
+ public int cvtCode = 0;
+
+ //constructor to be used by enum declarations above
+ ColorSpace(int cvtCode) {
+ this.cvtCode = cvtCode;
+ }
+ }
+
+ public SimpleThresholdProcessor(Telemetry telemetry) {
+ this.telemetry = telemetry;
+ }
+
+ @Override
+ public void init(int width, int height, CameraCalibration calibration) {
+ }
+
+ @Override
+ public Object processFrame(Mat frame, long captureTimeNanos) {
+ /*
+ * Converts our input mat from RGB to
+ * specified color space by the enum.
+ * EOCV ALWAYS returns RGB mats, so you'd
+ * always convert from RGB to the color
+ * space you want to use.
+ *
+ * Takes our "input" mat as an input, and outputs
+ * to a separate Mat buffer "ycrcbMat"
+ */
+ Imgproc.cvtColor(frame, ycrcbMat, colorSpace.cvtCode);
+
+ /*
+ * This is where our thresholding actually happens.
+ * Takes our "ycrcbMat" as input and outputs a "binary"
+ * Mat to "binaryMat" of the same size as our input.
+ * "Discards" all the pixels outside the bounds specified
+ * by the scalars above (and modifiable with EOCV-Sim's
+ * live variable tuner.)
+ *
+ * Binary meaning that we have either a 0 or 255 value
+ * for every pixel.
+ *
+ * 0 represents our pixels that were outside the bounds
+ * 255 represents our pixels that are inside the bounds
+ */
+ Core.inRange(ycrcbMat, lower, upper, binaryMat);
+
+ /*
+ * Release the reusable Mat so that old data doesn't
+ * affect the next step in the current processing
+ */
+ maskedInputMat.release();
+
+ /*
+ * Now, with our binary Mat, we perform a "bitwise and"
+ * to our input image, meaning that we will perform a mask
+ * which will include the pixels from our input Mat which
+ * are "255" in our binary Mat (meaning that they're inside
+ * the range) and will discard any other pixel outside the
+ * range (RGB 0, 0, 0. All discarded pixels will be black)
+ */
+ Core.bitwise_and(frame, frame, maskedInputMat, binaryMat);
+
+ /**
+ * Add some nice and informative telemetry messages
+ */
+ telemetry.addData("[>]", "Change these values in tuner menu");
+ telemetry.addData("[Color Space]", colorSpace.name());
+ telemetry.addData("[Lower Scalar]", lower);
+ telemetry.addData("[Upper Scalar]", upper);
+ telemetry.update();
+
+ /*
+ * Different from OpenCvPipeline, you cannot return
+ * a Mat from processFrame. Therefore, we will take
+ * advantage of the fact that anything drawn onto the
+ * passed `frame` object will be displayed on the
+ * viewport. We will just return null here.
+ */
+ maskedInputMat.copyTo(frame);
+ return null;
+ }
+
+ @Override
+ public void onDrawFrame(Canvas canvas, int onscreenWidth, int onscreenHeight, float scaleBmpPxToCanvasPx, float scaleCanvasDensity, Object userContext) {
+ }
+}
diff --git a/Vision/src/main/java/org/firstinspires/ftc/vision/VisionPortal.java b/Vision/src/main/java/org/firstinspires/ftc/vision/VisionPortal.java
index e3dd8c0b..ff3af581 100644
--- a/Vision/src/main/java/org/firstinspires/ftc/vision/VisionPortal.java
+++ b/Vision/src/main/java/org/firstinspires/ftc/vision/VisionPortal.java
@@ -106,7 +106,7 @@ public enum MultiPortalLayout
* Split up the screen for using multiple vision portals with live views simultaneously
* @param numPortals the number of portals to create space for on the screen
* @param mpl the methodology for laying out the multiple live views on the screen
- * @return an array of view IDs, whose elements may be passed to {@link Builder#setCameraMonitorViewId(int)}
+ * @return an array of view IDs, whose elements may be passed to {@link Builder#setLiveViewContainerId(int)}
*/
public static int[] makeMultiPortalView(int numPortals, MultiPortalLayout mpl)
{
@@ -194,9 +194,9 @@ public Builder setStreamFormat(StreamFormat streamFormat)
* @param enableLiveView whether or not to use a live preview
* @return the {@link Builder} object, to allow for method chaining
*/
- public Builder enableCameraMonitoring(boolean enableLiveView)
+ public Builder enableLiveView(boolean enableLiveView)
{
- setCameraMonitorViewId(1);
+ setLiveViewContainerId(1);
return this;
}
@@ -213,12 +213,12 @@ public Builder setAutoStopLiveView(boolean autoPause)
}
/**
- * A more advanced version of {@link #enableCameraMonitoring(boolean)}; allows you
+ * A more advanced version of {@link #enableLiveView(boolean)}; allows you
* to specify a specific view ID to use as a container, rather than just using the default one
* @param cameraMonitorViewId view ID of container for live view
* @return the {@link Builder} object, to allow for method chaining
*/
- public Builder setCameraMonitorViewId(int cameraMonitorViewId)
+ public Builder setLiveViewContainerId(int cameraMonitorViewId)
{
this.cameraMonitorViewId = cameraMonitorViewId;
return this;
diff --git a/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagDetection.java b/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagDetection.java
index e8c95e12..251fe4ee 100644
--- a/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagDetection.java
+++ b/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagDetection.java
@@ -40,46 +40,60 @@ public class AprilTagDetection
/**
* The numerical ID of the detection
*/
- public int id;
+ public final int id;
/**
* The number of bits corrected when reading the tag ID payload
*/
- public int hamming;
+ public final int hamming;
/*
* How much margin remains before the detector would decide to reject a tag
*/
- public float decisionMargin;
+ public final float decisionMargin;
/*
* The image pixel coordinates of the center of the tag
*/
- public Point center;
+ public final Point center;
/*
* The image pixel coordinates of the corners of the tag
*/
- public Point[] corners;
+ public final Point[] corners;
/*
* Metadata known about this tag from the tag library set on the detector;
* will be NULL if the tag was not in the tag library
*/
- public AprilTagMetadata metadata;
+ public final AprilTagMetadata metadata;
/*
* 6DOF pose data formatted in useful ways for FTC gameplay
*/
- public AprilTagPoseFtc ftcPose;
+ public final AprilTagPoseFtc ftcPose;
/*
* Raw translation vector and orientation matrix returned by the pose solver
*/
- public AprilTagPoseRaw rawPose;
+ public final AprilTagPoseRaw rawPose;
/*
* Timestamp of when the image in which this detection was found was acquired
*/
- public long frameAcquisitionNanoTime;
-}
+ public final long frameAcquisitionNanoTime;
+
+ public AprilTagDetection(int id, int hamming, float decisionMargin, Point center, Point[] corners,
+ AprilTagMetadata metadata, AprilTagPoseFtc ftcPose, AprilTagPoseRaw rawPose, long frameAcquisitionNanoTime)
+ {
+ this.id = id;
+ this.hamming = hamming;
+ this.decisionMargin = decisionMargin;
+ this.center = center;
+ this.corners = corners;
+ this.metadata = metadata;
+ this.ftcPose = ftcPose;
+ this.rawPose = rawPose;
+ this.frameAcquisitionNanoTime = frameAcquisitionNanoTime;
+ }
+}
\ No newline at end of file
diff --git a/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagPoseFtc.java b/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagPoseFtc.java
index 16300f7f..44ca39e4 100644
--- a/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagPoseFtc.java
+++ b/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagPoseFtc.java
@@ -50,53 +50,67 @@ public class AprilTagPoseFtc
* X translation of AprilTag, relative to camera lens. Measured sideways (Horizontally in camera image) the positive X axis extends out to the right of the camera viewpoint.
* An x value of zero implies that the Tag is centered between the left and right sides of the Camera image.
*/
- public double x;
+ public final double x;
/**
* Y translation of AprilTag, relative to camera lens. Measured forwards (Horizontally in camera image) the positive Y axis extends out in the direction the camera is pointing.
* A y value of zero implies that the Tag is touching (aligned with) the lens of the camera, which is physically unlikley. This value should always be positive.
*/
- public double y;
+ public final double y;
/**
* Z translation of AprilTag, relative to camera lens. Measured upwards (Vertically in camera image) the positive Z axis extends Upwards in the camera viewpoint.
* A z value of zero implies that the Tag is centered between the top and bottom of the camera image.
*/
- public double z;
+ public final double z;
/**
* Rotation of AprilTag around the Z axis. Right-Hand-Rule defines positive Yaw rotation as Counter-Clockwise when viewed from above.
* A yaw value of zero implies that the camera is directly in front of the Tag, as viewed from above.
*/
- public double yaw;
+ public final double yaw;
/**
* Rotation of AprilTag around the X axis. Right-Hand-Rule defines positive Pitch rotation as the Tag Image face twisting down when viewed from the camera.
* A pitch value of zero implies that the camera is directly in front of the Tag, as viewed from the side.
*/
- public double pitch;
+ public final double pitch;
/**
* Rotation of AprilTag around the Y axis. Right-Hand-Rule defines positive Roll rotation as the Tag Image rotating Clockwise when viewed from the camera.
* A roll value of zero implies that the Tag image is alligned squarely and upright, when viewed in the camera image frame.
*/
- public double roll;
+ public final double roll;
/**
* Range, (Distance), from the Camera lens to the center of the Tag, as measured along the X-Y plane (across the ground).
*/
- public double range;
+ public final double range;
/**
* Bearing, or Horizontal Angle, from the "camera center-line", to the "line joining the Camera lens and the Center of the Tag".
* This angle is measured across the X-Y plane (across the ground).
* A positive Bearing indicates that the robot must employ a positive Yaw (rotate counter clockwise) in order to point towards the target.
*/
- public double bearing;
+ public final double bearing;
/**
* Elevation, (Vertical Angle), from "the camera center-line", to "the line joining the Camera Lens and the Center of the Tag".
* A positive Elevation indicates that the robot must employ a positive Pitch (tilt up) in order to point towards the target.
*/
- public double elevation;
-}
+ public final double elevation;
+
+ public AprilTagPoseFtc(double x, double y, double z, double yaw, double pitch,
+ double roll, double range, double bearing, double elevation)
+ {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ this.yaw = yaw;
+ this.pitch = pitch;
+ this.roll = roll;
+ this.range = range;
+ this.bearing = bearing;
+ this.elevation = elevation;
+ }
+}
\ No newline at end of file
diff --git a/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagPoseRaw.java b/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagPoseRaw.java
index edd67664..3ded6205 100644
--- a/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagPoseRaw.java
+++ b/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagPoseRaw.java
@@ -40,20 +40,28 @@ public class AprilTagPoseRaw
/**
* X translation
*/
- public double x;
+ public final double x;
/**
* Y translation
*/
- public double y;
+ public final double y;
/**
* Z translation
*/
- public double z;
+ public final double z;
/**
* 3x3 rotation matrix
*/
- public MatrixF R;
-}
+ public final MatrixF R;
+
+ public AprilTagPoseRaw(double x, double y, double z, MatrixF R)
+ {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ this.R = R;
+ }
+}
\ No newline at end of file
diff --git a/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagProcessorImpl.java b/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagProcessorImpl.java
index 047538ae..ccd58a8f 100644
--- a/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagProcessorImpl.java
+++ b/Vision/src/main/java/org/firstinspires/ftc/vision/apriltag/AprilTagProcessorImpl.java
@@ -37,6 +37,7 @@
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.util.MovingStatistics;
+import com.qualcomm.robotcore.util.RobotLog;
import org.firstinspires.ftc.robotcore.external.matrices.GeneralMatrixF;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
@@ -56,16 +57,17 @@
import org.opencv.imgproc.Imgproc;
import org.openftc.apriltag.AprilTagDetectorJNI;
import org.openftc.apriltag.ApriltagDetectionJNI;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import java.util.ArrayList;
+import java.util.logging.Logger;
@Disabled
public class AprilTagProcessorImpl extends AprilTagProcessor
{
public static final String TAG = "AprilTagProcessorImpl";
- Logger logger = LoggerFactory.getLogger(TAG);
+
+ private Logger logger = Logger.getLogger(TAG);
+
private long nativeApriltagPtr;
private Mat grey = new Mat();
private ArrayList detections = new ArrayList<>();
@@ -128,7 +130,7 @@ protected void finalize()
}
else
{
- logger.warn("AprilTagDetectionPipeline.finalize(): nativeApriltagPtr was NULL");
+ System.out.println("AprilTagDetectionPipeline.finalize(): nativeApriltagPtr was NULL");
}
}
@@ -138,14 +140,14 @@ public void init(int width, int height, CameraCalibration calibration)
// If the user didn't give us a calibration, but we have one built in,
// then go ahead and use it!!
if (calibration != null && fx == 0 && fy == 0 && cx == 0 && cy == 0
- && !(calibration.focalLengthX == 0 && calibration.focalLengthY == 0 && calibration.principalPointX == 0 && calibration.principalPointY == 0)) // needed because we may get an all zero calibration to indicate none, instead of null
+ && !(calibration.focalLengthX == 0 && calibration.focalLengthY == 0 && calibration.principalPointX == 0 && calibration.principalPointY == 0)) // needed because we may get an all zero calibration to indicate none, instead of null
{
fx = calibration.focalLengthX;
fy = calibration.focalLengthY;
cx = calibration.principalPointX;
cy = calibration.principalPointY;
- logger.warn(String.format("User did not provide a camera calibration; but we DO have a built in calibration we can use.\n [%dx%d] (may be scaled) %s\nfx=%7.3f fy=%7.3f cx=%7.3f cy=%7.3f",
+ logger.info(String.format("User did not provide a camera calibration; but we DO have a built in calibration we can use.\n [%dx%d] (may be scaled) %s\nfx=%7.3f fy=%7.3f cx=%7.3f cy=%7.3f",
calibration.getSize().getWidth(), calibration.getSize().getHeight(), calibration.getIdentity().toString(), fx, fy, cx, cy));
}
else if (fx == 0 && fy == 0 && cx == 0 && cy == 0)
@@ -153,17 +155,16 @@ else if (fx == 0 && fy == 0 && cx == 0 && cy == 0)
// set it to *something* so we don't crash the native code
String warning = "User did not provide a camera calibration, nor was a built-in calibration found for this camera; 6DOF pose data will likely be inaccurate.";
- logger.warn(warning);
- // RobotLog.addGlobalWarningMessage(warning);
+ logger.warning(warning);
fx = 578.272;
fy = 578.272;
- cx = (double) width /2;
- cy = (double) height /2;
+ cx = width/2;
+ cy = height/2;
}
else
{
- logger.warn(String.format("User provided their own camera calibration fx=%7.3f fy=%7.3f cx=%7.3f cy=%7.3f",
+ logger.info(String.format("User provided their own camera calibration fx=%7.3f fy=%7.3f cx=%7.3f cy=%7.3f",
fx, fy, cx, cy));
}
@@ -212,72 +213,62 @@ ArrayList runAprilTagDetectorForMultipleTagSizes(long capture
for (long ptrDetection : detectionPointers)
{
- AprilTagDetection detection = new AprilTagDetection();
- detection.frameAcquisitionNanoTime = captureTimeNanos;
-
- detection.id = ApriltagDetectionJNI.getId(ptrDetection);
+ AprilTagMetadata metadata = tagLibrary.lookupTag(ApriltagDetectionJNI.getId(ptrDetection));
- AprilTagMetadata metadata = tagLibrary.lookupTag(detection.id);
- detection.metadata = metadata;
-
- detection.hamming = ApriltagDetectionJNI.getHamming(ptrDetection);
- detection.decisionMargin = ApriltagDetectionJNI.getDecisionMargin(ptrDetection);
- double[] center = ApriltagDetectionJNI.getCenterpoint(ptrDetection);
- detection.center = new Point(center[0], center[1]);
double[][] corners = ApriltagDetectionJNI.getCorners(ptrDetection);
- detection.corners = new Point[4];
+ Point[] cornerPts = new Point[4];
for (int p = 0; p < 4; p++)
{
- detection.corners[p] = new Point(corners[p][0], corners[p][1]);
+ cornerPts[p] = new Point(corners[p][0], corners[p][1]);
}
+ AprilTagPoseRaw rawPose;
+ AprilTagPoseFtc ftcPose;
+
if (metadata != null)
{
PoseSolver solver = poseSolver; // snapshot, can change
- detection.rawPose = new AprilTagPoseRaw();
-
long startSolveTime = System.currentTimeMillis();
if (solver == PoseSolver.APRILTAG_BUILTIN)
{
- // Translation
double[] pose = ApriltagDetectionJNI.getPoseEstimate(
ptrDetection,
outputUnitsLength.fromUnit(metadata.distanceUnit, metadata.tagsize),
fx, fy, cx, cy);
- detection.rawPose.x = pose[0];
- detection.rawPose.y = pose[1];
- detection.rawPose.z = pose[2];
-
- // Rotation
+ // Build rotation matrix
float[] rotMtxVals = new float[3 * 3];
for (int i = 0; i < 9; i++)
{
rotMtxVals[i] = (float) pose[3 + i];
}
- detection.rawPose.R = new GeneralMatrixF(3, 3, rotMtxVals);
+
+ rawPose = new AprilTagPoseRaw(
+ pose[0], pose[1], pose[2], // x y z
+ new GeneralMatrixF(3, 3, rotMtxVals)); // R
}
else
{
Pose opencvPose = poseFromTrapezoid(
- detection.corners,
+ cornerPts,
cameraMatrix,
outputUnitsLength.fromUnit(metadata.distanceUnit, metadata.tagsize),
solver.code);
- detection.rawPose.x = opencvPose.tvec.get(0,0)[0];
- detection.rawPose.y = opencvPose.tvec.get(1,0)[0];
- detection.rawPose.z = opencvPose.tvec.get(2,0)[0];
-
+ // Build rotation matrix
Mat R = new Mat(3, 3, CvType.CV_32F);
Calib3d.Rodrigues(opencvPose.rvec, R);
-
float[] tmp2 = new float[9];
R.get(0,0, tmp2);
- detection.rawPose.R = new GeneralMatrixF(3,3, tmp2);
+
+ rawPose = new AprilTagPoseRaw(
+ opencvPose.tvec.get(0,0)[0], // x
+ opencvPose.tvec.get(1,0)[0], // y
+ opencvPose.tvec.get(2,0)[0], // z
+ new GeneralMatrixF(3,3, tmp2)); // R
}
long endSolveTime = System.currentTimeMillis();
@@ -286,28 +277,36 @@ ArrayList runAprilTagDetectorForMultipleTagSizes(long capture
else
{
// We don't know anything about the tag size so we can't solve the pose
- detection.rawPose = null;
+ rawPose = null;
}
- if (detection.rawPose != null)
+ if (rawPose != null)
{
- detection.ftcPose = new AprilTagPoseFtc();
-
- detection.ftcPose.x = detection.rawPose.x;
- detection.ftcPose.y = detection.rawPose.z;
- detection.ftcPose.z = -detection.rawPose.y;
-
- Orientation rot = Orientation.getOrientation(detection.rawPose.R, AxesReference.INTRINSIC, AxesOrder.YXZ, outputUnitsAngle);
- detection.ftcPose.yaw = -rot.firstAngle;
- detection.ftcPose.roll = rot.thirdAngle;
- detection.ftcPose.pitch = rot.secondAngle;
-
- detection.ftcPose.range = Math.hypot(detection.ftcPose.x, detection.ftcPose.y);
- detection.ftcPose.bearing = outputUnitsAngle.fromUnit(AngleUnit.RADIANS, Math.atan2(-detection.ftcPose.x, detection.ftcPose.y));
- detection.ftcPose.elevation = outputUnitsAngle.fromUnit(AngleUnit.RADIANS, Math.atan2(detection.ftcPose.z, detection.ftcPose.y));
+ Orientation rot = Orientation.getOrientation(rawPose.R, AxesReference.INTRINSIC, AxesOrder.YXZ, outputUnitsAngle);
+
+ ftcPose = new AprilTagPoseFtc(
+ rawPose.x, // x NB: These are *intentionally* not matched directly;
+ rawPose.z, // y this is the mapping between the AprilTag coordinate
+ -rawPose.y, // z system and the FTC coordinate system
+ -rot.firstAngle, // yaw
+ rot.secondAngle, // pitch
+ rot.thirdAngle, // roll
+ Math.hypot(rawPose.x, rawPose.z), // range
+ outputUnitsAngle.fromUnit(AngleUnit.RADIANS, Math.atan2(-rawPose.x, rawPose.z)), // bearing
+ outputUnitsAngle.fromUnit(AngleUnit.RADIANS, Math.atan2(-rawPose.y, rawPose.z))); // elevation
}
+ else
+ {
+ ftcPose = null;
+ }
+
+ double[] center = ApriltagDetectionJNI.getCenterpoint(ptrDetection);
- detections.add(detection);
+ detections.add(new AprilTagDetection(
+ ApriltagDetectionJNI.getId(ptrDetection),
+ ApriltagDetectionJNI.getHamming(ptrDetection),
+ ApriltagDetectionJNI.getDecisionMargin(ptrDetection),
+ new Point(center[0], center[1]), cornerPts, metadata, ftcPose, rawPose, captureTimeNanos));
}
ApriltagDetectionJNI.freeDetectionList(ptrDetectionArray);
diff --git a/build.gradle b/build.gradle
index 8628c343..02fbde3a 100644
--- a/build.gradle
+++ b/build.gradle
@@ -8,8 +8,8 @@ buildscript {
kotlinx_coroutines_version = "1.5.0-native-mt"
slf4j_version = "1.7.32"
log4j_version = "2.17.1"
- opencv_version = "4.5.5-1"
- apriltag_plugin_version = "2.0.0-B"
+ opencv_version = "4.7.0-0"
+ apriltag_plugin_version = "2.0.0-C"
skiko_version = "0.7.75"
classgraph_version = "4.8.108"
@@ -37,7 +37,7 @@ plugins {
allprojects {
group 'com.github.deltacv'
- version '3.5.0'
+ version '3.5.1'
apply plugin: 'java'
@@ -82,4 +82,4 @@ allprojects {
file.delete()
}
}
-}
\ No newline at end of file
+}