Skip to content

Commit

Permalink
Address SDK 9.0 changes
Browse files Browse the repository at this point in the history
  • Loading branch information
serivesmejia committed Sep 10, 2023
1 parent 833dc01 commit cf698e2
Show file tree
Hide file tree
Showing 9 changed files with 305 additions and 264 deletions.
Binary file modified EOCV-Sim/src/main/resources/templates/default_workspace.zip
Binary file not shown.
Binary file modified EOCV-Sim/src/main/resources/templates/gradle_workspace.zip
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -1,173 +1,181 @@
/*
* Copyright (c) 2021 Sebastian Erives
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/

package org.firstinspires.ftc.teamcode;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import org.openftc.easyopencv.OpenCvPipeline;

@Disabled
public class SimpleThresholdPipeline extends OpenCvPipeline {

    /*
     * Threshold bounds, modifiable from the live variable tuner.
     *
     * Scalars in OpenCV are generally used to represent color, so
     * the three values in the lower and upper Scalars are the
     * per-channel bounds (Y, Cr and Cb respectively by default).
     *
     * YCrCb, like most color spaces, ranges from 0-255, so we
     * default to the full range here, meaning all pixels pass.
     */
    public Scalar lower = new Scalar(0, 0, 0);
    public Scalar upper = new Scalar(255, 255, 255);

    /**
     * Allows choosing the color space from the live variable
     * tuner instead of hardcoding it.
     */
    public ColorSpace colorSpace = ColorSpace.YCrCb;

    /*
     * A good practice when writing EOCV pipelines is declaring the
     * Mats you will use at the top of the pipeline, so the same
     * native buffers are reused every frame. This removes the need
     * to call mat.release() on every Mat created in processFrame,
     * reducing the chance of a native memory leak crashing the app
     * with an "Out of Memory" error.
     */
    private Mat ycrcbMat = new Mat();
    private Mat binaryMat = new Mat();
    private Mat maskedInputMat = new Mat();

    // Telemetry sink supplied by the caller; used to report the
    // currently tuned values every frame.
    private Telemetry telemetry = null;

    /**
     * Enum to choose which color space to use with the live
     * variable tuner instead of hardcoding it.
     */
    enum ColorSpace {
        /*
         * Define the OpenCV "conversion codes" in the enum so that
         * we don't need a switch statement in processFrame.
         */
        RGB(Imgproc.COLOR_RGBA2RGB),
        HSV(Imgproc.COLOR_RGB2HSV),
        YCrCb(Imgproc.COLOR_RGB2YCrCb),
        Lab(Imgproc.COLOR_RGB2Lab);

        // OpenCV cvtColor conversion code for this color space.
        // Final: it never changes after construction.
        public final int cvtCode;

        // Constructor used by the enum declarations above.
        ColorSpace(int cvtCode) {
            this.cvtCode = cvtCode;
        }
    }

    /**
     * @param telemetry the telemetry object to report tuned values to
     */
    public SimpleThresholdPipeline(Telemetry telemetry) {
        this.telemetry = telemetry;
    }

    /**
     * Thresholds the input frame in the selected color space and
     * returns the masked input for display on the viewport.
     *
     * @param input the current camera frame, always in RGB
     * @return the input frame with out-of-range pixels blacked out
     */
    @Override
    public Mat processFrame(Mat input) {
        /*
         * Converts our input mat from RGB to the color space
         * specified by the enum. EOCV ALWAYS provides RGB mats,
         * so you'd always convert from RGB to the color space you
         * want to use.
         *
         * Takes our "input" mat as an input, and outputs to a
         * separate Mat buffer "ycrcbMat".
         */
        Imgproc.cvtColor(input, ycrcbMat, colorSpace.cvtCode);

        /*
         * This is where the thresholding actually happens.
         * Takes "ycrcbMat" as input and outputs a "binary" Mat to
         * "binaryMat" of the same size as the input. "Discards"
         * all the pixels outside the bounds specified by the
         * scalars above (modifiable with EOCV-Sim's live tuner).
         *
         * Binary meaning every pixel is either 0 or 255:
         * 0 represents pixels outside the bounds,
         * 255 represents pixels inside the bounds.
         */
        Core.inRange(ycrcbMat, lower, upper, binaryMat);

        /*
         * Release the reusable Mat so that old data doesn't
         * affect the next step in the current processing.
         */
        maskedInputMat.release();

        /*
         * Now, with our binary Mat, we perform a "bitwise and"
         * with the input image: pixels that are "255" in the
         * binary Mat (inside the range) are kept from the input,
         * and every other pixel is discarded (set to RGB 0, 0, 0,
         * i.e. black).
         */
        Core.bitwise_and(input, input, maskedInputMat, binaryMat);

        /*
         * Add some nice and informative telemetry messages.
         */
        telemetry.addData("[>]", "Change these values in tuner menu");
        telemetry.addData("[Color Space]", colorSpace.name());
        telemetry.addData("[Lower Scalar]", lower);
        telemetry.addData("[Upper Scalar]", upper);
        telemetry.update();

        /*
         * The Mat returned from this method is the one displayed
         * on the viewport. To visualize the threshold, we return
         * the masked input mat, which shows the pixels of the
         * input that fell inside the threshold range.
         */
        return maskedInputMat;
    }

}
/*
* Copyright (c) 2023 Sebastian Erives
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/

package org.firstinspires.ftc.teamcode.processor;

import android.graphics.Canvas;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.internal.camera.calibration.CameraCalibration;
import org.firstinspires.ftc.vision.VisionProcessor;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

@Disabled
public class SimpleThresholdProcessor implements VisionProcessor {

    /*
     * Threshold bounds, modifiable from the live variable tuner.
     *
     * Scalars in OpenCV are generally used to represent color, so
     * the three values in the lower and upper Scalars are the
     * per-channel bounds (Y, Cr and Cb respectively by default).
     *
     * YCrCb, like most color spaces, ranges from 0-255, so we
     * default to the full range here, meaning all pixels pass.
     */
    public Scalar lower = new Scalar(0, 0, 0);
    public Scalar upper = new Scalar(255, 255, 255);

    /**
     * Allows choosing the color space from the live variable
     * tuner instead of hardcoding it.
     */
    public ColorSpace colorSpace = ColorSpace.YCrCb;

    /*
     * A good practice when writing vision processors is declaring
     * the Mats you will use at the top of the class, so the same
     * native buffers are reused every frame. This removes the need
     * to call mat.release() on every Mat created in processFrame,
     * reducing the chance of a native memory leak crashing the app
     * with an "Out of Memory" error.
     */
    private Mat ycrcbMat = new Mat();
    private Mat binaryMat = new Mat();
    private Mat maskedInputMat = new Mat();

    // Telemetry sink supplied by the caller; used to report the
    // currently tuned values every frame.
    private Telemetry telemetry = null;

    /**
     * Enum to choose which color space to use with the live
     * variable tuner instead of hardcoding it.
     */
    enum ColorSpace {
        /*
         * Define the OpenCV "conversion codes" in the enum so that
         * we don't need a switch statement in processFrame.
         */
        RGB(Imgproc.COLOR_RGBA2RGB),
        HSV(Imgproc.COLOR_RGB2HSV),
        YCrCb(Imgproc.COLOR_RGB2YCrCb),
        Lab(Imgproc.COLOR_RGB2Lab);

        // OpenCV cvtColor conversion code for this color space.
        // Final: it never changes after construction.
        public final int cvtCode;

        // Constructor used by the enum declarations above.
        ColorSpace(int cvtCode) {
            this.cvtCode = cvtCode;
        }
    }

    /**
     * @param telemetry the telemetry object to report tuned values to
     */
    public SimpleThresholdProcessor(Telemetry telemetry) {
        this.telemetry = telemetry;
    }

    /**
     * No per-stream initialization is needed; the Mat buffers are
     * allocated lazily by OpenCV on first use.
     */
    @Override
    public void init(int width, int height, CameraCalibration calibration) {
    }

    /**
     * Thresholds the input frame in the selected color space and
     * copies the masked result back onto {@code frame} so it shows
     * on the viewport.
     *
     * @param frame the current camera frame, always in RGB;
     *              overwritten with the masked output for display
     * @param captureTimeNanos timestamp of the frame capture (unused)
     * @return always null; no user context is passed to onDrawFrame
     */
    @Override
    public Object processFrame(Mat frame, long captureTimeNanos) {
        /*
         * Converts our input mat from RGB to the color space
         * specified by the enum. The SDK ALWAYS provides RGB mats,
         * so you'd always convert from RGB to the color space you
         * want to use.
         *
         * Takes our "frame" mat as an input, and outputs to a
         * separate Mat buffer "ycrcbMat".
         */
        Imgproc.cvtColor(frame, ycrcbMat, colorSpace.cvtCode);

        /*
         * This is where the thresholding actually happens.
         * Takes "ycrcbMat" as input and outputs a "binary" Mat to
         * "binaryMat" of the same size as the input. "Discards"
         * all the pixels outside the bounds specified by the
         * scalars above (modifiable with EOCV-Sim's live tuner).
         *
         * Binary meaning every pixel is either 0 or 255:
         * 0 represents pixels outside the bounds,
         * 255 represents pixels inside the bounds.
         */
        Core.inRange(ycrcbMat, lower, upper, binaryMat);

        /*
         * Release the reusable Mat so that old data doesn't
         * affect the next step in the current processing.
         */
        maskedInputMat.release();

        /*
         * Now, with our binary Mat, we perform a "bitwise and"
         * with the input image: pixels that are "255" in the
         * binary Mat (inside the range) are kept from the input,
         * and every other pixel is discarded (set to RGB 0, 0, 0,
         * i.e. black).
         */
        Core.bitwise_and(frame, frame, maskedInputMat, binaryMat);

        /*
         * Add some nice and informative telemetry messages.
         */
        telemetry.addData("[>]", "Change these values in tuner menu");
        telemetry.addData("[Color Space]", colorSpace.name());
        telemetry.addData("[Lower Scalar]", lower);
        telemetry.addData("[Upper Scalar]", upper);
        telemetry.update();

        /*
         * Different from OpenCvPipeline, you cannot return a Mat
         * from processFrame. Therefore, we take advantage of the
         * fact that anything drawn onto the passed `frame` object
         * will be displayed on the viewport. We just return null.
         */
        maskedInputMat.copyTo(frame);
        return null;
    }

    /**
     * Nothing extra to draw; the thresholded image was already
     * written into the frame by processFrame.
     */
    @Override
    public void onDrawFrame(Canvas canvas, int onscreenWidth, int onscreenHeight, float scaleBmpPxToCanvasPx, float scaleCanvasDensity, Object userContext) {
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ public enum MultiPortalLayout
* Split up the screen for using multiple vision portals with live views simultaneously
* @param numPortals the number of portals to create space for on the screen
* @param mpl the methodology for laying out the multiple live views on the screen
* @return an array of view IDs, whose elements may be passed to {@link Builder#setCameraMonitorViewId(int)}
* @return an array of view IDs, whose elements may be passed to {@link Builder#setLiveViewContainerId(int)}
*/
public static int[] makeMultiPortalView(int numPortals, MultiPortalLayout mpl)
{
Expand Down Expand Up @@ -194,9 +194,9 @@ public Builder setStreamFormat(StreamFormat streamFormat)
* @param enableLiveView whether or not to use a live preview
* @return the {@link Builder} object, to allow for method chaining
*/
public Builder enableCameraMonitoring(boolean enableLiveView)
public Builder enableLiveView(boolean enableLiveView)
{
setCameraMonitorViewId(1);
setLiveViewContainerId(1);
return this;
}

Expand All @@ -213,12 +213,12 @@ public Builder setAutoStopLiveView(boolean autoPause)
}

/**
* A more advanced version of {@link #enableCameraMonitoring(boolean)}; allows you
* A more advanced version of {@link #enableLiveView(boolean)}; allows you
* to specify a specific view ID to use as a container, rather than just using the default one
* @param cameraMonitorViewId view ID of container for live view
* @return the {@link Builder} object, to allow for method chaining
*/
public Builder setCameraMonitorViewId(int cameraMonitorViewId)
public Builder setLiveViewContainerId(int cameraMonitorViewId)
{
this.cameraMonitorViewId = cameraMonitorViewId;
return this;
Expand Down
Loading

0 comments on commit cf698e2

Please sign in to comment.