m_epochs = new LinkedHashMap<>(); // microseconds
+
+ /** Tracer constructor. */
+ public OrderedTracer() {
+ resetTimer();
+ }
+
+ /** Clears all epochs. */
+ public void clearEpochs() {
+ m_epochs.clear();
+ resetTimer();
+ }
+
+ /** Restarts the epoch timer. */
+ public final void resetTimer() {
+ m_startTime = RobotController.getFPGATime();
+ }
+
+ /**
+ * Adds time since last epoch to the list printed by printEpochs().
+ *
+ * <p>Epochs are a way to partition the time elapsed so that when overruns occur, one can
+ * determine which parts of an operation consumed the most time.
+ *
+ * <p>This should be called immediately after execution has finished, with a call to this method
+ * or {@link #resetTimer()} before execution.
+ *
+ * @param epochName The name to associate with the epoch.
+ */
+ public void addEpoch(String epochName) {
+ long currentTime = RobotController.getFPGATime();
+ m_epochs.put(epochName, currentTime - m_startTime);
+ m_startTime = currentTime;
+ }
+
+ /** Prints list of epochs added so far and their times to the DriverStation. */
+ public void printEpochs() {
+ printEpochs(out -> DriverStation.reportWarning(out, false));
+ }
+
+ /**
+ * Prints list of epochs added so far and their times to the given String consumer.
+ *
+ * <p>This overload can be useful for logging to a file, etc.
+ *
+ * @param output the String consumer that the output is sent to
+ */
+ public void printEpochs(Consumer<String> output) {
+ long now = RobotController.getFPGATime();
+ if (now - m_lastEpochsPrintTime > kMinPrintPeriod) {
+ StringBuilder sb = new StringBuilder();
+ m_lastEpochsPrintTime = now;
+ m_epochs.forEach(
+ (key, value) -> sb.append(String.format("\t%s: %.6fs\n", key, value / 1.0e6)));
+ if (sb.length() > 0) {
+ output.accept(sb.toString());
+ }
+ }
+ }
+
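+ /** Returns the elapsed time of each epoch added so far, in microseconds, in insertion order. */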
+ public long[] getEpochTimes() {
+ var ret = m_epochs.values();
+ var arr = new long[ret.size()];
+ int i = 0;
+ for (var e : ret) {
+ arr[i] = e;
+ i++;
+ }
+ return arr;
+ }
+
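+ /** Returns the names of the epochs added so far, in insertion order. */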
+ public String[] getEpochNames() {
+ return m_epochs.keySet().toArray(new String[0]);
+ }
+}
diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java b/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java
index 8caeb00822..df6c00d44c 100644
--- a/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java
+++ b/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java
@@ -17,6 +17,7 @@
package org.photonvision.vision.frame;
+import org.photonvision.common.util.OrderedTracer;
import org.photonvision.common.util.math.MathUtils;
import org.photonvision.vision.opencv.CVMat;
import org.photonvision.vision.opencv.Releasable;
@@ -32,19 +33,25 @@ public class Frame implements Releasable {
public final FrameStaticProperties frameStaticProperties;
+ // A Frame owns its tracer from capture on the underlying device until the frame is published or
+ // garbage-collected. This adds some GC pressure, but that's an acceptable tradeoff.
+ public final OrderedTracer m_tracer;
+
public Frame(
long sequenceID,
CVMat color,
CVMat processed,
FrameThresholdType type,
long timestampNanos,
- FrameStaticProperties frameStaticProperties) {
+ FrameStaticProperties frameStaticProperties,
+ OrderedTracer tracer) {
this.sequenceID = sequenceID;
this.colorImage = color;
this.processedImage = processed;
this.type = type;
this.timestampNanos = timestampNanos;
this.frameStaticProperties = frameStaticProperties;
+ this.m_tracer = tracer;
}
public Frame(
@@ -52,8 +59,16 @@ public Frame(
CVMat color,
CVMat processed,
FrameThresholdType processType,
- FrameStaticProperties frameStaticProperties) {
- this(sequenceID, color, processed, processType, MathUtils.wpiNanoTime(), frameStaticProperties);
+ FrameStaticProperties frameStaticProperties,
+ OrderedTracer tracer) {
+ this(
+ sequenceID,
+ color,
+ processed,
+ processType,
+ MathUtils.wpiNanoTime(),
+ frameStaticProperties,
+ tracer);
}
public Frame() {
@@ -63,7 +78,8 @@ public Frame() {
new CVMat(),
FrameThresholdType.NONE,
MathUtils.wpiNanoTime(),
- new FrameStaticProperties(0, 0, 0, null));
+ new FrameStaticProperties(0, 0, 0, null),
+ new OrderedTracer());
}
public void copyTo(Frame destFrame) {
diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java
index 2238db496f..0cc170734b 100644
--- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java
+++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java
@@ -17,6 +17,7 @@
package org.photonvision.vision.frame.provider;
+import org.photonvision.common.util.OrderedTracer;
import org.photonvision.common.util.numbers.IntegerCouple;
import org.photonvision.vision.frame.Frame;
import org.photonvision.vision.frame.FrameProvider;
@@ -31,15 +32,20 @@
public abstract class CpuImageProcessor extends FrameProvider {
protected static class CapturedFrame {
- CVMat colorImage;
- FrameStaticProperties staticProps;
- long captureTimestamp;
+ public final CVMat colorImage;
+ public final FrameStaticProperties staticProps;
+ public final long captureTimestamp;
+ public final OrderedTracer tracer;
public CapturedFrame(
- CVMat colorImage, FrameStaticProperties staticProps, long captureTimestampNanos) {
+ CVMat colorImage,
+ FrameStaticProperties staticProps,
+ long captureTimestampNanos,
+ OrderedTracer tracer) {
this.colorImage = colorImage;
this.staticProps = staticProps;
this.captureTimestamp = captureTimestampNanos;
+ this.tracer = tracer;
}
}
@@ -72,6 +78,7 @@ public final Frame get() {
{
CVPipeResult<Mat> out = m_rImagePipe.run(input.colorImage.getMat());
sumNanos += out.nanosElapsed;
+ input.tracer.addEpoch("CpuImageProcessor::rotate");
}
if (!input.colorImage.getMat().empty()) {
@@ -79,12 +86,15 @@ public final Frame get() {
var hsvResult = m_hsvPipe.run(input.colorImage.getMat());
outputMat = new CVMat(hsvResult.output);
sumNanos += hsvResult.nanosElapsed;
+ input.tracer.addEpoch("CpuImageProcessor::processHSV");
} else if (m_processType == FrameThresholdType.GREYSCALE) {
var result = m_grayPipe.run(input.colorImage.getMat());
outputMat = new CVMat(result.output);
sumNanos += result.nanosElapsed;
+ input.tracer.addEpoch("CpuImageProcessor::processGray");
} else {
outputMat = new CVMat();
+ input.tracer.addEpoch("CpuImageProcessor::processNone");
}
++sequenceID;
@@ -99,7 +109,8 @@ public final Frame get() {
outputMat,
m_processType,
input.captureTimestamp,
- input.staticProps.rotate(m_rImagePipe.getParams().rotation));
+ input.staticProps.rotate(m_rImagePipe.getParams().rotation),
+ input.tracer);
}
@Override
diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java
index 7cc0846188..3616b3e55c 100644
--- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java
+++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java
@@ -22,6 +22,7 @@
import java.nio.file.Paths;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
+import org.photonvision.common.util.OrderedTracer;
import org.photonvision.common.util.math.MathUtils;
import org.photonvision.vision.calibration.CameraCalibrationCoefficients;
import org.photonvision.vision.frame.FrameProvider;
@@ -98,6 +99,8 @@ public FileFrameProvider(Path path, double fov) {
@Override
public CapturedFrame getInputMat() {
+ var tracer = new OrderedTracer();
+
var out = new CVMat();
out.copyFrom(originalFrame);
@@ -111,7 +114,10 @@ public CapturedFrame getInputMat() {
}
lastGetMillis = System.currentTimeMillis();
- return new CapturedFrame(out, properties, MathUtils.wpiNanoTime());
+
+ tracer.addEpoch("FileFrame::fakeDelay");
+
+ return new CapturedFrame(out, properties, MathUtils.wpiNanoTime(), tracer);
}
@Override
diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java
index 513deaadaf..3bc32fbc20 100644
--- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java
+++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java
@@ -20,6 +20,7 @@
import org.opencv.core.Mat;
import org.photonvision.common.logging.LogGroup;
import org.photonvision.common.logging.Logger;
+import org.photonvision.common.util.OrderedTracer;
import org.photonvision.common.util.math.MathUtils;
import org.photonvision.raspi.LibCameraJNI;
import org.photonvision.vision.camera.LibcameraGpuSettables;
@@ -51,11 +52,15 @@ public String getName() {
@Override
public Frame get() {
+ var tracer = new OrderedTracer();
+
// We need to make sure that other threads don't try to change video modes while
// we're waiting for a frame
// System.out.println("GET!");
synchronized (settables.CAMERA_LOCK) {
+ tracer.addEpoch("Libcam::lockAcquired");
var p_ptr = LibCameraJNI.awaitNewFrame(settables.r_ptr);
+ tracer.addEpoch("Libcam::gotNewFrame");
if (p_ptr == 0) {
logger.error("No new frame from " + settables.getConfiguration().nickname);
@@ -74,6 +79,7 @@ public Frame get() {
var colorMat = new CVMat(new Mat(LibCameraJNI.takeColorFrame(p_ptr)));
var processedMat = new CVMat(new Mat(LibCameraJNI.takeProcessedFrame(p_ptr)));
+ tracer.addEpoch("Libcam::takeFrames");
// System.out.println("Color mat: " + colorMat.getMat().size());
@@ -91,6 +97,7 @@ public Frame get() {
var latency = (now - capture);
LibCameraJNI.releasePair(p_ptr);
+ tracer.addEpoch("Libcam::releaseFrames");
// Know frame is good -- increment sequence
++sequenceID;
@@ -101,7 +108,8 @@ public Frame get() {
processedMat,
type,
MathUtils.wpiNanoTime() - latency,
- settables.getFrameStaticProperties().rotate(settables.getRotation()));
+ settables.getFrameStaticProperties().rotate(settables.getRotation()),
+ tracer);
}
}
diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java
index f28cabdd91..8447c110b6 100644
--- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java
+++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java
@@ -20,6 +20,7 @@
import edu.wpi.first.cscore.CvSink;
import org.photonvision.common.logging.LogGroup;
import org.photonvision.common.logging.Logger;
+import org.photonvision.common.util.OrderedTracer;
import org.photonvision.vision.opencv.CVMat;
import org.photonvision.vision.processes.VisionSourceSettables;
@@ -42,18 +43,22 @@ public USBFrameProvider(CvSink sink, VisionSourceSettables visionSettables) {
@Override
public CapturedFrame getInputMat() {
+ var tracer = new OrderedTracer();
+
// We allocate memory so we don't fill a Mat in use by another thread (memory model is easier)
var mat = new CVMat();
// This is from wpi::Now, or WPIUtilJNI.now(). The epoch from grabFrame is uS since
// Hal::initialize was called
long captureTimeNs = cvSink.grabFrame(mat.getMat()) * 1000;
+ tracer.addEpoch("UsbFrameProvider::grabFrame");
+
if (captureTimeNs == 0) {
var error = cvSink.getError();
logger.error("Error grabbing image: " + error);
}
- return new CapturedFrame(mat, settables.getFrameStaticProperties(), captureTimeNs);
+ return new CapturedFrame(mat, settables.getFrameStaticProperties(), captureTimeNs, tracer);
}
@Override
diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java
index 5cc9cc98f7..518ed5e7e8 100644
--- a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java
+++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java
@@ -127,6 +127,8 @@ protected void setPipeParamsImpl() {
@Override
protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings settings) {
+ frame.m_tracer.addEpoch("AtagPL::process start");
+
long sumPipeNanosElapsed = 0L;
if (frame.type != FrameThresholdType.GREYSCALE) {
@@ -137,6 +139,7 @@ protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings setting
CVPipeResult<List<AprilTagDetection>> tagDetectionPipeResult;
tagDetectionPipeResult = aprilTagDetectionPipe.run(frame.processedImage);
sumPipeNanosElapsed += tagDetectionPipeResult.nanosElapsed;
+ frame.m_tracer.addEpoch("AtagPL::process tagDetect");
List<AprilTagDetection> detections = tagDetectionPipeResult.output;
List<AprilTagDetection> usedDetections = new ArrayList<>();
@@ -162,6 +165,7 @@ protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings setting
targetList.add(target);
}
+ frame.m_tracer.addEpoch("AtagPL::process filter");
// Do multi-tag pose estimation
Optional multiTagResult = Optional.empty();
@@ -170,6 +174,7 @@ protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings setting
sumPipeNanosElapsed += multiTagOutput.nanosElapsed;
multiTagResult = multiTagOutput.output;
}
+ frame.m_tracer.addEpoch("AtagPL::process multitagPoseEst");
// Do single-tag pose estimation
if (settings.solvePNPEnabled) {
@@ -231,6 +236,7 @@ protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings setting
targetList.add(target);
}
}
+ frame.m_tracer.addEpoch("AtagPL::process collectResults");
var fpsResult = calculateFPSPipe.run(null);
var fps = fpsResult.output;
diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java
index 91dc44a58d..669fd29edd 100644
--- a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java
+++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java
@@ -73,7 +73,10 @@ public R run(Frame frame, QuirkyCamera cameraQuirks) {
if (settings == null) {
throw new RuntimeException("No settings provided for pipeline!");
}
+
+ frame.m_tracer.addEpoch("CvPipeline::run");
setPipeParams(frame.frameStaticProperties, settings, cameraQuirks);
+ frame.m_tracer.addEpoch("CvPipeline::setPipeParams");
// if (frame.image.getMat().empty()) {
// //noinspection unchecked
diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java
index ec4f1e609c..a1844f99ce 100644
--- a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java
+++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java
@@ -161,7 +161,8 @@ protected CVPipelineResult process(Frame frame, Calibration3dPipelineSettings se
new CVMat(),
outputColorCVMat,
FrameThresholdType.NONE,
- frame.frameStaticProperties),
+ frame.frameStaticProperties,
+ frame.m_tracer),
getCornersList());
}
diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java
index 2da0f23a36..5f278671ab 100644
--- a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java
+++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java
@@ -87,7 +87,8 @@ public DriverModePipelineResult process(Frame frame, DriverModePipelineSettings
frame.processedImage,
frame.colorImage,
frame.type,
- frame.frameStaticProperties));
+ frame.frameStaticProperties,
+ frame.m_tracer));
}
@Override
diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java
index 36b2e0eea8..c77109ca27 100644
--- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java
+++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java
@@ -41,6 +41,7 @@
import org.photonvision.common.hardware.HardwareManager;
import org.photonvision.common.logging.LogGroup;
import org.photonvision.common.logging.Logger;
+import org.photonvision.common.util.EpochPublisher;
import org.photonvision.common.util.SerializationUtils;
import org.photonvision.vision.calibration.CameraCalibrationCoefficients;
import org.photonvision.vision.camera.CameraQuirk;
@@ -81,6 +82,7 @@ public class VisionModule {
private final StatusLEDConsumer statusLEDsConsumer;
protected final int moduleIndex;
protected final QuirkyCamera cameraQuirks;
+ private final EpochPublisher epochPublisher;
protected TrackedTarget lastPipelineResultBestTarget;
@@ -147,6 +149,7 @@ public VisionModule(PipelineManager pipelineManager, VisionSource visionSource,
this::setPipeline,
pipelineManager::getDriverMode,
this::setDriverMode);
+ epochPublisher = new EpochPublisher(visionSource.getSettables().getConfiguration().nickname);
uiDataConsumer = new UIDataPublisher(index);
statusLEDsConsumer = new StatusLEDConsumer(index);
addResultConsumer(ntConsumer);
@@ -583,7 +586,11 @@ public void addResultConsumer(CVPipelineResultConsumer dataConsumer) {
}
private void consumeResult(CVPipelineResult result) {
+ var tracer = result.inputAndOutputFrame.m_tracer;
+
+ tracer.addEpoch("VisionModule::consume start");
consumePipelineResult(result);
+ tracer.addEpoch("VisionModule::consume pipelineResult");
// Pipelines like DriverMode and Calibrate3dPipeline have null output frames
if (result.inputAndOutputFrame != null
@@ -597,6 +604,10 @@ private void consumeResult(CVPipelineResult result) {
result.release();
// In this case we don't bother with a separate streaming thread and we release
}
+ tracer.addEpoch("VisionModule::consume streams");
+
+ // tracer.printEpochs();
+ epochPublisher.consume(tracer);
}
private void consumePipelineResult(CVPipelineResult result) {
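
(Reference only, not part of the patch: EpochPublisher is added elsewhere in this PR and is not shown in this excerpt. As a rough illustration of what such a consumer might look like — not the actual class — assuming it forwards epoch names and times to NetworkTables; the table path and topic names below are made up.)

    import edu.wpi.first.networktables.IntegerArrayPublisher;
    import edu.wpi.first.networktables.NetworkTableInstance;
    import edu.wpi.first.networktables.StringArrayPublisher;
    import org.photonvision.common.util.OrderedTracer;

    public class EpochPublisherSketch {
        private final StringArrayPublisher m_names;
        private final IntegerArrayPublisher m_timesUs;

        public EpochPublisherSketch(String nickname) {
            var table =
                    NetworkTableInstance.getDefault().getTable("photonvision").getSubTable(nickname);
            m_names = table.getStringArrayTopic("epochNames").publish();
            m_timesUs = table.getIntegerArrayTopic("epochTimesUs").publish();
        }

        /** Publishes the epochs recorded by the given tracer (microseconds, insertion order). */
        public void consume(OrderedTracer tracer) {
            m_names.set(tracer.getEpochNames());
            m_timesUs.set(tracer.getEpochTimes());
        }
    }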