Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Undistort pitch/yaw using opencv #1250

Merged
merged 7 commits into from
May 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -16,13 +16,18 @@
*/
package org.photonvision.vision.target;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.RotatedRect;
import org.opencv.core.TermCriteria;
import org.photonvision.common.util.math.MathUtils;
import org.photonvision.common.util.numbers.DoubleCouple;
import org.photonvision.vision.calibration.CameraCalibrationCoefficients;
import org.photonvision.vision.opencv.DualOffsetValues;

public class TargetCalculations {

/**
* Calculates the yaw and pitch of a point in the image. Yaw and pitch must be calculated together
* to account for perspective distortion. Yaw is positive right, and pitch is positive up.
Expand All @@ -33,6 +38,7 @@ public class TargetCalculations {
* @param offsetCenterY The Y value of the offset principal point (cy) in pixels
* @param targetCenterY The Y value of the target's center point in pixels
* @param verticalFocalLength The vertical focal length (fy) in pixels
* @param cameraCal Camera calibration parameters, or null if not calibrated
* @return The yaw and pitch from the principal axis to the target center, in degrees.
*/
public static DoubleCouple calculateYawPitch(
Expand All @@ -41,7 +47,34 @@ public static DoubleCouple calculateYawPitch(
double horizontalFocalLength,
double offsetCenterY,
double targetCenterY,
double verticalFocalLength) {
double verticalFocalLength,
CameraCalibrationCoefficients cameraCal) {

if (cameraCal != null) {
// undistort
MatOfPoint2f temp = new MatOfPoint2f();
temp.fromArray(new Point(targetCenterX, targetCenterY));
// Tighten up termination criteria
var termCriteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 30, 1e-6);
Calib3d.undistortImagePoints(
temp,
temp,
cameraCal.getCameraIntrinsicsMat(),
cameraCal.getDistCoeffsMat(),
termCriteria);
float buff[] = new float[2];
temp.get(0, 0, buff);
temp.release();

// If the point is outside the imager, convergence fails, or the user's camera
// calibration is very bad, undistort will fail by giving us NaNs. At some point
// we should log this failure. If we can't undistort, don't change the center location
if (Float.isFinite(buff[0]) && Float.isFinite(buff[1])) {
targetCenterX = buff[0];
targetCenterY = buff[1];
}
}

double yaw = Math.atan((targetCenterX - offsetCenterX) / horizontalFocalLength);
double pitch =
Math.atan((offsetCenterY - targetCenterY) / (verticalFocalLength / Math.cos(yaw)));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import org.photonvision.targeting.PhotonTrackedTarget;
import org.photonvision.targeting.TargetCorner;
import org.photonvision.vision.aruco.ArucoDetectionResult;
import org.photonvision.vision.calibration.CameraCalibrationCoefficients;
import org.photonvision.vision.frame.FrameStaticProperties;
import org.photonvision.vision.opencv.CVShape;
import org.photonvision.vision.opencv.Contour;
Expand Down Expand Up @@ -92,7 +93,8 @@ public TrackedTarget(
params.horizontalFocalLength,
params.cameraCenterPoint.y,
tagDetection.getCenterY(),
params.verticalFocalLength);
params.verticalFocalLength,
params.cameraCal);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();
var bestPose = new Transform3d();
Expand Down Expand Up @@ -187,7 +189,8 @@ public TrackedTarget(
params.horizontalFocalLength,
params.cameraCenterPoint.y,
result.getCenterY(),
params.verticalFocalLength);
params.verticalFocalLength,
params.cameraCal);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();

Expand Down Expand Up @@ -323,7 +326,8 @@ public void calculateValues(TargetCalculationParameters params) {
params.horizontalFocalLength,
m_robotOffsetPoint.y,
m_targetOffsetPoint.y,
params.verticalFocalLength);
params.verticalFocalLength,
params.cameraCal);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();

Expand Down Expand Up @@ -480,6 +484,9 @@ public static class TargetCalculationParameters {
// area calculation values
final double imageArea;

// Camera calibration, null if not calibrated
final CameraCalibrationCoefficients cameraCal;

public TargetCalculationParameters(
boolean isLandscape,
TargetOffsetPointEdge targetOffsetPointEdge,
Expand All @@ -489,7 +496,8 @@ public TargetCalculationParameters(
Point cameraCenterPoint,
double horizontalFocalLength,
double verticalFocalLength,
double imageArea) {
double imageArea,
CameraCalibrationCoefficients cal) {

this.isLandscape = isLandscape;
this.targetOffsetPointEdge = targetOffsetPointEdge;
Expand All @@ -500,6 +508,7 @@ public TargetCalculationParameters(
this.horizontalFocalLength = horizontalFocalLength;
this.verticalFocalLength = verticalFocalLength;
this.imageArea = imageArea;
this.cameraCal = cal;
}

public TargetCalculationParameters(
Expand All @@ -520,6 +529,7 @@ public TargetCalculationParameters(
this.horizontalFocalLength = frameStaticProperties.horizontalFocalLength;
this.verticalFocalLength = frameStaticProperties.verticalFocalLength;
this.imageArea = frameStaticProperties.imageArea;
this.cameraCal = frameStaticProperties.cameraCalibration;
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import edu.wpi.first.math.geometry.Rotation3d;
import edu.wpi.first.math.geometry.Translation3d;
import java.util.List;
import java.util.stream.Stream;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
Expand All @@ -33,12 +34,15 @@
import org.opencv.imgproc.Imgproc;
import org.photonvision.common.util.TestUtils;
import org.photonvision.common.util.numbers.DoubleCouple;
import org.photonvision.vision.calibration.CameraCalibrationCoefficients;
import org.photonvision.vision.calibration.CameraLensModel;
import org.photonvision.vision.calibration.JsonMatOfDouble;
import org.photonvision.vision.frame.FrameStaticProperties;
import org.photonvision.vision.opencv.DualOffsetValues;

public class TargetCalculationsTest {

private static Size imageSize = new Size(800, 600);
private static Size imageSize = new Size(1280, 720);
private static Point imageCenterPoint =
new Point(imageSize.width / 2.0 - 0.5, imageSize.height / 2.0 - 0.5);
private static final double diagFOV = Math.toRadians(70.0);
Expand All @@ -55,7 +59,8 @@ public class TargetCalculationsTest {
imageCenterPoint,
props.horizontalFocalLength,
props.verticalFocalLength,
imageSize.width * imageSize.height);
imageSize.width * imageSize.height,
null);

@BeforeAll
public static void setup() {
Expand All @@ -76,7 +81,8 @@ public void testYawPitchBehavior() {
params.horizontalFocalLength,
imageCenterPoint.y,
targetCenterPoint.y,
params.verticalFocalLength);
params.verticalFocalLength,
params.cameraCal);

assertTrue(targetYawPitch.getFirst() > 0, "Yaw is not positive right");
assertTrue(targetYawPitch.getSecond() < 0, "Pitch is not positive up");
Expand All @@ -91,7 +97,8 @@ public void testYawPitchBehavior() {
params.horizontalFocalLength,
imageCenterPoint.y,
imageCenterPoint.y,
params.verticalFocalLength);
params.verticalFocalLength,
params.cameraCal);
assertEquals(fovs.getFirst() / 2.0, maxYaw.getFirst(), 0.025, "Horizontal FOV check failed");
var maxPitch =
TargetCalculations.calculateYawPitch(
Expand All @@ -100,7 +107,8 @@ public void testYawPitchBehavior() {
params.horizontalFocalLength,
imageCenterPoint.y,
0,
params.verticalFocalLength);
params.verticalFocalLength,
params.cameraCal);
assertEquals(fovs.getSecond() / 2.0, maxPitch.getSecond(), 0.025, "Vertical FOV check failed");
}

Expand All @@ -112,17 +120,40 @@ private static Stream<Arguments> testYawPitchCalcArgs() {
Arguments.of(0, 10),
Arguments.of(10, 10),
Arguments.of(-10, -10),
Arguments.of(30, 45),
Arguments.of(-45, -20));
Arguments.of(-18, 14),
Arguments.of(-23, -16));
}

private static double[] testCameraMatrix = {240, 0, 320, 0, 240, 320, 0, 0, 1};

@ParameterizedTest
@MethodSource("testYawPitchCalcArgs")
public void testYawPitchCalc(double yawDeg, double pitchDeg) {
Mat testCameraMat = new Mat(3, 3, CvType.CV_64F);
testCameraMat.put(0, 0, testCameraMatrix);
// FOV: ~58.5 deg horizontal, ~35 deg vertical
JsonMatOfDouble testCameraMatrix =
new JsonMatOfDouble(
3, 3, new double[] {1142.341323, 0, 621.384201309, 0, 1139.92214, 349.897631, 0, 0, 1});
JsonMatOfDouble testDistortion =
new JsonMatOfDouble(
5,
1,
new double[] {
0.186841202993646,
-1.482894102216622,
0.005692954661309707,
0.0006757267756945662,
2.8659664873321287
});
double IMAGER_WIDTH = 1280, IMAGER_HEIGHT = 720;
var testCameraCal =
new CameraCalibrationCoefficients(
imageSize,
testCameraMatrix,
testDistortion,
new double[0],
List.of(),
new Size(),
0,
CameraLensModel.LENSMODEL_OPENCV);

// Since we create this translation using the given yaw/pitch, we should see the same angles
// calculated
var targetTrl =
Expand All @@ -136,21 +167,33 @@ public void testYawPitchCalc(double yawDeg, double pitchDeg) {
objectPoints,
new MatOfDouble(0, 0, 0),
new MatOfDouble(0, 0, 0),
testCameraMat,
new MatOfDouble(0, 0, 0, 0, 0),
testCameraCal.getCameraIntrinsicsMat(),
testCameraCal.getDistCoeffsMat(),
imagePoints);
var point = imagePoints.toArray()[0];

// need point within FOV to be valid
assertTrue(Math.abs(point.x) >= 0);
assertTrue(Math.abs(point.x) <= IMAGER_WIDTH);
assertTrue(Math.abs(point.y) >= 0);
assertTrue(Math.abs(point.y) <= IMAGER_HEIGHT);

// Test if the target yaw/pitch calculation matches what the target was created with
var yawPitch =
TargetCalculations.calculateYawPitch(
testCameraCal.cameraIntrinsics.data[2],
point.x,
testCameraMatrix[2],
testCameraMatrix[0],
testCameraCal.cameraIntrinsics.data[0],
testCameraCal.cameraIntrinsics.data[5],
point.y,
testCameraMatrix[5],
testCameraMatrix[4]);
assertEquals(yawDeg, yawPitch.getFirst(), 1e-3, "Yaw calculation incorrect");
assertEquals(pitchDeg, yawPitch.getSecond(), 1e-3, "Pitch calculation incorrect");
testCameraCal.cameraIntrinsics.data[4],
testCameraCal);
// convert photon angles to wpilib NWU angles
assertEquals(yawDeg, -yawPitch.getFirst(), 1e-3, "Yaw calculation incorrect");
assertEquals(pitchDeg, -yawPitch.getSecond(), 1e-3, "Pitch calculation incorrect");

testCameraCal.release();
testDistortion.release();
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,8 @@ void axisTest() {
new Point(imageSize.width / 2, imageSize.height / 2),
61,
34.3,
imageSize.area());
imageSize.area(),
null);

var trackedTarget = new TrackedTarget(pTarget, setting, null);
// TODO change these hardcoded values
Expand Down
Loading