001/*
002 * MIT License
003 *
004 * Copyright (c) PhotonVision
005 *
006 * Permission is hereby granted, free of charge, to any person obtaining a copy
007 * of this software and associated documentation files (the "Software"), to deal
008 * in the Software without restriction, including without limitation the rights
009 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
010 * copies of the Software, and to permit persons to whom the Software is
011 * furnished to do so, subject to the following conditions:
012 *
013 * The above copyright notice and this permission notice shall be included in all
014 * copies or substantial portions of the Software.
015 *
016 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
017 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
018 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
019 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
020 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
021 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
022 * SOFTWARE.
023 */
024
025package org.photonvision.simulation;
026
027import com.fasterxml.jackson.databind.ObjectMapper;
028import edu.wpi.first.math.MatBuilder;
029import edu.wpi.first.math.MathUtil;
030import edu.wpi.first.math.Matrix;
031import edu.wpi.first.math.Nat;
032import edu.wpi.first.math.Pair;
033import edu.wpi.first.math.VecBuilder;
034import edu.wpi.first.math.Vector;
035import edu.wpi.first.math.geometry.Pose3d;
036import edu.wpi.first.math.geometry.Rotation2d;
037import edu.wpi.first.math.geometry.Rotation3d;
038import edu.wpi.first.math.geometry.Translation3d;
039import edu.wpi.first.math.numbers.*;
040import edu.wpi.first.wpilibj.DriverStation;
041import java.io.IOException;
042import java.nio.file.Path;
043import java.util.ArrayList;
044import java.util.Arrays;
045import java.util.List;
046import java.util.Random;
047import org.ejml.data.DMatrix3;
048import org.ejml.dense.fixed.CommonOps_DDF3;
049import org.opencv.core.MatOfPoint2f;
050import org.opencv.core.Point;
051import org.opencv.imgproc.Imgproc;
052import org.photonvision.estimation.OpenCVHelp;
053import org.photonvision.estimation.RotTrlTransform3d;
054
055/**
056 * Calibration and performance values for this camera.
057 *
058 * <p>The resolution will affect the accuracy of projected(3d to 2d) target corners and similarly
059 * the severity of image noise on estimation(2d to 3d).
060 *
061 * <p>The camera intrinsics and distortion coefficients describe the results of calibration, and how
062 * to map between 3d field points and 2d image points.
063 *
064 * <p>The performance values (framerate/exposure time, latency) determine how often results should
065 * be updated and with how much latency in simulation. High exposure time causes motion blur which
066 * can inhibit target detection while moving. Note that latency estimation does not account for
067 * network latency and the latency reported will always be perfect.
068 */
069public class SimCameraProperties {
    // Noise source for pixel error and latency estimation; reseedable via setRandomSeed().
    private final Random rand = new Random();
    // calibration
    private int resWidth; // image width (pixels)
    private int resHeight; // image height (pixels)
    private Matrix<N3, N3> camIntrinsics; // 3x3 pinhole camera matrix [fx 0 cx; 0 fy cy; 0 0 1]
    private Matrix<N8, N1> distCoeffs; // distortion coefficients, OPENCV8 form
    private double avgErrorPx; // average calibration reprojection error (pixels)
    private double errorStdDevPx; // std. dev. of calibration reprojection error (pixels)
    // performance
    private double frameSpeedMs = 0; // time per frame; kept >= exposureTimeMs by the setters
    private double exposureTimeMs = 0; // "shutter" open time per frame (causes motion blur)
    private double avgLatencyMs = 0; // average capture-to-publish latency
    private double latencyStdDevMs = 0; // std. dev. of the latency
    // util
    // Inward-facing normals of the left/right/up/down viewplanes bounding the camera frustum,
    // in camera coordinates; rebuilt by setCalibration() and used by getVisibleLine().
    private final List<DMatrix3> viewplanes = new ArrayList<>();
085
    /**
     * Default constructor which is the same as {@link #PERFECT_90DEG}: 960x720 resolution, 90 degree
     * diagonal FOV, and zero error/latency (fields default to 0).
     */
    public SimCameraProperties() {
        setCalibration(960, 720, Rotation2d.fromDegrees(90));
    }
090
091    /**
092     * Reads camera properties from a PhotonVision <code>config.json</code> file. This is only the
093     * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error.
094     * Other camera properties must be set.
095     *
096     * @param path Path to the <code>config.json</code> file
097     * @param width The width of the desired resolution in the JSON
098     * @param height The height of the desired resolution in the JSON
099     * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid
100     *     calibrated resolution.
101     */
102    public SimCameraProperties(String path, int width, int height) throws IOException {
103        this(Path.of(path), width, height);
104    }
105
106    /**
107     * Reads camera properties from a PhotonVision <code>config.json</code> file. This is only the
108     * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error.
109     * Other camera properties must be set.
110     *
111     * @param path Path to the <code>config.json</code> file
112     * @param width The width of the desired resolution in the JSON
113     * @param height The height of the desired resolution in the JSON
114     * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid
115     *     calibrated resolution.
116     */
117    public SimCameraProperties(Path path, int width, int height) throws IOException {
118        var mapper = new ObjectMapper();
119        var json = mapper.readTree(path.toFile());
120        json = json.get("calibrations");
121        boolean success = false;
122        try {
123            for (int i = 0; i < json.size() && !success; i++) {
124                // check if this calibration entry is our desired resolution
125                var calib = json.get(i);
126                int jsonWidth = calib.get("resolution").get("width").asInt();
127                int jsonHeight = calib.get("resolution").get("height").asInt();
128                if (jsonWidth != width || jsonHeight != height) continue;
129                // get the relevant calibration values
130                var jsonIntrinsicsNode = calib.get("cameraIntrinsics").get("data");
131                double[] jsonIntrinsics = new double[jsonIntrinsicsNode.size()];
132                for (int j = 0; j < jsonIntrinsicsNode.size(); j++) {
133                    jsonIntrinsics[j] = jsonIntrinsicsNode.get(j).asDouble();
134                }
135                var jsonDistortNode = calib.get("distCoeffs").get("data");
136                double[] jsonDistortion = new double[8];
137                Arrays.fill(jsonDistortion, 0);
138                for (int j = 0; j < jsonDistortNode.size(); j++) {
139                    jsonDistortion[j] = jsonDistortNode.get(j).asDouble();
140                }
141                var jsonViewErrors = calib.get("perViewErrors");
142                double jsonAvgError = 0;
143                for (int j = 0; j < jsonViewErrors.size(); j++) {
144                    jsonAvgError += jsonViewErrors.get(j).asDouble();
145                }
146                jsonAvgError /= jsonViewErrors.size();
147                double jsonErrorStdDev = calib.get("standardDeviation").asDouble();
148                // assign the read JSON values to this CameraProperties
149                setCalibration(
150                        jsonWidth,
151                        jsonHeight,
152                        MatBuilder.fill(Nat.N3(), Nat.N3(), jsonIntrinsics),
153                        MatBuilder.fill(Nat.N8(), Nat.N1(), jsonDistortion));
154                setCalibError(jsonAvgError, jsonErrorStdDev);
155                success = true;
156            }
157        } catch (Exception e) {
158            throw new IOException("Invalid calibration JSON");
159        }
160        if (!success) throw new IOException("Requested resolution not found in calibration");
161    }
162
    /**
     * Seeds the random number generator used by {@link #estPixelNoise(Point[])} and
     * {@link #estLatencyMs()}, for reproducible simulation results.
     *
     * @param seed The seed for the internal {@link Random}
     * @return This camera properties object for use in chaining
     */
    public SimCameraProperties setRandomSeed(long seed) {
        rand.setSeed(seed);
        return this;
    }
167
168    public SimCameraProperties setCalibration(int resWidth, int resHeight, Rotation2d fovDiag) {
169        if (fovDiag.getDegrees() < 1 || fovDiag.getDegrees() > 179) {
170            fovDiag = Rotation2d.fromDegrees(MathUtil.clamp(fovDiag.getDegrees(), 1, 179));
171            DriverStation.reportError(
172                    "Requested invalid FOV! Clamping between (1, 179) degrees...", false);
173        }
174        double resDiag = Math.hypot(resWidth, resHeight);
175        double diagRatio = Math.tan(fovDiag.getRadians() / 2);
176        var fovWidth = new Rotation2d(Math.atan(diagRatio * (resWidth / resDiag)) * 2);
177        var fovHeight = new Rotation2d(Math.atan(diagRatio * (resHeight / resDiag)) * 2);
178
179        // assume no distortion
180        var distCoeff = VecBuilder.fill(0, 0, 0, 0, 0, 0, 0, 0);
181
182        // assume centered principal point (pixels)
183        double cx = resWidth / 2.0 - 0.5;
184        double cy = resHeight / 2.0 - 0.5;
185
186        // use given fov to determine focal point (pixels)
187        double fx = cx / Math.tan(fovWidth.getRadians() / 2.0);
188        double fy = cy / Math.tan(fovHeight.getRadians() / 2.0);
189
190        // create camera intrinsics matrix
191        var camIntrinsics = MatBuilder.fill(Nat.N3(), Nat.N3(), fx, 0, cx, 0, fy, cy, 0, 0, 1);
192        setCalibration(resWidth, resHeight, camIntrinsics, distCoeff);
193
194        return this;
195    }
196
    /**
     * Sets this camera's calibration directly from its resolution, intrinsics matrix, and
     * distortion coefficients, and rebuilds the frustum viewplanes used by
     * {@link #getVisibleLine(RotTrlTransform3d, Translation3d, Translation3d)}.
     *
     * @param resWidth The horizontal resolution in pixels
     * @param resHeight The vertical resolution in pixels
     * @param camIntrinsics The 3x3 pinhole camera intrinsics matrix
     * @param distCoeffs The distortion coefficients in OPENCV8 form
     * @return This camera properties object for use in chaining
     */
    public SimCameraProperties setCalibration(
            int resWidth, int resHeight, Matrix<N3, N3> camIntrinsics, Matrix<N8, N1> distCoeffs) {
        this.resWidth = resWidth;
        this.resHeight = resHeight;
        this.camIntrinsics = camIntrinsics;
        this.distCoeffs = distCoeffs;

        // left, right, up, and down view planes
        // each unit translation is rotated a quarter-turn past the FOV edge so it points
        // inward, acting as the plane's normal for visibility tests
        Translation3d[] p = {
            new Translation3d(
                    1, new Rotation3d(0, 0, getPixelYaw(0).plus(new Rotation2d(-Math.PI / 2)).getRadians())),
            new Translation3d(
                    1,
                    new Rotation3d(
                            0, 0, getPixelYaw(resWidth).plus(new Rotation2d(Math.PI / 2)).getRadians())),
            new Translation3d(
                    1, new Rotation3d(0, getPixelPitch(0).plus(new Rotation2d(Math.PI / 2)).getRadians(), 0)),
            new Translation3d(
                    1,
                    new Rotation3d(
                            0, getPixelPitch(resHeight).plus(new Rotation2d(-Math.PI / 2)).getRadians(), 0))
        };
        viewplanes.clear();
        for (Translation3d translation3d : p) {
            viewplanes.add(
                    new DMatrix3(translation3d.getX(), translation3d.getY(), translation3d.getZ()));
        }

        return this;
    }
227
    /**
     * Sets the average and standard deviation of this camera's calibration reprojection error,
     * used by {@link #estPixelNoise(Point[])} to add noise to projected corners.
     *
     * @param avgErrorPx Average error in pixels
     * @param errorStdDevPx Standard deviation of the error in pixels
     * @return This camera properties object for use in chaining
     */
    public SimCameraProperties setCalibError(double avgErrorPx, double errorStdDevPx) {
        this.avgErrorPx = avgErrorPx;
        this.errorStdDevPx = errorStdDevPx;
        return this;
    }
233
234    /**
235     * Sets the simulated FPS for this camera.
236     *
237     * @param fps The average frames per second the camera should process at. <b>Exposure time limits
238     *     FPS if set!</b>
239     * @return This camera properties object for use in chaining
240     */
241    public SimCameraProperties setFPS(double fps) {
242        frameSpeedMs = Math.max(1000.0 / fps, exposureTimeMs);
243
244        return this;
245    }
246
247    /**
248     * Sets the simulated exposure time for this camera.
249     *
250     * @param exposureTimeMs The amount of time the "shutter" is open for one frame. Affects motion
251     *     blur. <b>Frame speed(from FPS) is limited to this!</b>
252     * @return This camera properties object for use in chaining
253     */
254    public SimCameraProperties setExposureTimeMs(double exposureTimeMs) {
255        this.exposureTimeMs = exposureTimeMs;
256        frameSpeedMs = Math.max(frameSpeedMs, exposureTimeMs);
257
258        return this;
259    }
260
261    /**
262     * Sets the simulated latency for this camera.
263     *
264     * @param avgLatencyMs The average latency (from image capture to data published) in milliseconds
265     *     a frame should have
266     * @return This camera properties object for use in chaining
267     */
268    public SimCameraProperties setAvgLatencyMs(double avgLatencyMs) {
269        this.avgLatencyMs = avgLatencyMs;
270
271        return this;
272    }
273
274    /**
275     * Sets the simulated latency variation for this camera.
276     *
277     * @param latencyStdDevMs The standard deviation in milliseconds of the latency
278     * @return This camera properties object for use in chaining
279     */
280    public SimCameraProperties setLatencyStdDevMs(double latencyStdDevMs) {
281        this.latencyStdDevMs = latencyStdDevMs;
282
283        return this;
284    }
285
286    /**
287     * Gets the width of the simulated camera image.
288     *
289     * @return The width in pixels
290     */
291    public int getResWidth() {
292        return resWidth;
293    }
294
295    /**
296     * Gets the height of the simulated camera image.
297     *
298     * @return The height in pixels
299     */
300    public int getResHeight() {
301        return resHeight;
302    }
303
304    /**
305     * Gets the area of the simulated camera image.
306     *
307     * @return The area in pixels
308     */
309    public int getResArea() {
310        return resWidth * resHeight;
311    }
312
    /** The aspect ratio of the simulated image, expressed as width divided by height. */
    public double getAspectRatio() {
        return (double) resWidth / resHeight;
    }
317
    /**
     * Gets the camera intrinsics matrix.
     *
     * @return A defensive copy of the 3x3 pinhole intrinsics matrix
     */
    public Matrix<N3, N3> getIntrinsics() {
        return camIntrinsics.copy();
    }
321
322    /**
323     * Returns the camera calibration's distortion coefficients, in OPENCV8 form. Higher-order terms
324     * are set to 0
325     *
326     * @return The distortion coefficients in an 8d vector
327     */
328    public Vector<N8> getDistCoeffs() {
329        return new Vector<>(distCoeffs);
330    }
331
332    /**
333     * Gets the FPS of the simulated camera.
334     *
335     * @return The FPS
336     */
337    public double getFPS() {
338        return 1000.0 / frameSpeedMs;
339    }
340
341    /**
342     * Gets the time per frame of the simulated camera.
343     *
344     * @return The time per frame in milliseconds
345     */
346    public double getFrameSpeedMs() {
347        return frameSpeedMs;
348    }
349
350    /**
351     * Gets the exposure time of the simulated camera.
352     *
353     * @return The exposure time in milliseconds
354     */
355    public double getExposureTimeMs() {
356        return exposureTimeMs;
357    }
358
359    /**
360     * Gets the average latency of the simulated camera.
361     *
362     * @return The average latency in milliseconds
363     */
364    public double getAvgLatencyMs() {
365        return avgLatencyMs;
366    }
367
368    /**
369     * Gets the time per frame of the simulated camera.
370     *
371     * @return The time per frame in milliseconds
372     */
373    public double getLatencyStdDevMs() {
374        return latencyStdDevMs;
375    }
376
377    /**
378     * Returns a copy of the camera properties.
379     *
380     * @return The copied camera properties
381     */
382    public SimCameraProperties copy() {
383        var newProp = new SimCameraProperties();
384        newProp.setCalibration(resWidth, resHeight, camIntrinsics, distCoeffs);
385        newProp.setCalibError(avgErrorPx, errorStdDevPx);
386        newProp.setFPS(getFPS());
387        newProp.setExposureTimeMs(exposureTimeMs);
388        newProp.setAvgLatencyMs(avgLatencyMs);
389        newProp.setLatencyStdDevMs(latencyStdDevMs);
390        return newProp;
391    }
392
393    /**
394     * The percentage (0 - 100) of this camera's resolution the contour takes up in pixels of the
395     * image.
396     *
397     * @param points Points of the contour
398     * @return The percentage
399     */
400    public double getContourAreaPercent(Point[] points) {
401        return Imgproc.contourArea(new MatOfPoint2f(OpenCVHelp.getConvexHull(points)))
402                / getResArea()
403                * 100;
404    }
405
406    /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */
407    public Rotation2d getPixelYaw(double pixelX) {
408        double fx = camIntrinsics.get(0, 0);
409        // account for principal point not being centered
410        double cx = camIntrinsics.get(0, 2);
411        double xOffset = cx - pixelX;
412        return new Rotation2d(fx, xOffset);
413    }
414
415    /**
416     * The pitch from the principal point of this camera to the pixel y value. Pitch is positive down.
417     *
418     * <p>Note that this angle is naively computed and may be incorrect. See {@link
419     * #getCorrectedPixelRot(Point)}.
420     */
421    public Rotation2d getPixelPitch(double pixelY) {
422        double fy = camIntrinsics.get(1, 1);
423        // account for principal point not being centered
424        double cy = camIntrinsics.get(1, 2);
425        double yOffset = cy - pixelY;
426        return new Rotation2d(fy, -yOffset);
427    }
428
429    /**
430     * Finds the yaw and pitch to the given image point. Yaw is positive left, and pitch is positive
431     * down.
432     *
433     * <p>Note that pitch is naively computed and may be incorrect. See {@link
434     * #getCorrectedPixelRot(Point)}.
435     */
436    public Rotation3d getPixelRot(Point point) {
437        return new Rotation3d(
438                0, getPixelPitch(point.y).getRadians(), getPixelYaw(point.x).getRadians());
439    }
440
441    /**
442     * Gives the yaw and pitch of the line intersecting the camera lens and the given pixel
443     * coordinates on the sensor. Yaw is positive left, and pitch positive down.
444     *
445     * <p>The pitch traditionally calculated from pixel offsets do not correctly account for non-zero
446     * values of yaw because of perspective distortion (not to be confused with lens distortion)-- for
447     * example, the pitch angle is naively calculated as:
448     *
449     * <pre>pitch = arctan(pixel y offset / focal length y)</pre>
450     *
451     * However, using focal length as a side of the associated right triangle is not correct when the
452     * pixel x value is not 0, because the distance from this pixel (projected on the x-axis) to the
453     * camera lens increases. Projecting a line back out of the camera with these naive angles will
454     * not intersect the 3d point that was originally projected into this 2d pixel. Instead, this
455     * length should be:
456     *
457     * <pre>focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))</pre>
458     *
459     * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given
460     *     pixel (roll is zero).
461     */
462    public Rotation3d getCorrectedPixelRot(Point point) {
463        double fx = camIntrinsics.get(0, 0);
464        double cx = camIntrinsics.get(0, 2);
465        double xOffset = cx - point.x;
466
467        double fy = camIntrinsics.get(1, 1);
468        double cy = camIntrinsics.get(1, 2);
469        double yOffset = cy - point.y;
470
471        // calculate yaw normally
472        var yaw = new Rotation2d(fx, xOffset);
473        // correct pitch based on yaw
474        var pitch = new Rotation2d(fy / Math.cos(Math.atan(xOffset / fx)), -yOffset);
475
476        return new Rotation3d(0, pitch.getRadians(), yaw.getRadians());
477    }
478
    /** The horizontal field of view of this camera, from the left image edge to the right. */
    public Rotation2d getHorizFOV() {
        // sum of FOV left and right principal point
        var left = getPixelYaw(0);
        var right = getPixelYaw(resWidth);
        return left.minus(right);
    }
485
    /** The vertical field of view of this camera, from the top image edge to the bottom. */
    public Rotation2d getVertFOV() {
        // sum of FOV above and below principal point
        var above = getPixelPitch(0);
        var below = getPixelPitch(resHeight);
        return below.minus(above);
    }
492
    /** The diagonal field of view, computed as the hypotenuse of the horizontal and vertical FOVs. */
    public Rotation2d getDiagFOV() {
        return new Rotation2d(Math.hypot(getHorizFOV().getRadians(), getVertFOV().getRadians()));
    }
496
497    /**
498     * Determines where the line segment defined by the two given translations intersects the camera's
499     * frustum/field-of-vision, if at all.
500     *
501     * <p>The line is parametrized so any of its points <code>p = t * (b - a) + a</code>. This method
502     * returns these values of t, minimum first, defining the region of the line segment which is
503     * visible in the frustum. If both ends of the line segment are visible, this simply returns {0,
504     * 1}. If, for example, point b is visible while a is not, and half of the line segment is inside
505     * the camera frustum, {0.5, 1} would be returned.
506     *
507     * @param camRt The change in basis from world coordinates to camera coordinates. See {@link
508     *     RotTrlTransform3d#makeRelativeTo(Pose3d)}.
509     * @param a The initial translation of the line
510     * @param b The final translation of the line
511     * @return A Pair of Doubles. The values may be null:
512     *     <ul>
513     *       <li>{Double, Double} : Two parametrized values(t), minimum first, representing which
514     *           segment of the line is visible in the camera frustum.
515     *       <li>{Double, null} : One value(t) representing a single intersection point. For example,
516     *           the line only intersects the intersection of two adjacent viewplanes.
517     *       <li>{null, null} : No values. The line segment is not visible in the camera frustum.
518     *     </ul>
519     */
520    public Pair<Double, Double> getVisibleLine(
521            RotTrlTransform3d camRt, Translation3d a, Translation3d b) {
522        // translations relative to the camera
523        var rela = camRt.apply(a);
524        var relb = camRt.apply(b);
525
526        // check if both ends are behind camera
527        if (rela.getX() <= 0 && relb.getX() <= 0) return new Pair<>(null, null);
528
529        var av = new DMatrix3(rela.getX(), rela.getY(), rela.getZ());
530        var bv = new DMatrix3(relb.getX(), relb.getY(), relb.getZ());
531        // a to b
532        var abv = new DMatrix3();
533        CommonOps_DDF3.subtract(bv, av, abv);
534
535        // check if the ends of the line segment are visible
536        boolean aVisible = true;
537        boolean bVisible = true;
538        for (DMatrix3 normal : viewplanes) {
539            double aVisibility = CommonOps_DDF3.dot(av, normal);
540            if (aVisibility < 0) aVisible = false;
541            double bVisibility = CommonOps_DDF3.dot(bv, normal);
542            if (bVisibility < 0) bVisible = false;
543            // both ends are outside at least one of the same viewplane
544            if (aVisibility <= 0 && bVisibility <= 0) return new Pair<>(null, null);
545        }
546        // both ends are inside frustum
547        if (aVisible && bVisible) return new Pair<>((double) 0, 1.0);
548
549        // parametrized (t=0 at a, t=1 at b) intersections with viewplanes
550        double[] intersections = {Double.NaN, Double.NaN, Double.NaN, Double.NaN};
551        // intersection points
552        List<DMatrix3> ipts = new ArrayList<>();
553        for (double val : intersections) ipts.add(null);
554
555        // find intersections
556        for (int i = 0; i < viewplanes.size(); i++) {
557            var normal = viewplanes.get(i);
558
559            // we want to know the value of t when the line intercepts this plane
560            // parametrized: v = t * ab + a, where v lies on the plane
561            // we can find the projection of a onto the plane normal
562            // a_projn = normal.times(av.dot(normal) / normal.dot(normal));
563            var a_projn = new DMatrix3();
564            CommonOps_DDF3.scale(
565                    CommonOps_DDF3.dot(av, normal) / CommonOps_DDF3.dot(normal, normal), normal, a_projn);
566            // this projection lets us determine the scalar multiple t of ab where
567            // (t * ab + a) is a vector which lies on the plane
568            if (Math.abs(CommonOps_DDF3.dot(abv, a_projn)) < 1e-5) continue; // line is parallel to plane
569            intersections[i] = CommonOps_DDF3.dot(a_projn, a_projn) / -CommonOps_DDF3.dot(abv, a_projn);
570
571            // vector a to the viewplane
572            var apv = new DMatrix3();
573            CommonOps_DDF3.scale(intersections[i], abv, apv);
574            // av + apv = intersection point
575            var intersectpt = new DMatrix3();
576            CommonOps_DDF3.add(av, apv, intersectpt);
577            ipts.set(i, intersectpt);
578
579            // discard intersections outside the camera frustum
580            for (int j = 1; j < viewplanes.size(); j++) {
581                int oi = (i + j) % viewplanes.size();
582                var onormal = viewplanes.get(oi);
583                // if the dot of the intersection point with any plane normal is negative, it is outside
584                if (CommonOps_DDF3.dot(intersectpt, onormal) < 0) {
585                    intersections[i] = Double.NaN;
586                    ipts.set(i, null);
587                    break;
588                }
589            }
590
591            // discard duplicate intersections
592            if (ipts.get(i) == null) continue;
593            for (int j = i - 1; j >= 0; j--) {
594                var oipt = ipts.get(j);
595                if (oipt == null) continue;
596                var diff = new DMatrix3();
597                CommonOps_DDF3.subtract(oipt, intersectpt, diff);
598                if (CommonOps_DDF3.elementMaxAbs(diff) < 1e-4) {
599                    intersections[i] = Double.NaN;
600                    ipts.set(i, null);
601                    break;
602                }
603            }
604        }
605
606        // determine visible segment (minimum and maximum t)
607        double inter1 = Double.NaN;
608        double inter2 = Double.NaN;
609        for (double inter : intersections) {
610            if (!Double.isNaN(inter)) {
611                if (Double.isNaN(inter1)) inter1 = inter;
612                else inter2 = inter;
613            }
614        }
615
616        // two viewplane intersections
617        if (!Double.isNaN(inter2)) {
618            double max = Math.max(inter1, inter2);
619            double min = Math.min(inter1, inter2);
620            if (aVisible) min = 0;
621            if (bVisible) max = 1;
622            return new Pair<>(min, max);
623        }
624        // one viewplane intersection
625        else if (!Double.isNaN(inter1)) {
626            if (aVisible) return new Pair<>((double) 0, inter1);
627            if (bVisible) return new Pair<>(inter1, 1.0);
628            return new Pair<>(inter1, null);
629        }
630        // no intersections
631        else return new Pair<>(null, null);
632    }
633
634    /**
635     * Returns these points after applying this camera's estimated noise.
636     *
637     * @param points The points to add noise to
638     * @return The points with noise
639     */
640    public Point[] estPixelNoise(Point[] points) {
641        if (avgErrorPx == 0 && errorStdDevPx == 0) return points;
642
643        Point[] noisyPts = new Point[points.length];
644        for (int i = 0; i < points.length; i++) {
645            var p = points[i];
646            // error pixels in random direction
647            double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx;
648            double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI;
649            noisyPts[i] =
650                    new Point(p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle));
651        }
652        return noisyPts;
653    }
654
655    /**
656     * Returns an estimation of a frame's processing latency with noise added.
657     *
658     * @return The latency estimate in milliseconds
659     */
660    public double estLatencyMs() {
661        return Math.max(avgLatencyMs + rand.nextGaussian() * latencyStdDevMs, 0);
662    }
663
664    /**
665     * Estimates how long until the next frame should be processed.
666     *
667     * @return The estimated time until the next frame in milliseconds
668     */
669    public double estMsUntilNextFrame() {
670        // exceptional processing latency blocks the next frame
671        return frameSpeedMs + Math.max(0, estLatencyMs() - frameSpeedMs);
672    }
673
    // pre-calibrated example cameras

    /**
     * Creates a set of camera properties where the camera has a 960x720 resolution, 90 degree FOV,
     * and is a "perfect" lagless camera.
     *
     * @return The properties for this theoretical camera
     */
    public static SimCameraProperties PERFECT_90DEG() {
        // the no-arg constructor already produces exactly these properties
        return new SimCameraProperties();
    }
685
686    /**
687     * Creates a set of camera properties matching those of Microsoft Lifecam running on a Raspberry
688     * Pi 4 at 320x240 resolution.
689     *
690     * <p>Note that this set of properties represents <i>a camera setup</i>, not <i>your camera
691     * setup</i>. Do not use these camera properties for any non-sim vision calculations, especially
692     * the calibration data. Always use your camera's calibration data to do vision calculations in
693     * non-sim environments. These properties exist as a sample that may be used to get representative
694     * data in sim.
695     *
696     * @return The properties for this camera setup
697     */
698    public static SimCameraProperties PI4_LIFECAM_320_240() {
699        var prop = new SimCameraProperties();
700        prop.setCalibration(
701                320,
702                240,
703                MatBuilder.fill(
704                        Nat.N3(),
705                        Nat.N3(),
706                        // intrinsic
707                        328.2733242048587,
708                        0.0,
709                        164.8190261141906,
710                        0.0,
711                        318.0609794305216,
712                        123.8633838438093,
713                        0.0,
714                        0.0,
715                        1.0),
716                VecBuilder.fill( // distort
717                        0.09957946553445934,
718                        -0.9166265114485799,
719                        0.0019519890627236526,
720                        -0.0036071725380870333,
721                        1.5627234622420942,
722                        0,
723                        0,
724                        0));
725        prop.setCalibError(0.21, 0.0124);
726        prop.setFPS(30);
727        prop.setAvgLatencyMs(30);
728        prop.setLatencyStdDevMs(10);
729        return prop;
730    }
731
732    /**
733     * Creates a set of camera properties matching those of Microsoft Lifecam running on a Raspberry
734     * Pi 4 at 640x480 resolution.
735     *
736     * <p>Note that this set of properties represents <i>a camera setup</i>, not <i>your camera
737     * setup</i>. Do not use these camera properties for any non-sim vision calculations, especially
738     * the calibration data. Always use your camera's calibration data to do vision calculations in
739     * non-sim environments. These properties exist as a sample that may be used to get representative
740     * data in sim.
741     *
742     * @return The properties for this camera setup
743     */
744    public static SimCameraProperties PI4_LIFECAM_640_480() {
745        var prop = new SimCameraProperties();
746        prop.setCalibration(
747                640,
748                480,
749                MatBuilder.fill(
750                        Nat.N3(),
751                        Nat.N3(),
752                        // intrinsic
753                        669.1428078983059,
754                        0.0,
755                        322.53377249329213,
756                        0.0,
757                        646.9843137061716,
758                        241.26567383784163,
759                        0.0,
760                        0.0,
761                        1.0),
762                VecBuilder.fill( // distort
763                        0.12788470750464645,
764                        -1.2350335805796528,
765                        0.0024990767286192732,
766                        -0.0026958287600230705,
767                        2.2951386729115537,
768                        0,
769                        0,
770                        0));
771        prop.setCalibError(0.26, 0.046);
772        prop.setFPS(15);
773        prop.setAvgLatencyMs(65);
774        prop.setLatencyStdDevMs(15);
775        return prop;
776    }
777
778    /**
779     * Creates a set of camera properties matching those of a Limelight 2 running at 640x480
780     * resolution.
781     *
782     * <p>Note that this set of properties represents <i>a camera setup</i>, not <i>your camera
783     * setup</i>. Do not use these camera properties for any non-sim vision calculations, especially
784     * the calibration data. Always use your camera's calibration data to do vision calculations in
785     * non-sim environments. These properties exist as a sample that may be used to get representative
786     * data in sim.
787     *
788     * @return The properties for this camera setup
789     */
790    public static SimCameraProperties LL2_640_480() {
791        var prop = new SimCameraProperties();
792        prop.setCalibration(
793                640,
794                480,
795                MatBuilder.fill(
796                        Nat.N3(),
797                        Nat.N3(), // intrinsic
798                        511.22843367007755,
799                        0.0,
800                        323.62049380211096,
801                        0.0,
802                        514.5452336723849,
803                        261.8827920543568,
804                        0.0,
805                        0.0,
806                        1.0),
807                VecBuilder.fill( // distort
808                        0.1917469998873756,
809                        -0.5142936883324216,
810                        0.012461562046896614,
811                        0.0014084973492408186,
812                        0.35160648971214437,
813                        0,
814                        0,
815                        0));
816        prop.setCalibError(0.25, 0.05);
817        prop.setFPS(15);
818        prop.setAvgLatencyMs(35);
819        prop.setLatencyStdDevMs(8);
820        return prop;
821    }
822
823    /**
824     * Creates a set of camera properties matching those of a Limelight 2 running at 960x720
825     * resolution.
826     *
827     * <p>Note that this set of properties represents <i>a camera setup</i>, not <i>your camera
828     * setup</i>. Do not use these camera properties for any non-sim vision calculations, especially
829     * the calibration data. Always use your camera's calibration data to do vision calculations in
830     * non-sim environments. These properties exist as a sample that may be used to get representative
831     * data in sim.
832     *
833     * @return The properties for this camera setup
834     */
835    public static SimCameraProperties LL2_960_720() {
836        var prop = new SimCameraProperties();
837        prop.setCalibration(
838                960,
839                720,
840                MatBuilder.fill(
841                        Nat.N3(),
842                        Nat.N3(),
843                        // intrinsic
844                        769.6873145148892,
845                        0.0,
846                        486.1096609458122,
847                        0.0,
848                        773.8164483705323,
849                        384.66071662358354,
850                        0.0,
851                        0.0,
852                        1.0),
853                VecBuilder.fill( // distort
854                        0.189462064814501,
855                        -0.49903003669627627,
856                        0.007468423590519429,
857                        0.002496885298683693,
858                        0.3443122090208624,
859                        0,
860                        0,
861                        0));
862        prop.setCalibError(0.35, 0.10);
863        prop.setFPS(10);
864        prop.setAvgLatencyMs(50);
865        prop.setLatencyStdDevMs(15);
866        return prop;
867    }
868
869    /**
870     * Creates a set of camera properties matching those of a Limelight 2 running at 1280x720
871     * resolution.
872     *
873     * <p>Note that this set of properties represents <i>a camera setup</i>, not <i>your camera
874     * setup</i>. Do not use these camera properties for any non-sim vision calculations, especially
875     * the calibration data. Always use your camera's calibration data to do vision calculations in
876     * non-sim environments. These properties exist as a sample that may be used to get representative
877     * data in sim.
878     *
879     * @return The properties for this camera setup
880     */
881    public static SimCameraProperties LL2_1280_720() {
882        var prop = new SimCameraProperties();
883        prop.setCalibration(
884                1280,
885                720,
886                MatBuilder.fill(
887                        Nat.N3(),
888                        Nat.N3(),
889                        // intrinsic
890                        1011.3749416937393,
891                        0.0,
892                        645.4955139388737,
893                        0.0,
894                        1008.5391755084075,
895                        508.32877656020196,
896                        0.0,
897                        0.0,
898                        1.0),
899                VecBuilder.fill( // distort
900                        0.13730101577061535,
901                        -0.2904345656989261,
902                        8.32475714507539E-4,
903                        -3.694397782014239E-4,
904                        0.09487962227027584,
905                        0,
906                        0,
907                        0));
908        prop.setCalibError(0.37, 0.06);
909        prop.setFPS(7);
910        prop.setAvgLatencyMs(60);
911        prop.setLatencyStdDevMs(20);
912        return prop;
913    }
914}