/*
 * MIT License
 *
 * Copyright (c) PhotonVision
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package org.photonvision.simulation;

import com.fasterxml.jackson.databind.ObjectMapper;
import edu.wpi.first.math.MatBuilder;
import edu.wpi.first.math.MathUtil;
import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.Nat;
import edu.wpi.first.math.Pair;
import edu.wpi.first.math.VecBuilder;
import edu.wpi.first.math.Vector;
import edu.wpi.first.math.geometry.Pose3d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.geometry.Rotation3d;
import edu.wpi.first.math.geometry.Translation3d;
import edu.wpi.first.math.numbers.*;
import edu.wpi.first.wpilibj.DriverStation;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import org.ejml.data.DMatrix3;
import org.ejml.dense.fixed.CommonOps_DDF3;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;
import org.photonvision.estimation.OpenCVHelp;
import org.photonvision.estimation.RotTrlTransform3d;

/**
 * Calibration and performance values for this camera.
 *
 * <p>The resolution will affect the accuracy of projected (3d to 2d) target corners and, similarly,
 * the severity of image noise on estimation (2d to 3d).
 *
 * <p>The camera intrinsics and distortion coefficients describe the results of calibration, and how
 * to map between 3d field points and 2d image points.
 *
 * <p>The performance values (framerate/exposure time, latency) determine how often results should
 * be updated and with how much latency in simulation. High exposure time causes motion blur, which
 * can inhibit target detection while moving. Note that latency estimation does not account for
 * network latency, and the latency reported will always be perfect.
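 *
 * <p>For example, a minimal sketch of configuring a simulated camera (all values below are
 * illustrative assumptions, not real calibration data):
 *
 * <pre>{@code
 * var cameraProp = new SimCameraProperties();
 * cameraProp.setCalibration(640, 480, Rotation2d.fromDegrees(100)); // assumed diagonal FOV
 * cameraProp.setCalibError(0.25, 0.08); // assumed average / std. dev. pixel error
 * cameraProp.setFPS(20);
 * cameraProp.setAvgLatencyMs(35);
 * cameraProp.setLatencyStdDevMs(5);
 * }</pre>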
 */
public class SimCameraProperties {
    private final Random rand = new Random();
    // calibration
    private int resWidth;
    private int resHeight;
    private Matrix<N3, N3> camIntrinsics;
    private Matrix<N8, N1> distCoeffs;
    private double avgErrorPx;
    private double errorStdDevPx;
    // performance
    private double frameSpeedMs = 0;
    private double exposureTimeMs = 0;
    private double avgLatencyMs = 0;
    private double latencyStdDevMs = 0;
    // util
    private final List<DMatrix3> viewplanes = new ArrayList<>();

    /** Default constructor which is the same as {@link #PERFECT_90DEG} */
    public SimCameraProperties() {
        setCalibration(960, 720, Rotation2d.fromDegrees(90));
    }

    /**
     * Reads camera properties from a photonvision <code>config.json</code> file. This is only the
     * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error.
     * Other camera properties must be set.
     *
     * @param path Path to the <code>config.json</code> file
     * @param width The width of the desired resolution in the JSON
     * @param height The height of the desired resolution in the JSON
     * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid
     *     calibrated resolution.
     */
    public SimCameraProperties(String path, int width, int height) throws IOException {
        this(Path.of(path), width, height);
    }

    /**
     * Reads camera properties from a photonvision <code>config.json</code> file. This is only the
     * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error.
     * Other camera properties must be set.
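     *
     * <p>For example, a rough sketch (the file path and resolution below are hypothetical
     * placeholders for an actual exported calibration):
     *
     * <pre>{@code
     * // hypothetical path to an exported PhotonVision config.json
     * var props = new SimCameraProperties(Path.of("photon_configs/config.json"), 640, 480);
     * props.setFPS(20); // performance values are not stored in the JSON and must be set separately
     * }</pre>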
     *
     * @param path Path to the <code>config.json</code> file
     * @param width The width of the desired resolution in the JSON
     * @param height The height of the desired resolution in the JSON
     * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid
     *     calibrated resolution.
     */
    public SimCameraProperties(Path path, int width, int height) throws IOException {
        var mapper = new ObjectMapper();
        var json = mapper.readTree(path.toFile());
        json = json.get("calibrations");
        boolean success = false;
        try {
            for (int i = 0; i < json.size() && !success; i++) {
                // check if this calibration entry is our desired resolution
                var calib = json.get(i);
                int jsonWidth = calib.get("resolution").get("width").asInt();
                int jsonHeight = calib.get("resolution").get("height").asInt();
                if (jsonWidth != width || jsonHeight != height) continue;
                // get the relevant calibration values
                var jsonIntrinsicsNode = calib.get("cameraIntrinsics").get("data");
                double[] jsonIntrinsics = new double[jsonIntrinsicsNode.size()];
                for (int j = 0; j < jsonIntrinsicsNode.size(); j++) {
                    jsonIntrinsics[j] = jsonIntrinsicsNode.get(j).asDouble();
                }
                var jsonDistortNode = calib.get("distCoeffs").get("data");
                double[] jsonDistortion = new double[8];
                Arrays.fill(jsonDistortion, 0);
                for (int j = 0; j < jsonDistortNode.size(); j++) {
                    jsonDistortion[j] = jsonDistortNode.get(j).asDouble();
                }
                var jsonViewErrors = calib.get("perViewErrors");
                double jsonAvgError = 0;
                for (int j = 0; j < jsonViewErrors.size(); j++) {
                    jsonAvgError += jsonViewErrors.get(j).asDouble();
                }
                jsonAvgError /= jsonViewErrors.size();
                double jsonErrorStdDev = calib.get("standardDeviation").asDouble();
                // assign the read JSON values to this CameraProperties
                setCalibration(
                        jsonWidth,
                        jsonHeight,
                        MatBuilder.fill(Nat.N3(), Nat.N3(), jsonIntrinsics),
                        MatBuilder.fill(Nat.N8(), Nat.N1(), jsonDistortion));
                setCalibError(jsonAvgError, jsonErrorStdDev);
                success = true;
            }
        } catch (Exception e) {
            throw new IOException("Invalid calibration JSON");
        }
        if (!success) throw new IOException("Requested resolution not found in calibration");
    }

    public SimCameraProperties setRandomSeed(long seed) {
        rand.setSeed(seed);
        return this;
    }

    public SimCameraProperties setCalibration(int resWidth, int resHeight, Rotation2d fovDiag) {
        if (fovDiag.getDegrees() < 1 || fovDiag.getDegrees() > 179) {
            fovDiag = Rotation2d.fromDegrees(MathUtil.clamp(fovDiag.getDegrees(), 1, 179));
            DriverStation.reportError(
                    "Requested invalid FOV! Clamping between (1, 179) degrees...", false);
        }
        double resDiag = Math.hypot(resWidth, resHeight);
        double diagRatio = Math.tan(fovDiag.getRadians() / 2);
        var fovWidth = new Rotation2d(Math.atan(diagRatio * (resWidth / resDiag)) * 2);
        var fovHeight = new Rotation2d(Math.atan(diagRatio * (resHeight / resDiag)) * 2);

        // assume no distortion
        var distCoeff = VecBuilder.fill(0, 0, 0, 0, 0, 0, 0, 0);

        // assume centered principal point (pixels)
        double cx = resWidth / 2.0 - 0.5;
        double cy = resHeight / 2.0 - 0.5;

        // use given fov to determine focal point (pixels)
        double fx = cx / Math.tan(fovWidth.getRadians() / 2.0);
        double fy = cy / Math.tan(fovHeight.getRadians() / 2.0);

        // create camera intrinsics matrix
        var camIntrinsics = MatBuilder.fill(Nat.N3(), Nat.N3(), fx, 0, cx, 0, fy, cy, 0, 0, 1);
        setCalibration(resWidth, resHeight, camIntrinsics, distCoeff);

        return this;
    }

    public SimCameraProperties setCalibration(
            int resWidth, int resHeight, Matrix<N3, N3> camIntrinsics, Matrix<N8, N1> distCoeffs) {
        this.resWidth = resWidth;
        this.resHeight = resHeight;
        this.camIntrinsics = camIntrinsics;
        this.distCoeffs = distCoeffs;

        // left, right, up, and down view planes
        Translation3d[] p = {
            new Translation3d(
                    1,
                    new Rotation3d(
                            0, 0, getPixelYaw(0).plus(new Rotation2d(-Math.PI / 2)).getRadians())),
            new Translation3d(
                    1,
                    new Rotation3d(
                            0, 0, getPixelYaw(resWidth).plus(new Rotation2d(Math.PI / 2)).getRadians())),
            new Translation3d(
                    1,
                    new Rotation3d(
                            0, getPixelPitch(0).plus(new Rotation2d(Math.PI / 2)).getRadians(), 0)),
            new Translation3d(
                    1,
                    new Rotation3d(
                            0, getPixelPitch(resHeight).plus(new Rotation2d(-Math.PI / 2)).getRadians(), 0))
        };
        viewplanes.clear();
        for (Translation3d translation3d : p) {
            viewplanes.add(
                    new DMatrix3(translation3d.getX(), translation3d.getY(), translation3d.getZ()));
        }

        return this;
    }

    public SimCameraProperties setCalibError(double avgErrorPx, double errorStdDevPx) {
        this.avgErrorPx = avgErrorPx;
        this.errorStdDevPx = errorStdDevPx;
        return this;
    }

    /**
     * @param fps The average frames per second the camera should process at. <b>Exposure time limits
     *     FPS if set!</b>
     */
    public SimCameraProperties setFPS(double fps) {
        frameSpeedMs = Math.max(1000.0 / fps, exposureTimeMs);

        return this;
    }

    /**
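     * A rough sketch of how exposure time interacts with {@link #setFPS(double)} (the values below
     * are illustrative assumptions):
     *
     * <pre>{@code
     * var prop = new SimCameraProperties();
     * prop.setFPS(60); // frame period is ~16.7 ms
     * prop.setExposureTimeMs(25); // frame period is raised to 25 ms, so the effective FPS becomes 40
     * }</pre>
     *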
     * @param exposureTimeMs The amount of time the "shutter" is open for one frame. Affects motion
     *     blur. <b>Frame speed (from FPS) is limited to this!</b>
     */
    public SimCameraProperties setExposureTimeMs(double exposureTimeMs) {
        this.exposureTimeMs = exposureTimeMs;
        frameSpeedMs = Math.max(frameSpeedMs, exposureTimeMs);

        return this;
    }

    /**
     * @param avgLatencyMs The average latency (from image capture to data published) in milliseconds
     *     a frame should have
     */
    public SimCameraProperties setAvgLatencyMs(double avgLatencyMs) {
        this.avgLatencyMs = avgLatencyMs;

        return this;
    }

    /**
     * @param latencyStdDevMs The standard deviation in milliseconds of the latency
     */
    public SimCameraProperties setLatencyStdDevMs(double latencyStdDevMs) {
        this.latencyStdDevMs = latencyStdDevMs;

        return this;
    }

    public int getResWidth() {
        return resWidth;
    }

    public int getResHeight() {
        return resHeight;
    }

    public int getResArea() {
        return resWidth * resHeight;
    }

    /** Width:height */
    public double getAspectRatio() {
        return (double) resWidth / resHeight;
    }

    public Matrix<N3, N3> getIntrinsics() {
        return camIntrinsics.copy();
    }

    public Vector<N8> getDistCoeffs() {
        return new Vector<>(distCoeffs);
    }

    public double getFPS() {
        return 1000.0 / frameSpeedMs;
    }

    public double getFrameSpeedMs() {
        return frameSpeedMs;
    }

    public double getExposureTimeMs() {
        return exposureTimeMs;
    }

    public double getAvgLatencyMs() {
        return avgLatencyMs;
    }

    public double getLatencyStdDevMs() {
        return latencyStdDevMs;
    }

    public SimCameraProperties copy() {
        var newProp = new SimCameraProperties();
        newProp.setCalibration(resWidth, resHeight, camIntrinsics, distCoeffs);
        newProp.setCalibError(avgErrorPx, errorStdDevPx);
        newProp.setFPS(getFPS());
        newProp.setExposureTimeMs(exposureTimeMs);
        newProp.setAvgLatencyMs(avgLatencyMs);
        newProp.setLatencyStdDevMs(latencyStdDevMs);
        return newProp;
    }

    /**
     * The percentage (0 - 100) of this camera's resolution that the contour takes up in pixels of
     * the image.
     *
     * @param points Points of the contour
     */
    public double getContourAreaPercent(Point[] points) {
        return Imgproc.contourArea(new MatOfPoint2f(OpenCVHelp.getConvexHull(points)))
                / getResArea()
                * 100;
    }

    /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */
    public Rotation2d getPixelYaw(double pixelX) {
        double fx = camIntrinsics.get(0, 0);
        // account for principal point not being centered
        double cx = camIntrinsics.get(0, 2);
        double xOffset = cx - pixelX;
        return new Rotation2d(fx, xOffset);
    }

    /**
     * The pitch from the principal point of this camera to the pixel y value. Pitch is positive
     * down.
     *
     * <p>Note that this angle is naively computed and may be incorrect. See {@link
     * #getCorrectedPixelRot(Point)}.
     */
    public Rotation2d getPixelPitch(double pixelY) {
        double fy = camIntrinsics.get(1, 1);
        // account for principal point not being centered
        double cy = camIntrinsics.get(1, 2);
        double yOffset = cy - pixelY;
        return new Rotation2d(fy, -yOffset);
    }

    /**
     * Finds the yaw and pitch to the given image point. Yaw is positive left, and pitch is positive
     * down.
     *
     * <p>Note that pitch is naively computed and may be incorrect. See {@link
     * #getCorrectedPixelRot(Point)}.
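     *
     * <p>For example (a rough sketch; {@code prop} is a hypothetical configured instance whose
     * principal point is near the image center), a pixel above and to the left of the principal
     * point yields positive yaw and negative pitch:
     *
     * <pre>{@code
     * var rot = prop.getPixelRot(new Point(10, 10));
     * // rot.getZ() > 0 -- yaw, the point appears to the left
     * // rot.getY() < 0 -- pitch, the point appears above center
     * }</pre>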
     */
    public Rotation3d getPixelRot(Point point) {
        return new Rotation3d(
                0, getPixelPitch(point.y).getRadians(), getPixelYaw(point.x).getRadians());
    }

    /**
     * Gives the yaw and pitch of the line intersecting the camera lens and the given pixel
     * coordinates on the sensor. Yaw is positive left, and pitch positive down.
     *
     * <p>The pitch traditionally calculated from pixel offsets does not correctly account for
     * non-zero values of yaw because of perspective distortion (not to be confused with lens
     * distortion) -- for example, the pitch angle is naively calculated as:
     *
     * <pre>pitch = arctan(pixel y offset / focal length y)</pre>
     *
     * However, using focal length as a side of the associated right triangle is not correct when the
     * pixel x value is not 0, because the distance from this pixel (projected on the x-axis) to the
     * camera lens increases. Projecting a line back out of the camera with these naive angles will
     * not intersect the 3d point that was originally projected into this 2d pixel. Instead, this
     * length should be:
     *
     * <pre>focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))</pre>
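     *
     * <p>As a rough illustrative sketch ({@code prop} and the pixel value below are assumptions,
     * not part of this API), the correction only matters when the pixel is horizontally offset from
     * the principal point, and it always shrinks the pitch magnitude:
     *
     * <pre>{@code
     * var px = new Point(50, 100); // a pixel far from the principal point
     * double naivePitch = prop.getPixelRot(px).getY();
     * double correctedPitch = prop.getCorrectedPixelRot(px).getY();
     * // |correctedPitch| <= |naivePitch|, since the effective focal length grows with yaw
     * }</pre>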
     *
     * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given
     *     pixel (roll is zero).
     */
    public Rotation3d getCorrectedPixelRot(Point point) {
        double fx = camIntrinsics.get(0, 0);
        double cx = camIntrinsics.get(0, 2);
        double xOffset = cx - point.x;

        double fy = camIntrinsics.get(1, 1);
        double cy = camIntrinsics.get(1, 2);
        double yOffset = cy - point.y;

        // calculate yaw normally
        var yaw = new Rotation2d(fx, xOffset);
        // correct pitch based on yaw
        var pitch = new Rotation2d(fy / Math.cos(Math.atan(xOffset / fx)), -yOffset);

        return new Rotation3d(0, pitch.getRadians(), yaw.getRadians());
    }

    public Rotation2d getHorizFOV() {
        // sum of FOV left and right of the principal point
        var left = getPixelYaw(0);
        var right = getPixelYaw(resWidth);
        return left.minus(right);
    }

    public Rotation2d getVertFOV() {
        // sum of FOV above and below the principal point
        var above = getPixelPitch(0);
        var below = getPixelPitch(resHeight);
        return below.minus(above);
    }

    public Rotation2d getDiagFOV() {
        return new Rotation2d(Math.hypot(getHorizFOV().getRadians(), getVertFOV().getRadians()));
    }

    /**
     * Determines where the line segment defined by the two given translations intersects the
     * camera's frustum/field-of-vision, if at all.
     *
     * <p>The line is parametrized so that any point on it is <code>p = t * (b - a) + a</code>. This
     * method returns these values of t, minimum first, defining the region of the line segment
     * which is visible in the frustum. If both ends of the line segment are visible, this simply
     * returns {0, 1}. If, for example, point b is visible while a is not, and half of the line
     * segment is inside the camera frustum, {0.5, 1} would be returned.
     *
     * @param camRt The change in basis from world coordinates to camera coordinates. See {@link
     *     RotTrlTransform3d#makeRelativeTo(Pose3d)}.
     * @param a The initial translation of the line
     * @param b The final translation of the line
     * @return A Pair of Doubles. The values may be null:
     *     <ul>
     *       <li>{Double, Double} : Two parametrized values (t), minimum first, representing which
     *           segment of the line is visible in the camera frustum.
     *       <li>{Double, null} : One value (t) representing a single intersection point. For
     *           example, the line only intersects the intersection of two adjacent viewplanes.
     *       <li>{null, null} : No values. The line segment is not visible in the camera frustum.
     *     </ul>
     */
    public Pair<Double, Double> getVisibleLine(
            RotTrlTransform3d camRt, Translation3d a, Translation3d b) {
        // translations relative to the camera
        var rela = camRt.apply(a);
        var relb = camRt.apply(b);

        // check if both ends are behind the camera
        if (rela.getX() <= 0 && relb.getX() <= 0) return new Pair<>(null, null);

        var av = new DMatrix3(rela.getX(), rela.getY(), rela.getZ());
        var bv = new DMatrix3(relb.getX(), relb.getY(), relb.getZ());
        // a to b
        var abv = new DMatrix3();
        CommonOps_DDF3.subtract(bv, av, abv);

        // check if the ends of the line segment are visible
        boolean aVisible = true;
        boolean bVisible = true;
        for (DMatrix3 normal : viewplanes) {
            double aVisibility = CommonOps_DDF3.dot(av, normal);
            if (aVisibility < 0) aVisible = false;
            double bVisibility = CommonOps_DDF3.dot(bv, normal);
            if (bVisibility < 0) bVisible = false;
            // both ends are outside at least one of the same viewplanes
            if (aVisibility <= 0 && bVisibility <= 0) return new Pair<>(null, null);
        }
        // both ends are inside the frustum
        if (aVisible && bVisible) return new Pair<>((double) 0, 1.0);

        // parametrized (t=0 at a, t=1 at b) intersections with viewplanes
        double[] intersections = {Double.NaN, Double.NaN, Double.NaN, Double.NaN};
        // intersection points
        List<DMatrix3> ipts = new ArrayList<>();
        for (double val : intersections) ipts.add(null);

        // find intersections
        for (int i = 0; i < viewplanes.size(); i++) {
            var normal = viewplanes.get(i);

            // we want to know the value of t when the line intercepts this plane
            // parametrized: v = t * ab + a, where v lies on the plane
            // we can find the projection of a onto the plane normal
            // a_projn = normal.times(av.dot(normal) / normal.dot(normal));
            var a_projn = new DMatrix3();
            CommonOps_DDF3.scale(
                    CommonOps_DDF3.dot(av, normal) / CommonOps_DDF3.dot(normal, normal), normal, a_projn);
            // this projection lets us determine the scalar multiple t of ab where
            // (t * ab + a) is a vector which lies on the plane
            if (Math.abs(CommonOps_DDF3.dot(abv, a_projn)) < 1e-5) continue; // line is parallel to plane
            intersections[i] = CommonOps_DDF3.dot(a_projn, a_projn) / -CommonOps_DDF3.dot(abv, a_projn);

            // vector a to the viewplane
            var apv = new DMatrix3();
            CommonOps_DDF3.scale(intersections[i], abv, apv);
            // av + apv = intersection point
            var intersectpt = new DMatrix3();
            CommonOps_DDF3.add(av, apv, intersectpt);
            ipts.set(i, intersectpt);

            // discard intersections outside the camera frustum
            for (int j = 1; j < viewplanes.size(); j++) {
                int oi = (i + j) % viewplanes.size();
                var onormal = viewplanes.get(oi);
                // if the dot of the intersection point with any plane normal is negative, it is outside
                if (CommonOps_DDF3.dot(intersectpt, onormal) < 0) {
                    intersections[i] = Double.NaN;
                    ipts.set(i, null);
                    break;
                }
            }

            // discard duplicate intersections
            if (ipts.get(i) == null) continue;
            for (int j = i - 1; j >= 0; j--) {
                var oipt = ipts.get(j);
                if (oipt == null) continue;
                var diff = new DMatrix3();
                CommonOps_DDF3.subtract(oipt, intersectpt, diff);
                if (CommonOps_DDF3.elementMaxAbs(diff) < 1e-4) {
                    intersections[i] = Double.NaN;
                    ipts.set(i, null);
                    break;
                }
            }
        }

        // determine the visible segment (minimum and maximum t)
        double inter1 = Double.NaN;
        double inter2 = Double.NaN;
        for (double inter : intersections) {
            if (!Double.isNaN(inter)) {
                if (Double.isNaN(inter1)) inter1 = inter;
                else inter2 = inter;
            }
        }

        // two viewplane intersections
        if (!Double.isNaN(inter2)) {
            double max = Math.max(inter1, inter2);
            double min = Math.min(inter1, inter2);
            if (aVisible) min = 0;
            if (bVisible) max = 1;
            return new Pair<>(min, max);
        }
        // one viewplane intersection
        else if (!Double.isNaN(inter1)) {
            if (aVisible) return new Pair<>((double) 0, inter1);
            if (bVisible) return new Pair<>(inter1, 1.0);
            return new Pair<>(inter1, null);
        }
        // no intersections
        else return new Pair<>(null, null);
    }

    /** Returns these points after applying this camera's estimated noise. */
    public Point[] estPixelNoise(Point[] points) {
        if (avgErrorPx == 0 && errorStdDevPx == 0) return points;

        Point[] noisyPts = new Point[points.length];
        for (int i = 0; i < points.length; i++) {
            var p = points[i];
            // error pixels in a random direction
            double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx;
            double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI;
            noisyPts[i] =
                    new Point(p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle));
        }
        return noisyPts;
    }

    /**
     * @return Noisy estimation of a frame's processing latency in milliseconds
     */
    public double estLatencyMs() {
        return Math.max(avgLatencyMs + rand.nextGaussian() * latencyStdDevMs, 0);
    }

    /**
     * @return Estimate of how long until the next frame should be processed in milliseconds
     */
    public double estMsUntilNextFrame() {
        // exceptional processing latency blocks the next frame
        return frameSpeedMs + Math.max(0, estLatencyMs() - frameSpeedMs);
    }

    // pre-calibrated example cameras

    /** 960x720 resolution, 90 degree FOV, "perfect" lagless camera */
    public static SimCameraProperties PERFECT_90DEG() {
        return new SimCameraProperties();
    }

    public static SimCameraProperties PI4_LIFECAM_320_240() {
        var prop = new SimCameraProperties();
        prop.setCalibration(
                320,
                240,
                MatBuilder.fill(
                        Nat.N3(),
                        Nat.N3(),
                        // intrinsic
                        328.2733242048587,
                        0.0,
                        164.8190261141906,
                        0.0,
                        318.0609794305216,
                        123.8633838438093,
                        0.0,
                        0.0,
                        1.0),
                VecBuilder.fill( // distort
                        0.09957946553445934,
                        -0.9166265114485799,
                        0.0019519890627236526,
                        -0.0036071725380870333,
                        1.5627234622420942,
                        0,
                        0,
                        0));
        prop.setCalibError(0.21, 0.0124);
        prop.setFPS(30);
        prop.setAvgLatencyMs(30);
        prop.setLatencyStdDevMs(10);
        return prop;
    }

    public static SimCameraProperties PI4_LIFECAM_640_480() {
        var prop = new SimCameraProperties();
        prop.setCalibration(
                640,
                480,
                MatBuilder.fill(
                        Nat.N3(),
                        Nat.N3(),
                        // intrinsic
                        669.1428078983059,
                        0.0,
                        322.53377249329213,
                        0.0,
                        646.9843137061716,
                        241.26567383784163,
                        0.0,
                        0.0,
                        1.0),
                VecBuilder.fill( // distort
                        0.12788470750464645,
                        -1.2350335805796528,
                        0.0024990767286192732,
                        -0.0026958287600230705,
                        2.2951386729115537,
                        0,
                        0,
                        0));
        prop.setCalibError(0.26, 0.046);
        prop.setFPS(15);
        prop.setAvgLatencyMs(65);
        prop.setLatencyStdDevMs(15);
        return prop;
    }

    public static SimCameraProperties LL2_640_480() {
        var prop = new SimCameraProperties();
        prop.setCalibration(
                640,
                480,
                MatBuilder.fill(
                        Nat.N3(),
                        Nat.N3(),
                        // intrinsic
                        511.22843367007755,
                        0.0,
                        323.62049380211096,
                        0.0,
                        514.5452336723849,
                        261.8827920543568,
                        0.0,
                        0.0,
                        1.0),
                VecBuilder.fill( // distort
                        0.1917469998873756,
                        -0.5142936883324216,
                        0.012461562046896614,
                        0.0014084973492408186,
                        0.35160648971214437,
                        0,
                        0,
                        0));
        prop.setCalibError(0.25, 0.05);
        prop.setFPS(15);
        prop.setAvgLatencyMs(35);
        prop.setLatencyStdDevMs(8);
        return prop;
    }

    public static SimCameraProperties LL2_960_720() {
        var prop = new SimCameraProperties();
        prop.setCalibration(
                960,
                720,
                MatBuilder.fill(
                        Nat.N3(),
                        Nat.N3(),
                        // intrinsic
                        769.6873145148892,
                        0.0,
                        486.1096609458122,
                        0.0,
                        773.8164483705323,
                        384.66071662358354,
                        0.0,
                        0.0,
                        1.0),
                VecBuilder.fill( // distort
                        0.189462064814501,
                        -0.49903003669627627,
                        0.007468423590519429,
                        0.002496885298683693,
                        0.3443122090208624,
                        0,
                        0,
                        0));
        prop.setCalibError(0.35, 0.10);
        prop.setFPS(10);
        prop.setAvgLatencyMs(50);
        prop.setLatencyStdDevMs(15);
        return prop;
    }

    public static SimCameraProperties LL2_1280_720() {
        var prop = new SimCameraProperties();
        prop.setCalibration(
                1280,
                720,
                MatBuilder.fill(
                        Nat.N3(),
                        Nat.N3(),
                        // intrinsic
                        1011.3749416937393,
                        0.0,
                        645.4955139388737,
                        0.0,
                        1008.5391755084075,
                        508.32877656020196,
                        0.0,
                        0.0,
                        1.0),
                VecBuilder.fill( // distort
                        0.13730101577061535,
                        -0.2904345656989261,
                        8.32475714507539E-4,
                        -3.694397782014239E-4,
                        0.09487962227027584,
                        0,
                        0,
                        0));
        prop.setCalibError(0.37, 0.06);
        prop.setFPS(7);
        prop.setAvgLatencyMs(60);
        prop.setLatencyStdDevMs(20);
        return prop;
    }
}