/*
 * Copyright (C) Photon Vision.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <https://www.gnu.org/licenses/>.
 */

package org.photonvision.vision.processes;

import edu.wpi.first.cscore.CameraServerJNI;
import edu.wpi.first.cscore.VideoException;
import edu.wpi.first.math.util.Units;
import io.javalin.websocket.WsContext;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.function.BiConsumer;
import org.opencv.core.Size;
import org.photonvision.common.configuration.CameraConfiguration;
import org.photonvision.common.configuration.ConfigManager;
import org.photonvision.common.dataflow.CVPipelineResultConsumer;
import org.photonvision.common.dataflow.DataChangeService;
import org.photonvision.common.dataflow.DataChangeService.SubscriberHandle;
import org.photonvision.common.dataflow.events.OutgoingUIEvent;
import org.photonvision.common.dataflow.networktables.NTDataPublisher;
import org.photonvision.common.dataflow.statusLEDs.StatusLEDConsumer;
import org.photonvision.common.dataflow.websocket.UICameraConfiguration;
import org.photonvision.common.dataflow.websocket.UIDataPublisher;
import org.photonvision.common.dataflow.websocket.UIPhotonConfiguration;
import org.photonvision.common.hardware.HardwareManager;
import org.photonvision.common.logging.LogGroup;
import org.photonvision.common.logging.Logger;
import org.photonvision.common.util.SerializationUtils;
import org.photonvision.vision.calibration.CameraCalibrationCoefficients;
import org.photonvision.vision.camera.CameraQuirk;
import org.photonvision.vision.camera.CameraType;
import org.photonvision.vision.camera.QuirkyCamera;
import org.photonvision.vision.camera.csi.LibcameraGpuSource;
import org.photonvision.vision.frame.Frame;
import org.photonvision.vision.frame.consumer.FileSaveFrameConsumer;
import org.photonvision.vision.frame.consumer.MJPGFrameConsumer;
import org.photonvision.vision.pipeline.AdvancedPipelineSettings;
import org.photonvision.vision.pipeline.OutputStreamPipeline;
import org.photonvision.vision.pipeline.ReflectivePipelineSettings;
import org.photonvision.vision.pipeline.UICalibrationData;
import org.photonvision.vision.pipeline.result.CVPipelineResult;
import org.photonvision.vision.target.TargetModel;
import org.photonvision.vision.target.TrackedTarget;

/**
 * This is the God Class
 *
 * <p>VisionModule has a pipeline manager, vision runner, and data providers. The data providers
 * provide info on settings changes. VisionModuleManager holds a list of all current vision modules.
 */
public class VisionModule {
    private final Logger logger;
    protected final PipelineManager pipelineManager;
    protected final VisionSource visionSource;
    private final VisionRunner visionRunner;
    private final StreamRunnable streamRunnable;
    private final VisionModuleChangeSubscriber changeSubscriber;
    private final SubscriberHandle changeSubscriberHandle;
    private final LinkedList<CVPipelineResultConsumer> resultConsumers = new LinkedList<>();
    // Raw result consumers run before any drawing has been done by the OutputStreamPipeline
    private final LinkedList<BiConsumer<Frame, List<TrackedTarget>>> streamResultConsumers =
            new LinkedList<>();
    private final NTDataPublisher ntConsumer;
    private final UIDataPublisher uiDataConsumer;
    private final StatusLEDConsumer statusLEDsConsumer;
    protected final QuirkyCamera cameraQuirks;

    protected TrackedTarget lastPipelineResultBestTarget;

    private int inputStreamPort = -1;
    private int outputStreamPort = -1;

    FileSaveFrameConsumer inputFrameSaver;
    FileSaveFrameConsumer outputFrameSaver;

    MJPGFrameConsumer inputVideoStreamer;
    MJPGFrameConsumer outputVideoStreamer;

    public VisionModule(PipelineManager pipelineManager, VisionSource visionSource) {
        logger =
                new Logger(
                        VisionModule.class,
                        visionSource.getSettables().getConfiguration().nickname,
                        LogGroup.VisionModule);

        // Fall back to a default set of quirks before caching them, so cameraQuirks is never null
        if (visionSource.getCameraConfiguration().cameraQuirks == null)
            visionSource.getCameraConfiguration().cameraQuirks = QuirkyCamera.DefaultCamera;

        cameraQuirks = visionSource.getCameraConfiguration().cameraQuirks;

        // We don't show gain if the config says it's -1. So check here to make sure
        // it's non-negative if it _is_ supported
        if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) {
            pipelineManager.userPipelineSettings.forEach(
                    it -> {
                        if (it.cameraGain == -1) it.cameraGain = 75; // Sane default
                    });
        }
        if (cameraQuirks.hasQuirk(CameraQuirk.AwbRedBlueGain)) {
            pipelineManager.userPipelineSettings.forEach(
                    it -> {
                        if (it.cameraRedGain == -1) it.cameraRedGain = 11; // Sane defaults
                        if (it.cameraBlueGain == -1) it.cameraBlueGain = 20;
                    });
        }

        this.pipelineManager = pipelineManager;
        this.visionSource = visionSource;
        changeSubscriber = new VisionModuleChangeSubscriber(this);
        this.visionRunner =
                new VisionRunner(
                        this.visionSource.getFrameProvider(),
                        this.pipelineManager::getCurrentPipeline,
                        this::consumeResult,
                        this.cameraQuirks,
                        getChangeSubscriber());
        this.streamRunnable = new StreamRunnable(new OutputStreamPipeline());
        changeSubscriberHandle = DataChangeService.getInstance().addSubscriber(changeSubscriber);

        createStreams();

        recreateStreamResultConsumers();

        ntConsumer =
                new NTDataPublisher(
                        visionSource.getSettables().getConfiguration().nickname,
                        pipelineManager::getRequestedIndex,
                        this::setPipeline,
                        pipelineManager::getDriverMode,
                        this::setDriverMode);
        uiDataConsumer = new UIDataPublisher(visionSource.getSettables().getConfiguration().uniqueName);
        statusLEDsConsumer =
                new StatusLEDConsumer(visionSource.getSettables().getConfiguration().uniqueName);
        addResultConsumer(ntConsumer);
        addResultConsumer(uiDataConsumer);
        addResultConsumer(statusLEDsConsumer);
        addResultConsumer(
                (result) ->
                        lastPipelineResultBestTarget = result.hasTargets() ? result.targets.get(0) : null);

        // Sync VisionModule state with the first pipeline index
        setPipeline(visionSource.getSettables().getConfiguration().currentPipelineIndex);

        // Set vendor FOV
        if (isVendorCamera()) {
            var fov = ConfigManager.getInstance().getConfig().getHardwareConfig().vendorFOV;
            logger.info("Setting FOV of vendor camera to " + fov);
            visionSource.getSettables().setFOV(fov);
        }

        // Configure LEDs if supported by the underlying hardware
        if (HardwareManager.getInstance().visionLED != null && this.camShouldControlLEDs()) {
            HardwareManager.getInstance()
                    .visionLED
                    .setPipelineModeSupplier(() -> pipelineManager.getCurrentPipelineSettings().ledMode);
            setVisionLEDs(pipelineManager.getCurrentPipelineSettings().ledMode);
        }

        saveAndBroadcastAll();
    }

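    /**
     * Creates the input/output frame savers and MJPEG streamers for this camera. Stream ports are
     * derived from the camera's stream index: index 0 maps to ports 1181 (input) and 1182 (output),
     * index 1 to 1183/1184, and so on.
     */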
    private void createStreams() {
        var camStreamIdx = visionSource.getSettables().getConfiguration().streamIndex;
        // If idx = 0, we want (1181, 1182)
        this.inputStreamPort = 1181 + (camStreamIdx * 2);
        this.outputStreamPort = 1181 + (camStreamIdx * 2) + 1;

        inputFrameSaver =
                new FileSaveFrameConsumer(
                        visionSource.getSettables().getConfiguration().nickname,
                        visionSource.getSettables().getConfiguration().uniqueName,
                        "input");
        outputFrameSaver =
                new FileSaveFrameConsumer(
                        visionSource.getSettables().getConfiguration().nickname,
                        visionSource.getSettables().getConfiguration().uniqueName,
                        "output");

        String camHostname = CameraServerJNI.getHostname();
        inputVideoStreamer =
                new MJPGFrameConsumer(
                        camHostname + "_Port_" + inputStreamPort + "_Input_MJPEG_Server", inputStreamPort);
        outputVideoStreamer =
                new MJPGFrameConsumer(
                        camHostname + "_Port_" + outputStreamPort + "_Output_MJPEG_Server", outputStreamPort);
    }

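    /**
     * Registers the stream result consumers that forward frames to the snapshot savers and MJPEG
     * streamers. Input consumers receive the raw color image; output consumers receive the
     * processed (annotated) image.
     */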
    private void recreateStreamResultConsumers() {
        streamResultConsumers.add(
                (frame, tgts) -> {
                    if (frame != null) inputFrameSaver.accept(frame.colorImage);
                });
        streamResultConsumers.add(
                (frame, tgts) -> {
                    if (frame != null) outputFrameSaver.accept(frame.processedImage);
                });
        streamResultConsumers.add(
                (frame, tgts) -> {
                    if (frame != null) inputVideoStreamer.accept(frame.colorImage);
                });
        streamResultConsumers.add(
                (frame, tgts) -> {
                    if (frame != null) outputVideoStreamer.accept(frame.processedImage);
                });
    }

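    /**
     * Background thread that runs the OutputStreamPipeline (drawing/annotation) and feeds the
     * stream result consumers, so the main vision loop does not block on stream output. Only the
     * most recent frame is kept; if this thread falls behind, older unconsumed frames are released.
     */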
    private class StreamRunnable extends Thread {
        private final OutputStreamPipeline outputStreamPipeline;

        private final Object frameLock = new Object();
        private Frame latestFrame;
        private AdvancedPipelineSettings settings = new AdvancedPipelineSettings();
        private List<TrackedTarget> targets = new ArrayList<>();

        private boolean shouldRun = false;

        public StreamRunnable(OutputStreamPipeline outputStreamPipeline) {
            this.outputStreamPipeline = outputStreamPipeline;
        }

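        /**
         * Hands the latest frame, settings, and targets to the stream thread. If a previous frame
         * has not been consumed yet, it is released here to avoid leaking Mats.
         */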
        public void updateData(
                Frame inputOutputFrame, AdvancedPipelineSettings settings, List<TrackedTarget> targets) {
            synchronized (frameLock) {
                if (shouldRun && this.latestFrame != null) {
                    logger.trace("Fell behind; releasing last unused Mats");
                    this.latestFrame.release();
                }

                this.latestFrame = inputOutputFrame;
                this.settings = settings;
                this.targets = targets;

                shouldRun = inputOutputFrame != null;
                // && inputOutputFrame.colorImage != null
                // && !inputOutputFrame.colorImage.getMat().empty()
                // && inputOutputFrame.processedImage != null
                // && !inputOutputFrame.processedImage.getMat().empty();
            }
        }

        @Override
        public void run() {
            while (!Thread.interrupted()) {
                final Frame m_frame;
                final AdvancedPipelineSettings settings;
                final List<TrackedTarget> targets;
                final boolean shouldRun;
                synchronized (frameLock) {
                    m_frame = this.latestFrame;
                    this.latestFrame = null;

                    settings = this.settings;
                    targets = this.targets;
                    shouldRun = this.shouldRun;

                    this.shouldRun = false;
                }
                if (shouldRun) {
                    try {
                        CVPipelineResult osr = outputStreamPipeline.process(m_frame, settings, targets);
                        consumeResults(m_frame, targets);

                    } catch (Exception e) {
                        // Never die
                        logger.error("Exception while running stream runnable!", e);
                    }
                    try {
                        m_frame.release();
                    } catch (Exception e) {
                        logger.error("Exception freeing frames", e);
                    }
                } else {
                    // No new frame yet; sleep briefly and poll again
                    try {
                        Thread.sleep(1);
                    } catch (InterruptedException e) {
                        logger.warn("StreamRunnable was interrupted - exiting");
                        return;
                    }
                }
            }
        }
    }

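    /** Marks the camera as active and starts the vision and streaming threads. */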
    public void start() {
        visionSource.cameraConfiguration.deactivated = false;
        visionRunner.startProcess();
        streamRunnable.start();
    }

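    /**
     * Marks the camera as deactivated, stops the vision and streaming threads, releases the
     * underlying camera and stream resources, and turns off the vision LEDs.
     */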
    public void stop() {
        visionSource.cameraConfiguration.deactivated = true;
        visionRunner.stopProcess();

        try {
            streamRunnable.interrupt();
            streamRunnable.join();
        } catch (InterruptedException e) {
            logger.error("Exception killing process thread", e);
        }

        visionSource.release();

        inputVideoStreamer.close();
        outputVideoStreamer.close();
        inputFrameSaver.close();
        outputFrameSaver.close();

        changeSubscriberHandle.stop();
        setVisionLEDs(false);
    }

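    /**
     * Sets the diagonal FOV used for this camera, unless the FOV comes from a vendor hardware
     * config, in which case the request is ignored.
     */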
    public void setFov(double fov) {
        var settables = visionSource.getSettables();
        logger.trace(() -> "Setting " + settables.getConfiguration().nickname + " FOV (" + fov + ")");

        // Only set FOV if we have no vendor JSON, and we aren't using a PiCam
        if (isVendorCamera()) {
            logger.info("Cannot set FOV on a vendor device! Ignoring...");
        } else {
            settables.setFOV(fov);
        }
    }

    private boolean isVendorCamera() {
        return visionSource.isVendorCamera();
    }

    public VisionModuleChangeSubscriber getChangeSubscriber() {
        return changeSubscriber;
    }

    void changePipelineType(int newType) {
        pipelineManager.changePipelineType(newType);
        setPipeline(pipelineManager.getRequestedIndex());
        saveAndBroadcastAll();
    }

    void setDriverMode(boolean isDriverMode) {
        pipelineManager.setDriverMode(isDriverMode);
        setVisionLEDs(!isDriverMode);
        setPipeline(
                isDriverMode ? PipelineManager.DRIVERMODE_INDEX : pipelineManager.getRequestedIndex());
        saveAndBroadcastAll();
    }

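    /**
     * Configures the calibration pipeline from the UI-provided calibration data (video mode, board
     * geometry, tag family) and switches this module to the calibration pipeline.
     */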
    public void startCalibration(UICalibrationData data) {
        var settings = pipelineManager.calibration3dPipeline.getSettings();

        var videoMode = visionSource.getSettables().getAllVideoModes().get(data.videoModeIndex);
        var resolution = new Size(videoMode.width, videoMode.height);

        settings.cameraVideoModeIndex = data.videoModeIndex;
        visionSource.getSettables().setVideoModeIndex(data.videoModeIndex);
        logger.info(
                "Starting calibration at resolution index "
                        + data.videoModeIndex
                        + " and settings "
                        + data);
        settings.gridSize = Units.inchesToMeters(data.squareSizeIn);
        settings.markerSize = Units.inchesToMeters(data.markerSizeIn);
        settings.boardHeight = data.patternHeight;
        settings.boardWidth = data.patternWidth;
        settings.boardType = data.boardType;
        settings.resolution = resolution;
        settings.useOldPattern = data.useOldPattern;
        settings.tagFamily = data.tagFamily;

        // Disable gain if not applicable
        if (!cameraQuirks.hasQuirk(CameraQuirk.Gain)) {
            settings.cameraGain = -1;
        }
        if (!cameraQuirks.hasQuirk(CameraQuirk.AwbRedBlueGain)) {
            settings.cameraRedGain = -1;
            settings.cameraBlueGain = -1;
        }

        settings.cameraAutoExposure = true;

        setPipeline(PipelineManager.CAL_3D_INDEX);
    }

    public void saveInputSnapshot() {
        inputFrameSaver.overrideTakeSnapshot();
    }

    public void saveOutputSnapshot() {
        outputFrameSaver.overrideTakeSnapshot();
    }

    public void takeCalibrationSnapshot() {
        pipelineManager.calibration3dPipeline.takeSnapshot();
    }

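    /**
     * Attempts to solve the camera calibration from the collected snapshots, leaves calibration
     * mode, and returns the resulting coefficients (or null if calibration failed). Successful
     * calibrations are added to the camera's configuration and saved.
     */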
    public CameraCalibrationCoefficients endCalibration() {
        var ret =
                pipelineManager.calibration3dPipeline.tryCalibration(
                        ConfigManager.getInstance()
                                .getCalibrationImageSavePathWithRes(
                                        pipelineManager.calibration3dPipeline.getSettings().resolution,
                                        visionSource.getCameraConfiguration().uniqueName));
        pipelineManager.setCalibrationMode(false);

        setPipeline(pipelineManager.getRequestedIndex());

        if (ret != null) {
            logger.debug("Saving calibration...");
            visionSource.getSettables().addCalibration(ret);
        } else {
            logger.error("Calibration failed...");
        }
        saveAndBroadcastAll();
        return ret;
    }

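    /**
     * Switches to the pipeline at the given index and applies its camera settings (video mode,
     * brightness, exposure, gain, and white balance) on the vision thread. Returns false if no
     * settings exist for that index.
     */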
    boolean setPipeline(int index) {
        logger.info("Setting pipeline to " + index);
        logger.info("Pipeline name: " + pipelineManager.getPipelineNickname(index));
        pipelineManager.setIndex(index);

        VisionSourceSettables settables = visionSource.getSettables();

        var pipelineSettings = pipelineManager.getPipelineSettings(index);

        if (pipelineSettings == null) {
            logger.error("Config for index " + index + " was null! Not changing pipelines");
            return false;
        }

        visionRunner.runSynchronously(
                () -> {
                    settables.setVideoModeInternal(pipelineSettings.cameraVideoModeIndex);
                    settables.setBrightness(pipelineSettings.cameraBrightness);

                    // If manual exposure, force exposure slider to be valid
                    if (!pipelineSettings.cameraAutoExposure) {
                        if (pipelineSettings.cameraExposureRaw < 0)
                            pipelineSettings.cameraExposureRaw = 10; // reasonable default
                    }

                    settables.setExposureRaw(pipelineSettings.cameraExposureRaw);
                    try {
                        settables.setAutoExposure(pipelineSettings.cameraAutoExposure);
                    } catch (VideoException e) {
                        logger.error("Unable to set camera auto exposure!");
                        logger.error(e.toString());
                    }
                    if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) {
                        // If the gain is disabled for some reason, re-enable it
                        if (pipelineSettings.cameraGain == -1) pipelineSettings.cameraGain = 75;
                        settables.setGain(Math.max(0, pipelineSettings.cameraGain));
                    } else {
                        pipelineSettings.cameraGain = -1;
                    }

                    if (cameraQuirks.hasQuirk(CameraQuirk.AwbRedBlueGain)) {
                        // If the AWB gains are disabled for some reason, re-enable them
                        if (pipelineSettings.cameraRedGain == -1) pipelineSettings.cameraRedGain = 11;
                        if (pipelineSettings.cameraBlueGain == -1) pipelineSettings.cameraBlueGain = 20;
                        settables.setRedGain(Math.max(0, pipelineSettings.cameraRedGain));
                        settables.setBlueGain(Math.max(0, pipelineSettings.cameraBlueGain));
                    } else {
                        pipelineSettings.cameraRedGain = -1;
                        pipelineSettings.cameraBlueGain = -1;

                        // All cameras other than Pi cameras should support white balance temperature
                        settables.setWhiteBalanceTemp(pipelineSettings.cameraWhiteBalanceTemp);
                        settables.setAutoWhiteBalance(pipelineSettings.cameraAutoWhiteBalance);
                    }

                    setVisionLEDs(pipelineSettings.ledMode);

                    settables.getConfiguration().currentPipelineIndex = pipelineManager.getRequestedIndex();
                });

        return true;
    }

    private boolean camShouldControlLEDs() {
        // Heuristic - if the camera has a known FOV or is a piCam, assume it's in use for vision
        // processing and should command the LEDs.
        // TODO: Make LED control a property of the camera itself and controllable in the UI.
        return isVendorCamera();
    }

    private void setVisionLEDs(boolean on) {
        if (camShouldControlLEDs() && HardwareManager.getInstance().visionLED != null)
            HardwareManager.getInstance().visionLED.setState(on);
    }

    public void saveModule() {
        ConfigManager.getInstance()
                .saveModule(
                        getStateAsCameraConfig(), visionSource.getSettables().getConfiguration().uniqueName);
    }

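    /**
     * Saves this module's configuration to disk and pushes the full program settings to the UI.
     */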
    public void saveAndBroadcastAll() {
        saveModule();
        DataChangeService.getInstance()
                .publishEvent(
                        new OutgoingUIEvent<>(
                                "fullsettings",
                                UIPhotonConfiguration.programStateToUi(ConfigManager.getInstance().getConfig())));
    }

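    /**
     * Saves this module's configuration and broadcasts only the mutated pipeline setting to UI
     * clients, tagged with the websocket context that originated the change.
     */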
    void saveAndBroadcastSelective(WsContext originContext, String propertyName, Object value) {
        logger.trace("Broadcasting PSC mutation - " + propertyName + ": " + value);
        saveModule();

        HashMap<String, Object> map = new HashMap<>();
        HashMap<String, Object> subMap = new HashMap<>();
        subMap.put(propertyName, value);
        map.put("mutatePipelineSettings", subMap);

        DataChangeService.getInstance()
                .publishEvent(new OutgoingUIEvent<>("mutatePipeline", map, originContext));
    }

    public void setCameraNickname(String newName) {
        visionSource.getSettables().getConfiguration().nickname = newName;
        ntConsumer.updateCameraNickname(newName);
        inputFrameSaver.updateCameraNickname(newName);
        outputFrameSaver.updateCameraNickname(newName);

        // Push new data to the UI
        saveAndBroadcastAll();
    }

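    /**
     * Builds the UI-facing snapshot of this camera's configuration: identity, FOV, pipelines,
     * exposure/white-balance limits, available video modes, stream ports, calibrations, and
     * connection state.
     */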
    public UICameraConfiguration toUICameraConfig() {
        var ret = new UICameraConfiguration();

        var config = visionSource.getCameraConfiguration();
        ret.matchedCameraInfo = config.matchedCameraInfo;
        ret.cameraPath = config.getDevicePath();
        ret.fov = visionSource.getSettables().getFOV();
        ret.isCSICamera = config.matchedCameraInfo.type() == CameraType.ZeroCopyPicam;
        ret.nickname = visionSource.getSettables().getConfiguration().nickname;
        ret.uniqueName = visionSource.getSettables().getConfiguration().uniqueName;
        ret.currentPipelineSettings =
                SerializationUtils.objectToHashMap(pipelineManager.getCurrentPipelineSettings());
        ret.currentPipelineIndex = pipelineManager.getRequestedIndex();
        ret.pipelineNicknames = pipelineManager.getPipelineNicknames();
        ret.cameraQuirks = visionSource.getSettables().getConfiguration().cameraQuirks;
        ret.minExposureRaw = visionSource.getSettables().getMinExposureRaw();
        ret.maxExposureRaw = visionSource.getSettables().getMaxExposureRaw();
        ret.minWhiteBalanceTemp = visionSource.getSettables().getMinWhiteBalanceTemp();
        ret.maxWhiteBalanceTemp = visionSource.getSettables().getMaxWhiteBalanceTemp();

        ret.deactivated = config.deactivated;

        // TODO refactor into helper method
        var temp = new HashMap<Integer, HashMap<String, Object>>();
        var videoModes = visionSource.getSettables().getAllVideoModes();

        for (var k : videoModes.keySet()) {
            var internalMap = new HashMap<String, Object>();

            internalMap.put("width", videoModes.get(k).width);
            internalMap.put("height", videoModes.get(k).height);
            internalMap.put("fps", videoModes.get(k).fps);
            internalMap.put(
                    "pixelFormat",
                    ((videoModes.get(k) instanceof LibcameraGpuSource.FPSRatedVideoMode)
                                    ? "kPicam"
                                    : videoModes.get(k).pixelFormat.toString())
                            .substring(1)); // Remove the k prefix

            temp.put(k, internalMap);
        }

        if (videoModes.size() == 0) {
            logger.error("No video modes found for this camera!");
        }

        ret.videoFormatList = temp;
        ret.outputStreamPort = this.outputStreamPort;
        ret.inputStreamPort = this.inputStreamPort;

        ret.calibrations =
                visionSource.getSettables().getConfiguration().calibrations.stream()
                        .map(CameraCalibrationCoefficients::cloneWithoutObservations)
                        .toList();

        ret.isFovConfigurable =
                !(ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV());

        ret.isConnected = visionSource.getFrameProvider().isConnected();
        ret.hasConnected = visionSource.getFrameProvider().hasConnected();

        return ret;
    }

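    /**
     * Captures the current pipeline and driver-mode settings into this camera's configuration so
     * it can be serialized to disk.
     */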
    public CameraConfiguration getStateAsCameraConfig() {
        var config = visionSource.getSettables().getConfiguration();
        config.setPipelineSettings(pipelineManager.userPipelineSettings);
        config.driveModeSettings = pipelineManager.driverModePipeline.getSettings();
        config.currentPipelineIndex = Math.max(pipelineManager.getRequestedIndex(), -1);

        return config;
    }

    public void addResultConsumer(CVPipelineResultConsumer dataConsumer) {
        resultConsumers.add(dataConsumer);
    }

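    /**
     * Accepts a pipeline result from the vision runner: publishes it to all result consumers, then
     * either hands the frame to the stream thread for drawing or, when there is no output frame to
     * draw on, consumes and releases it directly.
     */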
    private void consumeResult(CVPipelineResult result) {
        consumePipelineResult(result);

        // Pipelines like DriverMode and Calibrate3dPipeline have null output frames
        if (result.inputAndOutputFrame != null
                && (pipelineManager.getCurrentPipelineSettings()
                        instanceof AdvancedPipelineSettings settings)) {
            streamRunnable.updateData(result.inputAndOutputFrame, settings, result.targets);
            // The streamRunnable manages releasing in this case
        } else {
            consumeResults(result.inputAndOutputFrame, result.targets);

            result.release();
            // In this case we don't hand off to the streaming thread, so release the frame here
        }
    }

    private void consumePipelineResult(CVPipelineResult result) {
        for (var dataConsumer : resultConsumers) {
            dataConsumer.accept(result);
        }
    }

    /** Consume stream/target results, no rate limiting applied */
    private void consumeResults(Frame frame, List<TrackedTarget> targets) {
        for (var c : streamResultConsumers) {
            c.accept(frame, targets);
        }
    }

    public void setTargetModel(TargetModel targetModel) {
        var pipelineSettings = pipelineManager.getCurrentPipeline().getSettings();
        if (pipelineSettings instanceof ReflectivePipelineSettings settings) {
            settings.targetModel = targetModel;
            saveAndBroadcastAll();
        } else {
            logger.error("Cannot set target model of non-reflective pipe! Ignoring...");
        }
    }

    public void addCalibrationToConfig(CameraCalibrationCoefficients newCalibration) {
        if (newCalibration != null) {
            logger.info("Got new calibration for " + newCalibration.unrotatedImageSize);
            visionSource.getSettables().addCalibration(newCalibration);
        } else {
            logger.error("Got null calibration?");
        }

        saveAndBroadcastAll();
    }

    /**
     * Add/remove quirks from the camera we're controlling
     *
     * @param quirksToChange map of true/false for quirks we should change
     */
    public void changeCameraQuirks(HashMap<CameraQuirk, Boolean> quirksToChange) {
        visionSource.getCameraConfiguration().cameraQuirks.updateQuirks(quirksToChange);
        visionSource.remakeSettables();
        saveAndBroadcastAll();
    }

    public String uniqueName() {
        return this.visionSource.cameraConfiguration.uniqueName;
    }

    public CameraConfiguration getCameraConfiguration() {
        return this.visionSource.cameraConfiguration;
    }
}