diff --git a/src/main/deploy/choreo/Path1.traj b/src/main/deploy/choreo/Path1.traj index 08a39c0..3a6f068 100644 --- a/src/main/deploy/choreo/Path1.traj +++ b/src/main/deploy/choreo/Path1.traj @@ -131,4 +131,4 @@ "splits":[0] }, "events":[] -} +} \ No newline at end of file diff --git a/src/main/deploy/choreo/Path2.traj b/src/main/deploy/choreo/Path2.traj index 0ea7dae..11656a4 100644 --- a/src/main/deploy/choreo/Path2.traj +++ b/src/main/deploy/choreo/Path2.traj @@ -50,4 +50,4 @@ "splits":[0] }, "events":[] -} +} \ No newline at end of file diff --git a/src/main/deploy/choreo/Path3.traj b/src/main/deploy/choreo/Path3.traj index 6fd49a5..0b7522a 100644 --- a/src/main/deploy/choreo/Path3.traj +++ b/src/main/deploy/choreo/Path3.traj @@ -37,4 +37,4 @@ "splits":[0] }, "events":[] -} +} \ No newline at end of file diff --git a/src/main/deploy/choreo/choreo.chor b/src/main/deploy/choreo/choreo.chor index e163dfe..d9e2556 100644 --- a/src/main/deploy/choreo/choreo.chor +++ b/src/main/deploy/choreo/choreo.chor @@ -75,4 +75,4 @@ } }, "generationFeatures":[] -} +} \ No newline at end of file diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 6ef0807..b701dd3 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -241,6 +241,15 @@ public static final SparkFlexConfig MOTOR_CONFIG() { } public class VisionConstants { + public static final String ROOT_TABLE = "Vision"; + + public static final Transform3d robotToCamera = + new Transform3d( + Units.Inches.of(8.375), + Units.Inches.of(15), + Units.Inches.of(19.25), + new Rotation3d(Units.Degrees.of(30), Units.Degrees.of(0), (Units.Degrees.of(0)))); + // AprilTag layout public static AprilTagFieldLayout aprilTagLayout = AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index fb15af0..d337e96 100644 --- 
a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -41,6 +41,7 @@ import frc.robot.subsystems.intake.IntakeIO; import frc.robot.subsystems.intake.IntakeIOSim; import frc.robot.subsystems.shooter.*; +import frc.robot.subsystems.usb_vision.*; import java.util.function.Supplier; /** @@ -57,6 +58,7 @@ public class RobotContainer { private final Intake intake; private final Hopper hopper; private final Shooter shooter; + private final USBVision vision; // private final Vision vision; private final SuperStateMachine superStateMachine; @@ -75,8 +77,9 @@ public RobotContainer() { drive = new Drive(new DriveModules(true), new GyroIOPigeon2(), controller); elevator = new Elevator(new ElevatorIO() {}); intake = new Intake(new IntakeIO() {}); - hopper = new Hopper(new HopperIO() {}); shooter = new Shooter(new ShooterIO() {}); + hopper = new Hopper(new HopperIO() {}); + vision = new USBVision(new USBVisionIOReal() {}, drive::addVisionMeasurement); // vision = // new Vision( // drive::addVisionMeasurement, @@ -90,6 +93,7 @@ public RobotContainer() { intake = new Intake(new IntakeIOSim()); shooter = new Shooter(new ShooterIOSim()); hopper = new Hopper(new HopperIOSim()); + vision = new USBVision(new USBVisionIO() {}, drive::addVisionMeasurement); // vision = // new Vision( // drive::addVisionMeasurement, @@ -104,6 +108,7 @@ public RobotContainer() { intake = new Intake(new IntakeIO() {}); shooter = new Shooter(new ShooterIO() {}); hopper = new Hopper(new HopperIO() {}); + vision = new USBVision(new USBVisionIO() {}, drive::addVisionMeasurement); // vision = new Vision(drive::addVisionMeasurement, new VisionIO() {}, new VisionIO() {}); break; } diff --git a/src/main/java/frc/robot/subsystems/usb_vision/USBVision.java b/src/main/java/frc/robot/subsystems/usb_vision/USBVision.java new file mode 100644 index 0000000..437e361 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/usb_vision/USBVision.java @@ -0,0 +1,93 @@ +// Copyright 
(c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. + +package frc.robot.subsystems.usb_vision; + +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.VecBuilder; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; +import edu.wpi.first.wpilibj2.command.SubsystemBase; +import frc.lib.team2930.TunableNumberGroup; +import frc.lib.team6328.LoggedTunableNumber; +import frc.robot.Constants.VisionConstants; +import org.littletonrobotics.junction.Logger; + +public class USBVision extends SubsystemBase { + // Logging + + // private static final LoggerGroup logGroup = LoggerGroup.build(VisionConstants.ROOT_TABLE); + + // private static final LoggerEntry.Bool logOnTarget = logGroup.buildBoolean("onTarget"); + + // Tunable numbers + + private static final TunableNumberGroup group = + new TunableNumberGroup(VisionConstants.ROOT_TABLE); + + private final LoggedTunableNumber angleTolerance = group.build("angleTolerance", 0.1); + + private final USBVisionIO io; + private final USBVisionInputsAutoLogged inputs = new USBVisionInputsAutoLogged(); + private final VisionConsumer consumer; + + /** Creates a new Vision Subsystem. 
*/ + public USBVision(USBVisionIO io, VisionConsumer consumer) { + this.io = io; + this.consumer = consumer; + } + + @Override + public void periodic() { + io.updateInputs(inputs); + Logger.processInputs(VisionConstants.ROOT_TABLE, inputs); + + for (var observation : inputs.poseObservations) { + double stdDevFactor = + Math.pow(observation.averageTagDistance(), 2.0) / observation.tagCount(); + double linearStdDev = VisionConstants.linearStdDevBaseline * stdDevFactor; + double angularStdDev = VisionConstants.angularStdDevBaseline * stdDevFactor; + + consumer.accept( + observation.pose().toPose2d(), + observation.timestamp(), + VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)); + } + + // Updating tunable numbers + var hc = hashCode(); + if (angleTolerance.hasChanged(hc)) { + setAngleTolerance(); + } + } + + // Setters + + public void setAngleTolerance() { + io.setAngleTolerance(angleTolerance.get()); + } + + // Getters + + // public boolean isOnTarget() { + // return inputs.rotToTarget.getZ <= angleTolerance.get() * (Math.PI/180.0); + // } + + // public Rotation3d getRotateToTarget() { + // return inputs.rotatePose; + // } + + // public Pose3d getDistToTarget() { + // return inputs.targetPose; + // } + + @FunctionalInterface + public static interface VisionConsumer { + public void accept( + Pose2d visionRobotPoseMeters, + double timestampSeconds, + Matrix<N3, N1> visionMeasurementStdDevs); + } +} diff --git a/src/main/java/frc/robot/subsystems/usb_vision/USBVisionIO.java b/src/main/java/frc/robot/subsystems/usb_vision/USBVisionIO.java new file mode 100644 index 0000000..b8fe29b --- /dev/null +++ b/src/main/java/frc/robot/subsystems/usb_vision/USBVisionIO.java @@ -0,0 +1,38 @@ +package frc.robot.subsystems.usb_vision; + +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import org.littletonrobotics.junction.AutoLog; + +public interface USBVisionIO { + + public static record PoseObservation( + double timestamp, Pose3d pose,
double ambiguity, int tagCount, double averageTagDistance) {} + + public static record TargetObservation(Rotation2d tx, Rotation2d ty) {} + + /** Contains all of the input data received from hardware. */ + @AutoLog + public class USBVisionInputs { + public TargetObservation latestTargetObservation = + new TargetObservation(new Rotation2d(), new Rotation2d()); + public PoseObservation[] poseObservations = new PoseObservation[0]; + + // public Pose3d robotVisionPose; + + // public Pose3d targetPose; + // public double targetX; + // public double targetY; + // public double targetZ; + + // public Rotation3d rotatePose; + // public double rotateX; + // public double rotateY; + // public double rotateZ; + } + + /** Updates the set of loggable inputs. */ + public default void updateInputs(USBVisionInputs inputs) {} + + public default void setAngleTolerance(double angle) {} +} diff --git a/src/main/java/frc/robot/subsystems/usb_vision/USBVisionIOReal.java b/src/main/java/frc/robot/subsystems/usb_vision/USBVisionIOReal.java new file mode 100644 index 0000000..3ad1b14 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/usb_vision/USBVisionIOReal.java @@ -0,0 +1,239 @@ +package frc.robot.subsystems.usb_vision; + +import static frc.robot.Constants.VisionConstants.*; + +import edu.wpi.first.apriltag.AprilTagDetection; +import edu.wpi.first.apriltag.AprilTagDetector; +import edu.wpi.first.apriltag.AprilTagPoseEstimator; +import edu.wpi.first.cameraserver.CameraServer; +import edu.wpi.first.cscore.CvSink; +import edu.wpi.first.cscore.CvSource; +import edu.wpi.first.cscore.UsbCamera; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.networktables.NetworkTable; +import edu.wpi.first.networktables.NetworkTableInstance; +import edu.wpi.first.wpilibj.Timer; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import org.opencv.core.Mat; 
+import org.opencv.core.Point; +import org.opencv.core.Scalar; +import org.opencv.imgproc.Imgproc; + +public class USBVisionIOReal implements USBVisionIO { + + private Thread visionThread = new Thread(this::apriltagVisionThreadProc); + + private double tolerance = 0.0; + + public USBVisionIOReal() { + visionThread.setDaemon(true); + visionThread.start(); + } + + @Override + public void updateInputs(USBVisionInputs inputs) { + // Save pose observations to inputs object + List<PoseObservation> poseObservations = DataSync.getObservations(); + inputs.poseObservations = new PoseObservation[poseObservations.size()]; + for (int i = 0; i < poseObservations.size(); i++) { + inputs.poseObservations[i] = poseObservations.get(i); + } + + // Pose3d robotPose = DataSync.getRobotPose(); + // Pose3d tagPose = DataSync.getTagPose(); + // Rotation3d rot = DataSync.getRot(); + // inputs.robotVisionPose = robotPose; + // inputs.targetPose = tagPose; + // if (tagPose != null) { + // inputs.targetX = tagPose.getX(); + // inputs.targetY = tagPose.getY(); + // inputs.targetZ = tagPose.getZ(); + // } + // inputs.rotatePose = rot; + // if (rot != null) { + // inputs.rotateX = rot.getX(); + // inputs.rotateY = rot.getY(); + // inputs.rotateZ = rot.getZ(); + // } + } + + @Override + public void setAngleTolerance(double angle) { + tolerance = angle; + } + + public static class DataSync { + private static List<PoseObservation> m_poseObservations; + private static Pose3d m_robotPose; + private static Pose3d m_tagPose; + private static Rotation3d m_rot; + + public static synchronized void setObservations(List<PoseObservation> poseObservations) { + m_poseObservations = poseObservations; + } + + public static synchronized void setRobotPose(Pose3d pose) { + m_robotPose = pose; + } + + public static synchronized void setTagPose(Pose3d pose) { + m_tagPose = pose; + } + + public static synchronized void setRot(Rotation3d rot) { + m_rot = rot; + } + + public static synchronized List<PoseObservation> getObservations() { + return m_poseObservations; + } + + public static
synchronized Pose3d getRobotPose() { + return m_robotPose; + } + + public static synchronized Pose3d getTagPose() { + return m_tagPose; + } + + public static synchronized Rotation3d getRot() { + return m_rot; + } + } + + void apriltagVisionThreadProc() { + var detector = new AprilTagDetector(); + // look for tag36h11, correct 1 error bit (hamming distance 1) + // hamming 1 allocates 781KB, 2 allocates 27.4 MB, 3 allocates 932 MB + // max of 1 recommended for RoboRIO 1, while hamming 2 is feasible on the RoboRIO 2 + detector.addFamily("tag36h11", 1); + + // Set up Pose Estimator - parameters are for a Microsoft Lifecam HD-3000 + // (https://www.chiefdelphi.com/t/wpilib-apriltagdetector-sample-code/421411/21) + var poseEstConfig = + new AprilTagPoseEstimator.Config( + 0.1651, 699.3778103158814, 677.7161226393544, 345.6059345433618, 207.12741326228522); + var estimator = new AprilTagPoseEstimator(poseEstConfig); + + // Get the UsbCamera from CameraServer + UsbCamera camera = CameraServer.startAutomaticCapture(); + // Set the resolution + camera.setResolution(640, 480); + + // Get a CvSink. This will capture Mats from the camera + CvSink cvSink = CameraServer.getVideo(); + // Setup a CvSource. This will send images back to the Dashboard + CvSource outputStream = CameraServer.putVideo("Detected", 640, 480); + + // Mats are very memory expensive. Lets reuse these. + var mat = new Mat(); + var grayMat = new Mat(); + + // Instantiate once + ArrayList<Long> tags = new ArrayList<>(); + var outlineColor = new Scalar(0, 255, 0); + var crossColor = new Scalar(0, 0, 255); + + // We'll output to NT + NetworkTable tagsTable = NetworkTableInstance.getDefault().getTable("apriltags"); + // IntegerArrayPublisher pubTags = tagsTable.getIntegerArrayTopic("tags").publish(); + + // This cannot be 'true'. The program will never exit if it is. This + // lets the robot stop this thread when restarting robot code or + // deploying.
+ while (!Thread.interrupted()) { + // Tell the CvSink to grab a frame from the camera and put it + // in the source mat. If there is an error notify the output. + if (cvSink.grabFrame(mat) == 0) { + // Send the output the error. + outputStream.notifyError(cvSink.getError()); + // skip the rest of the current iteration + continue; + } + + Imgproc.cvtColor(mat, grayMat, Imgproc.COLOR_RGB2GRAY); + + AprilTagDetection[] detections = detector.detect(grayMat); + double timestamp = Timer.getFPGATimestamp(); + + List<PoseObservation> poseObservations = new LinkedList<>(); + + // have not seen any tags yet + tags.clear(); + + for (AprilTagDetection detection : detections) { + // remember we saw this tag + tags.add((long) detection.getId()); + + // draw lines around the tag + for (var i = 0; i <= 3; i++) { + var j = (i + 1) % 4; + var pt1 = new Point(detection.getCornerX(i), detection.getCornerY(i)); + var pt2 = new Point(detection.getCornerX(j), detection.getCornerY(j)); + Imgproc.line(mat, pt1, pt2, outlineColor, 2); + } + + // mark the center of the tag + var cx = detection.getCenterX(); + var cy = detection.getCenterY(); + var ll = 10; + Imgproc.line(mat, new Point(cx - ll, cy), new Point(cx + ll, cy), crossColor, 2); + Imgproc.line(mat, new Point(cx, cy - ll), new Point(cx, cy + ll), crossColor, 2); + + // identify the tag + Imgproc.putText( + mat, + Integer.toString(detection.getId()), + new Point(cx + ll, cy), + Imgproc.FONT_HERSHEY_SIMPLEX, + 1, + crossColor, + 3); + + // Calculate robot pose + var tagPose = aprilTagLayout.getTagPose(detection.getId()); + if (tagPose.isPresent()) { + Transform3d fieldToTarget = + new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); + Transform3d cameraToTarget = estimator.estimate(detection); + Transform3d fieldToCamera = fieldToTarget.plus(cameraToTarget.inverse()); + Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); + Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(),
fieldToRobot.getRotation()); + + Rotation3d rot = cameraToTarget.getRotation(); + + // Add observation + poseObservations.add( + new PoseObservation( + timestamp, // Timestamp + robotPose, // 3D pose estimate + detection.getDecisionMargin(), // Ambiguity + 1, // Tag count + cameraToTarget.getTranslation().getNorm())); // Average tag distance + + tagsTable + .getEntry("pose_" + detection.getId()) + .setDoubleArray( + new double[] { + robotPose.getX(), robotPose.getY(), robotPose.getZ(), + robotPose.getRotation().getX(), robotPose.getRotation().getY(), + robotPose.getRotation().getZ() + }); + } + } + + // put list of tags onto dashboard + // pubTags.set(tags.stream().mapToLong(Long::longValue).toArray()); + DataSync.setObservations(poseObservations); + + // Give the output stream a new image to display + outputStream.putFrame(mat); + } + // pubTags.close(); + detector.close(); + } +}