From c148cf184ac3df2bf2f448228b486af49505d6ea Mon Sep 17 00:00:00 2001 From: Timothy Ellsworth Bowers Date: Tue, 3 Feb 2026 15:37:55 -0700 Subject: [PATCH 01/19] Add a pose buffer for vision measurements (#136) * Add vision examples from good teams * WIP: Getting 254's vision code to compile * FRC254's 2024 vision code builds! -- Next step is to mesh in the parts of the vision code I need for the circular vision buffer to the base vision subsystem. * WIP: Adding SPAM's vision code * SPAM / FRC180 vision now builds -- The vision subsystem from FRC180 / SPAM now builds within the 2486 code base. Next steps are to figure out which parts of the various vision combination schemes I want to use and hack together something! * Add 6328's 2025 drive-to-pose algorithm * Make cleaner diff against Az-RBSI * Update FRC254's vision to 2025 version * Fix rebase error * Clean non-ASCII characters * Compilable Vision with pose buffering -- Create a pose buffer and use timestamps to match vision values with those from odometry to reduce jitter. Also be able to define certain tags in whose location we trust more (e.g. HUB in 2026) in the event of field misshapenness. Weave the new Vision structure together with the drivetrain to allow for timestamped pose insertion into the estimator. --------- Co-authored-by: NutHouse coco-prog-3 --- src/main/java/frc/robot/Constants.java | 10 + src/main/java/frc/robot/Robot.java | 16 + src/main/java/frc/robot/RobotContainer.java | 89 +-- .../frc/robot/subsystems/drive/Drive.java | 267 +++++++-- .../frc/robot/subsystems/vision/Vision.java | 510 +++++++++++++----- .../frc/robot/subsystems/vision/VisionIO.java | 27 +- .../subsystems/vision/VisionIOLimelight.java | 10 +- .../vision/VisionIOPhotonVision.java | 131 +++-- .../vision/VisionIOPhotonVisionSim.java | 24 +- .../ConcurrentTimeInterpolatableBuffer.java | 164 ++++++ src/main/java/frc/robot/util/GeomUtil.java | 165 ------ 11 files changed, 939 insertions(+), 474 deletions(-) create mode 100644 src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java delete mode 100644 src/main/java/frc/robot/util/GeomUtil.java diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 26f25253..0b39dcc5 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -416,6 +416,16 @@ public static final class AutoConstants { /** Vision Constants (Assuming PhotonVision) ***************************** */ public static class VisionConstants { + public static final java.util.Set kTrustedTags = + java.util.Set.of(2, 3, 4, 5, 8, 9, 10, 11, 18, 19, 20, 21, 24, 25, 26, 27); // HUB AprilTags + + // Noise scaling factors (lower = more trusted) + public static final double kTrustedTagStdDevScale = 0.6; // 40% more weight + public static final double kUntrustedTagStdDevScale = 1.3; // 30% less weight + + // Optional: if true, reject observations that contain no trusted tags + public static final boolean kRequireTrustedTag = false; + // AprilTag Identification Constants public static final double kAmbiguityThreshold = 0.4; public static final double kTargetLogTimeSecs = 0.1; diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index 6d58c6d0..b9668ae3 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -19,6 +19,7 @@ import com.revrobotics.util.StatusLogger; import edu.wpi.first.wpilibj.DriverStation; +import edu.wpi.first.wpilibj.DriverStation.Alliance; import edu.wpi.first.wpilibj.Threads; 
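A minimal sketch (not part of the patch) of how the kTrustedTagStdDevScale / kUntrustedTagStdDevScale constants added to VisionConstants above are meant to combine: a linear blend by the fraction of trusted tags in an observation, matching the trustScale computation in Vision.buildEstimate() later in this diff. The class name, reduced tag set, and baseline value are illustrative assumptions only.

import java.util.Set;

final class TrustedTagScaleSketch {
  // Mirrors Constants.VisionConstants above (subset of the HUB tag IDs)
  static final Set<Integer> kTrustedTags = Set.of(2, 3, 4, 5, 8, 9, 10, 11);
  static final double kTrustedTagStdDevScale = 0.6; // lower std dev -> more weight
  static final double kUntrustedTagStdDevScale = 1.3; // higher std dev -> less weight

  // Linear blend by the fraction of trusted tags used, as in Vision.buildEstimate()
  static double trustScale(Set<Integer> usedTagIds) {
    if (usedTagIds.isEmpty()) return kUntrustedTagStdDevScale;
    long trusted = usedTagIds.stream().filter(kTrustedTags::contains).count();
    double fracTrusted = (double) trusted / usedTagIds.size();
    return kUntrustedTagStdDevScale
        + fracTrusted * (kTrustedTagStdDevScale - kUntrustedTagStdDevScale);
  }

  public static void main(String[] args) {
    double linearStdDevBaseline = 0.02; // meters; example value only
    System.out.println(linearStdDevBaseline * trustScale(Set.of(4, 5))); // all trusted  -> ~0.012
    System.out.println(linearStdDevBaseline * trustScale(Set.of(1, 6))); // none trusted -> ~0.026
    System.out.println(linearStdDevBaseline * trustScale(Set.of(4, 6))); // half trusted -> ~0.019
  }
}

With kRequireTrustedTag enabled, an observation whose trusted count is zero is dropped entirely instead of merely down-weighted.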
import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj2.command.Command; @@ -47,6 +48,7 @@ public class Robot extends LoggedRobot { private Command m_autoCommandPathPlanner; private RobotContainer m_robotContainer; private Timer m_disabledTimer; + private static boolean isBlueAlliance = false; // Define simulation fields here private VisionSystemSim visionSim; @@ -135,9 +137,13 @@ public Robot() { /** TESTING VERSION OF ROBOTPERIODIC FOR OVERRUN SOURCES */ @Override public void robotPeriodic() { + + isBlueAlliance = DriverStation.getAlliance().orElse(Alliance.Blue) == Alliance.Blue; + final long t0 = System.nanoTime(); if (isReal()) { + // Switch thread to high priority to improve loop timing Threads.setCurrentThreadPriority(true, 99); } final long t1 = System.nanoTime(); @@ -186,6 +192,7 @@ public void autonomousInit() { CommandScheduler.getInstance().cancelAll(); m_robotContainer.getDrivebase().setMotorBrake(true); m_robotContainer.getDrivebase().resetHeadingController(); + m_robotContainer.getVision().resetPoseGate(Timer.getFPGATimestamp()); // TODO: Make sure Gyro inits here with whatever is in the path planning thingie switch (Constants.getAutoType()) { @@ -315,4 +322,13 @@ public void simulationPeriodic() { // Update sim each sim tick visionSim.update(m_robotContainer.getDrivebase().getPose()); } + + // Helper method to simplify checking if the robot is blue or red alliance + public static boolean isBlue() { + return isBlueAlliance; + } + + public static boolean isRed() { + return !isBlue(); + } } diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index c38f5781..9c5a167a 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -34,7 +34,6 @@ import edu.wpi.first.wpilibj2.command.button.RobotModeTriggers; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; import frc.robot.Constants.CANBuses; -import frc.robot.Constants.Cameras; import frc.robot.Constants.OperatorConstants; import frc.robot.FieldConstants.AprilTagLayoutType; import frc.robot.commands.AutopilotCommands; @@ -75,6 +74,9 @@ /** This is the location for defining robot hardware, commands, and controller button bindings. 
*/ public class RobotContainer { + private static final boolean USE_MAPLESIM = true; + public static final boolean MAPLESIM = USE_MAPLESIM && Robot.isSimulation(); + /** Define the Driver and, optionally, the Operator/Co-Driver Controllers */ // Replace with ``CommandPS4Controller`` or ``CommandJoystick`` if needed final CommandXboxController driverController = new CommandXboxController(0); // Main Driver @@ -128,36 +130,7 @@ public class RobotContainer { // Alerts private final Alert aprilTagLayoutAlert = new Alert("", AlertType.INFO); - // Vision Factories - private VisionIO[] buildVisionIOsReal(Drive drive) { - return switch (Constants.getVisionType()) { - case PHOTON -> - Arrays.stream(Cameras.ALL) - .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) - .toArray(VisionIO[]::new); - - case LIMELIGHT -> - Arrays.stream(Cameras.ALL) - .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) - .toArray(VisionIO[]::new); - - case NONE -> new VisionIO[] {new VisionIO() {}}; - }; - } - - private static VisionIO[] buildVisionIOsSim(Drive drive) { - var cams = Constants.Cameras.ALL; - VisionIO[] ios = new VisionIO[cams.length]; - for (int i = 0; i < cams.length; i++) { - var cfg = cams[i]; - ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); - } - return ios; - } - - private VisionIO[] buildVisionIOsReplay() { - return new VisionIO[] {new VisionIO() {}}; - } + public static RobotContainer instance; /** * Constructor for the Robot Container. This container holds subsystems, opertator interface @@ -179,7 +152,9 @@ public RobotContainer() { m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIOSim()); // new Flywheel(new FlywheelIOTalonFX()); - m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); + m_vision = + new Vision( + m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -193,11 +168,9 @@ public RobotContainer() { // ---------------- Vision IOs (robot code) ---------------- var cams = frc.robot.Constants.Cameras.ALL; - - // If you keep Vision expecting exactly two cameras: - VisionIO[] visionIOs = buildVisionIOsSim(m_drivebase); - m_vision = new Vision(m_drivebase::addVisionMeasurement, visionIOs); - + m_vision = + new Vision( + m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsSim(m_drivebase)); m_accel = new Accelerometer(m_imu); // ---------------- CameraSweepEvaluator (sim-only analysis) ---------------- @@ -231,7 +204,8 @@ public RobotContainer() { m_imu = new Imu(new ImuIOSim() {}); m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIO() {}); - m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); + m_vision = + new Vision(m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -493,6 +467,11 @@ public Drive getDrivebase() { return m_drivebase; } + /** Vision getter method for use with Robot.java */ + public Vision getVision() { + return m_vision; + } + /** * Set up the SysID routines from AdvantageKit * @@ -536,6 +515,40 @@ private void definesysIdRoutines() { } } + // Vision Factories + // Vision Factories (REAL) + private VisionIO[] buildVisionIOsReal(Drive drive) { + return switch (Constants.getVisionType()) { + case PHOTON -> + java.util.Arrays.stream(Constants.Cameras.ALL) + .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), 
c.robotToCamera())) + .toArray(VisionIO[]::new); + + case LIMELIGHT -> + java.util.Arrays.stream(Constants.Cameras.ALL) + .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) + .toArray(VisionIO[]::new); + + case NONE -> new VisionIO[] {}; // recommended: no cameras + }; + } + + // Vision Factories (SIM) + private VisionIO[] buildVisionIOsSim(Drive drive) { + var cams = Constants.Cameras.ALL; + VisionIO[] ios = new VisionIO[cams.length]; + for (int i = 0; i < cams.length; i++) { + var cfg = cams[i]; + ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); + } + return ios; + } + + // Vision Factories (REPLAY) + private VisionIO[] buildVisionIOsReplay() { + return new VisionIO[] {}; // simplest: Vision does nothing during replay + } + /** * Example Choreo auto command * diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index fada2245..46788c2b 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -48,10 +48,13 @@ import frc.robot.Constants.DrivebaseConstants; import frc.robot.Constants.RobotConstants; import frc.robot.subsystems.imu.Imu; +import frc.robot.util.ConcurrentTimeInterpolatableBuffer; import frc.robot.util.LocalADStarAK; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; import frc.robot.util.RBSISubsystem; +import java.util.Optional; +import java.util.OptionalDouble; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.littletonrobotics.junction.AutoLogOutput; @@ -59,6 +62,16 @@ public class Drive extends RBSISubsystem { + // --- buffers for time-aligned pose + yaw + yawRate history --- + private final ConcurrentTimeInterpolatableBuffer poseBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createBuffer(1.5); + + private final ConcurrentTimeInterpolatableBuffer yawBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); + + private final ConcurrentTimeInterpolatableBuffer yawRateBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); + static final Lock odometryLock = new ReentrantLock(); private final Imu imu; private final Module[] modules = new Module[4]; // FL, FR, BL, BR @@ -74,6 +87,7 @@ public class Drive extends RBSISubsystem { new SwerveModulePosition(), new SwerveModulePosition() }; + private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; private SwerveDrivePoseEstimator m_PoseEstimator = new SwerveDrivePoseEstimator(kinematics, Rotation2d.kZero, lastModulePositions, Pose2d.kZero); @@ -218,64 +232,98 @@ public Drive(Imu imu) { @Override public void rbsiPeriodic() { odometryLock.lock(); + try { + // Get the IMU inputs + final var imuInputs = imu.getInputs(); + + // Stop modules & log empty setpoint states if disabled + if (DriverStation.isDisabled()) { + for (var module : modules) module.stop(); + Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); + Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); + } - // Get the IMU inputs - final var imuInputs = imu.getInputs(); // primitive inputs + // Module periodic updates, which drains queues this cycle + for (var module : modules) module.periodic(); - // Stop modules & log empty setpoint states if disabled - if (DriverStation.isDisabled()) { - for (var module : modules) { - module.stop(); - } - 
Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); - Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); - } + // Feed historical samples into odometry if REAL robot + if (Constants.getMode() != Mode.SIM) { - // Module periodic updates, which drains queues this cycle - for (var module : modules) { - module.periodic(); - } + // ---- Gather per-module histories ---- + final double[][] perModuleTs = new double[4][]; + final SwerveModulePosition[][] perModulePos = new SwerveModulePosition[4][]; + int unionCap = 0; + + for (int m = 0; m < 4; m++) { + perModuleTs[m] = modules[m].getOdometryTimestamps(); + perModulePos[m] = modules[m].getOdometryPositions(); + unionCap += perModuleTs[m].length; + } + + if (unionCap == 0) { + gyroDisconnectedAlert.set(!imuInputs.connected); + return; + } - // Feed historical samples into odometry if REAL robot - if (Constants.getMode() != Mode.SIM) { - final double[] sampleTimestamps = modules[0].getOdometryTimestamps(); - final int sampleCount = sampleTimestamps.length; - - // Reuse arrays to reduce GC (you likely already have lastModulePositions as a field) - final SwerveModulePosition[] modulePositions = new SwerveModulePosition[4]; - final SwerveModulePosition[] moduleDeltas = new SwerveModulePosition[4]; - - for (int i = 0; i < sampleCount; i++) { - for (int moduleIndex = 0; moduleIndex < 4; moduleIndex++) { - modulePositions[moduleIndex] = modules[moduleIndex].getOdometryPositions()[i]; - moduleDeltas[moduleIndex] = - new SwerveModulePosition( - modulePositions[moduleIndex].distanceMeters - - lastModulePositions[moduleIndex].distanceMeters, - modulePositions[moduleIndex].angle); - lastModulePositions[moduleIndex] = modulePositions[moduleIndex]; + // ---- Fill yaw buffer from IMU odometry samples (preferred) ---- + // These timestamps are authoritative for yaw interpolation and yaw-rate gating. + if (imuInputs.connected + && imuInputs.odometryYawTimestamps != null + && imuInputs.odometryYawPositionsRad != null + && imuInputs.odometryYawTimestamps.length == imuInputs.odometryYawPositionsRad.length + && imuInputs.odometryYawTimestamps.length > 0) { + + final double[] yts = imuInputs.odometryYawTimestamps; + final double[] yaws = imuInputs.odometryYawPositionsRad; + + for (int i = 0; i < yts.length; i++) { + yawBuffer.addSample(yts[i], yaws[i]); + if (i > 0) { + double dt = yts[i] - yts[i - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(yts[i], (yaws[i] - yaws[i - 1]) / dt); + } + } + } + } else { + // fallback: buffer "now" yaw only + double now = Timer.getFPGATimestamp(); + yawBuffer.addSample(now, imuInputs.yawPositionRad); + yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); } - // Pick yaw sample if available; otherwise fall back to current yaw - final double yawRad = - (imuInputs.connected - && imuInputs.odometryYawPositionsRad != null - && imuInputs.odometryYawPositionsRad.length > i) - ? 
imuInputs.odometryYawPositionsRad[i] - : imuInputs.yawPositionRad; + // ---- Build union timeline ---- + final double[] unionTs = new double[Math.max(unionCap, 1)]; + final double EPS = 1e-4; // 0.1 ms + final int unionN = buildUnionTimeline(perModuleTs, unionTs, EPS); + + // ---- Replay at union times ---- + for (int i = 0; i < unionN; i++) { + final double t = unionTs[i]; + + // Interpolate each module to union time + for (int m = 0; m < 4; m++) { + SwerveModulePosition p = interpolateModulePosition(perModuleTs[m], perModulePos[m], t); + odomPositions[m] = p; + } - // Boundary conversion: PoseEstimator requires Rotation2d - final Rotation2d yaw = Rotation2d.fromRadians(yawRad); + // Interpolate yaw at union time + final double yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); - // Apply to pose estimator - m_PoseEstimator.updateWithTime(sampleTimestamps[i], yaw, modulePositions); + m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + + // keep buffer in the same timebase as estimator + poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); + } + + Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); } - Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); - } - odometryLock.unlock(); + gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); - gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); + } finally { + odometryLock.unlock(); + } } /** Simulation Periodic Method */ @@ -327,6 +375,8 @@ public void simulationPeriodic() { m_PoseEstimator.addVisionMeasurement(visionPose, visionTimestamp, visionStdDevs); } + poseBuffer.addSample(Timer.getFPGATimestamp(), simPhysics.getPose()); + // 7) Logging Logger.recordOutput("Sim/Pose", simPhysics.getPose()); Logger.recordOutput("Sim/YawRad", yawRad); @@ -463,6 +513,37 @@ public Pose2d getPose() { return m_PoseEstimator.getEstimatedPosition(); } + /** Returns interpolated odometry pose at a given timestamp. */ + public Optional getPoseAtTime(double timestampSeconds) { + return poseBuffer.getSample(timestampSeconds); + } + + /** Adds a vision measurement safely into the PoseEstimator. */ + public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { + odometryLock.lock(); + try { + m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); + } finally { + odometryLock.unlock(); + } + } + + /** Max abs yaw rate over [t0, t1] using buffered yaw-rate history (no streams). */ + public OptionalDouble getMaxAbsYawRateRadPerSec(double t0, double t1) { + if (t1 < t0) return OptionalDouble.empty(); + var sub = yawRateBuffer.getInternalBuffer().subMap(t0, true, t1, true); + if (sub.isEmpty()) return OptionalDouble.empty(); + + double maxAbs = 0.0; + boolean any = false; + for (double v : sub.values()) { + any = true; + double a = Math.abs(v); + if (a > maxAbs) maxAbs = a; + } + return any ? OptionalDouble.of(maxAbs) : OptionalDouble.empty(); + } + /** Returns the current odometry rotation. */ @AutoLogOutput(key = "Odometry/Yaw") public Rotation2d getHeading() { @@ -550,6 +631,7 @@ public double getMaxAngularAccelRadPerSecPerSec() { /** Resets the current odometry pose. */ public void resetPose(Pose2d pose) { m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), pose); + markPoseReset(Timer.getFPGATimestamp()); } /** Zeros the gyro based on alliance color */ @@ -559,21 +641,33 @@ public void zeroHeadingForAlliance() { ? 
Rotation2d.kZero : Rotation2d.k180deg); resetHeadingController(); + markPoseReset(Timer.getFPGATimestamp()); } /** Zeros the heading */ public void zeroHeading() { imu.zeroYaw(Rotation2d.kZero); resetHeadingController(); + markPoseReset(Timer.getFPGATimestamp()); + } + + // ---- Pose reset gate (vision + anything latency-sensitive) ---- + private volatile long poseResetEpoch = 0; // monotonic counter + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; + + public long getPoseResetEpoch() { + return poseResetEpoch; } - /** Adds a new timestamped vision measurement. */ - public void addVisionMeasurement( - Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs) { - m_PoseEstimator.addVisionMeasurement( - visionRobotPoseMeters, timestampSeconds, visionMeasurementStdDevs); + public double getLastPoseResetTimestamp() { + return lastPoseResetTimestamp; + } + + private void markPoseReset(double fpgaNow) { + lastPoseResetTimestamp = fpgaNow; + poseResetEpoch++; + Logger.recordOutput("Drive/PoseResetEpoch", poseResetEpoch); + Logger.recordOutput("Drive/PoseResetTimestamp", lastPoseResetTimestamp); } /** CHOREO SECTION (Ignore if AutoType == PATHPLANNER) ******************* */ @@ -670,4 +764,69 @@ private edu.wpi.first.math.Matrix getSimulatedVisionStdDevs() { stdDevs.set(2, 0, Math.toRadians(2)); // rotation standard deviation (radians) return stdDevs; } + + private static SwerveModulePosition interpolateModulePosition( + double[] ts, SwerveModulePosition[] samples, double t) { + + final int n = ts.length; + if (n == 0) return new SwerveModulePosition(); + + if (t <= ts[0]) return samples[0]; + if (t >= ts[n - 1]) return samples[n - 1]; + + int lo = 0, hi = n - 1; + while (lo < hi) { + int mid = (lo + hi) >>> 1; + if (ts[mid] < t) lo = mid + 1; + else hi = mid; + } + int i1 = lo; + int i0 = i1 - 1; + + double t0 = ts[i0]; + double t1 = ts[i1]; + if (t1 <= t0) return samples[i1]; + + double alpha = (t - t0) / (t1 - t0); + + double dist = + samples[i0].distanceMeters + + (samples[i1].distanceMeters - samples[i0].distanceMeters) * alpha; + + Rotation2d angle = samples[i0].angle.interpolate(samples[i1].angle, alpha); + + return new SwerveModulePosition(dist, angle); + } + + private static int buildUnionTimeline(double[][] perModuleTs, double[] outUnion, double epsSec) { + int[] idx = new int[perModuleTs.length]; + int outN = 0; + + while (true) { + double minT = Double.POSITIVE_INFINITY; + boolean any = false; + + for (int m = 0; m < perModuleTs.length; m++) { + double[] ts = perModuleTs[m]; + if (idx[m] >= ts.length) continue; + any = true; + minT = Math.min(minT, ts[idx[m]]); + } + + if (!any) break; + + if (outN == 0 || Math.abs(minT - outUnion[outN - 1]) > epsSec) { + outUnion[outN++] = minT; + } + + for (int m = 0; m < perModuleTs.length; m++) { + double[] ts = perModuleTs[m]; + while (idx[m] < ts.length && Math.abs(ts[idx[m]] - minT) <= epsSec) { + idx[m]++; + } + } + } + + return outN; + } } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 04e20417..908cf9c5 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -1,13 +1,19 @@ // Copyright (c) 2024-2026 Az-FIRST // http://github.com/AZ-First -// Copyright (c) 2024-2025 FRC 2486 -// http://github.com/Coconuts2486-FRC -// Copyright (c) 2021-2026 Littleton Robotics -// http://github.com/Mechanical-Advantage // -// Use of this source code 
is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. package frc.robot.subsystems.vision; @@ -18,183 +24,419 @@ import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; -import edu.wpi.first.wpilibj.Alert; -import edu.wpi.first.wpilibj.Alert.AlertType; import frc.robot.Constants; import frc.robot.FieldConstants; +import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; import frc.robot.util.VirtualSubsystem; +import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Optional; +import java.util.OptionalDouble; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; import org.littletonrobotics.junction.Logger; public class Vision extends VirtualSubsystem { + + @FunctionalInterface + public interface VisionConsumer { + void accept(Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix stdDevs); + } + + private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} + private final VisionConsumer consumer; + private final Drive drive; + private long lastSeenPoseResetEpoch = -1; + private final VisionIO[] io; private final VisionIOInputsAutoLogged[] inputs; - private final Alert[] disconnectedAlerts; - // Camera configs (names, transforms, stddev multipliers, sim props) - private final Constants.Cameras.CameraConfig[] camConfigs; + private final Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; - // ---------------- Reusable scratch buffers (avoid per-loop allocations) ---------------- - // Summary buffers - private final ArrayList allTagPoses = new ArrayList<>(32); - private final ArrayList allRobotPoses = new ArrayList<>(64); - private final ArrayList allRobotPosesAccepted = new ArrayList<>(64); - private final ArrayList allRobotPosesRejected = new ArrayList<>(64); + // --- Per-camera monotonic gate --- + private final double[] lastAcceptedTsPerCam; - // Per-camera buffers (reused each camera) - private final ArrayList tagPoses = new ArrayList<>(16); - private final ArrayList robotPoses = new ArrayList<>(32); - private final ArrayList robotPosesAccepted = new ArrayList<>(32); - private final ArrayList robotPosesRejected = new ArrayList<>(32); + // --- Pose reset gate --- + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; - public Vision(VisionConsumer consumer, VisionIO... 
io) { + // --- Smoothing buffer (recent fused estimates) --- + private final ArrayDeque fusedBuffer = new ArrayDeque<>(); + private final double smoothWindowSec = 0.25; + private final int smoothMaxSize = 12; + + // --- Trusted tags configuration (swappable per event/field) --- + private final AtomicReference> trustedTags = new AtomicReference<>(Set.of()); + private volatile boolean requireTrustedTag = false; + + // Scale factors applied based on fraction of trusted tags in an observation + private volatile double trustedTagStdDevScale = 0.70; // < 1 => more trusted + private volatile double untrustedTagStdDevScale = 1.40; // > 1 => less trusted + + // --- Optional 254-style yaw gate for single-tag --- + private volatile boolean enableSingleTagYawGate = true; + private volatile double yawGateLookbackSec = 0.30; + private volatile double yawGateLimitRadPerSec = 5.0; + + public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { + this.drive = drive; this.consumer = consumer; this.io = io; - this.camConfigs = Constants.Cameras.ALL; - - // Initialize inputs this.inputs = new VisionIOInputsAutoLogged[io.length]; - for (int i = 0; i < inputs.length; i++) { + for (int i = 0; i < io.length; i++) { inputs[i] = new VisionIOInputsAutoLogged(); } - // Initialize disconnected alerts - this.disconnectedAlerts = new Alert[io.length]; - for (int i = 0; i < io.length; i++) { - disconnectedAlerts[i] = - new Alert("Vision camera " + i + " is disconnected.", AlertType.kWarning); - } + this.lastAcceptedTsPerCam = new double[io.length]; + java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); - // Log robot-to-camera transforms from the new camera config array - // (Only log as many as exist in BOTH configs and IOs) + // Log robot->camera transforms if available int n = Math.min(camConfigs.length, io.length); for (int i = 0; i < n; i++) { Logger.recordOutput("Vision/RobotToCamera" + i, camConfigs[i].robotToCamera()); } } - /** Returns the X angle to the best target, useful for simple servoing. */ - public Rotation2d getTargetX(int cameraIndex) { - return inputs[cameraIndex].latestTargetObservation.tx(); + // -------- Runtime configuration hooks -------- + + /** Call when you reset odometry (e.g. auto start, manual reset, etc). */ + public void resetPoseGate(double fpgaNowSeconds) { + lastPoseResetTimestamp = fpgaNowSeconds; + fusedBuffer.clear(); + java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); + } + + /** Swap trusted tag set per event/field without redeploy. 
*/ + public void setTrustedTags(Set tags) { + trustedTags.set(Set.copyOf(tags)); + } + + public void setRequireTrustedTag(boolean require) { + requireTrustedTag = require; + } + + public void setTrustedTagStdDevScales(double trustedScale, double untrustedScale) { + trustedTagStdDevScale = trustedScale; + untrustedTagStdDevScale = untrustedScale; } + public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limitRadPerSec) { + enableSingleTagYawGate = enabled; + yawGateLookbackSec = lookbackSec; + yawGateLimitRadPerSec = limitRadPerSec; + } + + // -------- Core periodic -------- + @Override public void rbsiPeriodic() { - // 1) Update inputs + process inputs first (keeps AK logs consistent) + + long epoch = drive.getPoseResetEpoch(); + if (epoch != lastSeenPoseResetEpoch) { + lastSeenPoseResetEpoch = epoch; + resetPoseGate(drive.getLastPoseResetTimestamp()); // your existing method + Logger.recordOutput("Vision/PoseGateResetFromDrive", true); + } else { + Logger.recordOutput("Vision/PoseGateResetFromDrive", false); + } + + // 1) Update/log camera inputs for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); - disconnectedAlerts[i].set(!inputs[i].connected); - } - - // 2) Clear summary buffers (reused) - allTagPoses.clear(); - allRobotPoses.clear(); - allRobotPosesAccepted.clear(); - allRobotPosesRejected.clear(); - - // 3) Process each camera - for (int cameraIndex = 0; cameraIndex < io.length; cameraIndex++) { - // Clear per-camera buffers - tagPoses.clear(); - robotPoses.clear(); - robotPosesAccepted.clear(); - robotPosesRejected.clear(); - - // Add tag poses from ids - for (int tagId : inputs[cameraIndex].tagIds) { - var tagPose = FieldConstants.aprilTagLayout.getTagPose(tagId); - if (tagPose.isPresent()) { - tagPoses.add(tagPose.get()); - } - } + } - // Loop over pose observations - for (var observation : inputs[cameraIndex].poseObservations) { - // Reject rules - boolean rejectPose = - observation.tagCount() == 0 - || (observation.tagCount() == 1 && observation.ambiguity() > maxAmbiguity) - || Math.abs(observation.pose().getZ()) > maxZError - || observation.pose().getX() < 0.0 - || observation.pose().getX() > FieldConstants.aprilTagLayout.getFieldLength() - || observation.pose().getY() < 0.0 - || observation.pose().getY() > FieldConstants.aprilTagLayout.getFieldWidth(); - - // Log pose buckets - robotPoses.add(observation.pose()); - if (rejectPose) { - robotPosesRejected.add(observation.pose()); - } else { - robotPosesAccepted.add(observation.pose()); - } + // 2) Pick one best accepted estimate per camera for this loop + final ArrayList perCamAccepted = new ArrayList<>(io.length); + + for (int cam = 0; cam < io.length; cam++) { + int seen = 0; + int accepted = 0; + int rejected = 0; - if (rejectPose) { + TimedEstimate best = null; + double bestTrustScale = Double.NaN; + int bestTrustedCount = 0; + int bestTagCount = 0; + + for (var obs : inputs[cam].poseObservations) { + seen++; + + GateResult gate = passesHardGatesAndYawGate(cam, obs); + if (!gate.accepted) { + rejected++; continue; } - // Standard deviations - double stdDevFactor = - Math.pow(observation.averageTagDistance(), 2.0) / observation.tagCount(); - double linearStdDev = linearStdDevBaseline * stdDevFactor; - double angularStdDev = angularStdDevBaseline * stdDevFactor; - - if (observation.type() == PoseObservationType.MEGATAG_2) { - linearStdDev *= linearStdDevMegatag2Factor; - angularStdDev *= angularStdDevMegatag2Factor; + BuiltEstimate 
built = buildEstimate(cam, obs); + if (built == null) { + rejected++; + continue; } - // Apply per-camera multiplier from CameraConfig - if (cameraIndex < camConfigs.length) { - double k = camConfigs[cameraIndex].stdDevFactor(); - linearStdDev *= k; - angularStdDev *= k; + TimedEstimate est = built.estimate; + if (best == null || isBetter(est, best)) { + best = est; + bestTrustScale = built.trustScale; + bestTrustedCount = built.trustedCount; + bestTagCount = obs.tagCount(); } + } + + if (best != null) { + accepted++; + lastAcceptedTsPerCam[cam] = best.ts; + perCamAccepted.add(best); - consumer.accept( - observation.pose().toPose2d(), - observation.timestamp(), - VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedTimestamp", best.ts); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedStdDevs", best.stdDevs); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustScale", bestTrustScale); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustedCount", bestTrustedCount); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTagCount", bestTagCount); } - // Per-camera logs (arrays allocate; acceptable if you’re OK with this in the log loop) - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/TagPoses", tagPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPoses", robotPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPosesAccepted", - robotPosesAccepted.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPosesRejected", - robotPosesRejected.toArray(new Pose3d[0])); - - // Aggregate summary - allTagPoses.addAll(tagPoses); - allRobotPoses.addAll(robotPoses); - allRobotPosesAccepted.addAll(robotPosesAccepted); - allRobotPosesRejected.addAll(robotPosesRejected); - } - - // 4) Summary logs - Logger.recordOutput("Vision/Summary/TagPoses", allTagPoses.toArray(new Pose3d[0])); - Logger.recordOutput("Vision/Summary/RobotPoses", allRobotPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Summary/RobotPosesAccepted", allRobotPosesAccepted.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Summary/RobotPosesRejected", allRobotPosesRejected.toArray(new Pose3d[0])); + Logger.recordOutput("Vision/Camera" + cam + "/ObsSeen", seen); + Logger.recordOutput("Vision/Camera" + cam + "/ObsAccepted", accepted); + Logger.recordOutput("Vision/Camera" + cam + "/ObsRejected", rejected); + } + + if (perCamAccepted.isEmpty()) return; + + // 3) Fusion time is the newest timestamp among accepted per-camera samples + double tF = perCamAccepted.stream().mapToDouble(e -> e.ts).max().orElse(Double.NaN); + if (!Double.isFinite(tF)) return; + + // 4) Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse + TimedEstimate fused = fuseAtTime(perCamAccepted, tF); + if (fused == null) return; + + // 5) Smooth by fusing recent fused estimates (also aligned to tF) + pushFused(fused); + TimedEstimate smoothed = smoothAtTime(tF); + if (smoothed == null) return; + + // 6) Inject + consumer.accept(smoothed.pose, smoothed.ts, smoothed.stdDevs); + + Logger.recordOutput("Vision/FusedPose", fused.pose); + Logger.recordOutput("Vision/SmoothedPose", smoothed.pose); + Logger.recordOutput("Vision/FusedTimestamp", tF); } - @FunctionalInterface - public static interface VisionConsumer { - void accept( 
- Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs); + // -------- Gating + scoring -------- + + private static final class GateResult { + final boolean accepted; + + GateResult(boolean accepted) { + this.accepted = accepted; + } + } + + private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation obs) { + final double ts = obs.timestamp(); + + // Monotonic per-camera time + if (ts <= lastAcceptedTsPerCam[cam]) return new GateResult(false); + + // Reject anything older than last pose reset + if (ts < lastPoseResetTimestamp) return new GateResult(false); + + // Must have tags + if (obs.tagCount() <= 0) return new GateResult(false); + + // Single-tag ambiguity gate + if (obs.tagCount() == 1 && obs.ambiguity() > maxAmbiguity) return new GateResult(false); + + // Z sanity + if (Math.abs(obs.pose().getZ()) > maxZError) return new GateResult(false); + + // Field bounds + Pose3d p = obs.pose(); + if (p.getX() < 0.0 || p.getX() > FieldConstants.aprilTagLayout.getFieldLength()) + return new GateResult(false); + if (p.getY() < 0.0 || p.getY() > FieldConstants.aprilTagLayout.getFieldWidth()) + return new GateResult(false); + + // Optional 254-style yaw gate: only meaningful for single-tag + if (enableSingleTagYawGate && obs.tagCount() == 1) { + OptionalDouble maxYaw = drive.getMaxAbsYawRateRadPerSec(ts - yawGateLookbackSec, ts); + if (maxYaw.isPresent() && maxYaw.getAsDouble() > yawGateLimitRadPerSec) { + return new GateResult(false); + } + } + + return new GateResult(true); + } + + private static final class BuiltEstimate { + final TimedEstimate estimate; + final double trustScale; + final int trustedCount; + + BuiltEstimate(TimedEstimate estimate, double trustScale, int trustedCount) { + this.estimate = estimate; + this.trustScale = trustScale; + this.trustedCount = trustedCount; + } + } + + private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { + // Base uncertainty grows with distance^2 / tagCount (your 2486-style) + final double tagCount = Math.max(1, obs.tagCount()); + final double avgDist = Math.max(0.0, obs.averageTagDistance()); + final double distFactor = (avgDist * avgDist) / tagCount; + + final double camFactor = (cam < camConfigs.length) ? camConfigs[cam].stdDevFactor() : 1.0; + + double linearStdDev = linearStdDevBaseline * camFactor * distFactor; + double angularStdDev = angularStdDevBaseline * camFactor * distFactor; + + // MegaTag2 bonus if applicable + if (obs.type() == PoseObservationType.MEGATAG_2) { + linearStdDev *= linearStdDevMegatag2Factor; + angularStdDev *= angularStdDevMegatag2Factor; + } + + // Trusted tag blending + final Set kTrusted = trustedTags.get(); + int trustedCount = 0; + for (int id : obs.usedTagIds()) { + if (kTrusted.contains(id)) trustedCount++; + } + + if (requireTrustedTag && trustedCount == 0) { + return null; + } + + final int usedCount = obs.usedTagIds().size(); + final double fracTrusted = (usedCount == 0) ? 
0.0 : ((double) trustedCount / usedCount); + + final double trustScale = + untrustedTagStdDevScale + fracTrusted * (trustedTagStdDevScale - untrustedTagStdDevScale); + + linearStdDev *= trustScale; + angularStdDev *= trustScale; + + // Output logs for tuning + Logger.recordOutput("Vision/Camera" + cam + "/InjectedFracTrusted", fracTrusted); + + return new BuiltEstimate( + new TimedEstimate( + obs.pose().toPose2d(), + obs.timestamp(), + VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)), + trustScale, + trustedCount); + } + + private boolean isBetter(TimedEstimate a, TimedEstimate b) { + // Lower trace of stddev vector (x+y+theta) is better + double ta = a.stdDevs.get(0, 0) + a.stdDevs.get(1, 0) + a.stdDevs.get(2, 0); + double tb = b.stdDevs.get(0, 0) + b.stdDevs.get(1, 0) + b.stdDevs.get(2, 0); + return ta < tb; + } + + // -------- Time alignment + fusion -------- + + private TimedEstimate fuseAtTime(ArrayList estimates, double tF) { + final ArrayList aligned = new ArrayList<>(estimates.size()); + for (var e : estimates) { + Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + if (alignedPose == null) return null; + aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + } + return inverseVarianceFuse(aligned, tF); + } + + private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tF) { + Optional odomAtTsOpt = drive.getPoseAtTime(ts); + Optional odomAtTFOpt = drive.getPoseAtTime(tF); + if (odomAtTsOpt.isEmpty() || odomAtTFOpt.isEmpty()) return null; + + Pose2d odomAtTs = odomAtTsOpt.get(); + Pose2d odomAtTF = odomAtTFOpt.get(); + + // Transform that takes odomAtTs -> odomAtTF + Transform2d ts_T_tf = odomAtTF.minus(odomAtTs); + + // Apply same motion to vision pose to bring it forward + return visionPoseAtTs.transformBy(ts_T_tf); + } + + private TimedEstimate inverseVarianceFuse(ArrayList alignedAtTF, double tF) { + if (alignedAtTF.isEmpty()) return null; + if (alignedAtTF.size() == 1) return alignedAtTF.get(0); + + double sumWx = 0, sumWy = 0, sumWth = 0; + double sumX = 0, sumY = 0; + double sumCos = 0, sumSin = 0; + + for (var e : alignedAtTF) { + double sx = e.stdDevs.get(0, 0); + double sy = e.stdDevs.get(1, 0); + double sth = e.stdDevs.get(2, 0); + + double vx = sx * sx; + double vy = sy * sy; + double vth = sth * sth; + + double wx = 1.0 / vx; + double wy = 1.0 / vy; + double wth = 1.0 / vth; + + sumWx += wx; + sumWy += wy; + sumWth += wth; + + sumX += e.pose.getX() * wx; + sumY += e.pose.getY() * wy; + + sumCos += e.pose.getRotation().getCos() * wth; + sumSin += e.pose.getRotation().getSin() * wth; + } + + Pose2d fused = new Pose2d(sumX / sumWx, sumY / sumWy, new Rotation2d(sumCos, sumSin)); + + Matrix fusedStd = + VecBuilder.fill(Math.sqrt(1.0 / sumWx), Math.sqrt(1.0 / sumWy), Math.sqrt(1.0 / sumWth)); + + return new TimedEstimate(fused, tF, fusedStd); + } + + // -------- Smoothing buffer -------- + + private void pushFused(TimedEstimate fused) { + fusedBuffer.addLast(fused); + + while (fusedBuffer.size() > smoothMaxSize) { + fusedBuffer.removeFirst(); + } + + // Trim by time window relative to newest + while (!fusedBuffer.isEmpty() && fused.ts - fusedBuffer.peekFirst().ts > smoothWindowSec) { + fusedBuffer.removeFirst(); + } + } + + private TimedEstimate smoothAtTime(double tF) { + if (fusedBuffer.isEmpty()) return null; + if (fusedBuffer.size() == 1) return fusedBuffer.peekLast(); + + final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); + for (var e : fusedBuffer) { + Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + if (alignedPose 
== null) continue; + aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + } + + if (aligned.isEmpty()) return fusedBuffer.peekLast(); + return inverseVarianceFuse(aligned, tF); } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java index c9ed8349..fad294d2 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ -11,35 +11,44 @@ import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; +import java.util.Set; import org.littletonrobotics.junction.AutoLog; public interface VisionIO { @AutoLog - public static class VisionIOInputs { + class VisionIOInputs { public boolean connected = false; + + /** Latest "servo to target" observation (optional) */ public TargetObservation latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); + + /** Pose observations generated by the camera pipeline (each has its own timestamp) */ public PoseObservation[] poseObservations = new PoseObservation[0]; + + /** Union of tag IDs seen this loop (for logging/UI only) */ public int[] tagIds = new int[0]; } /** Represents the angle to a simple target, not used for pose estimation. */ - public static record TargetObservation(Rotation2d tx, Rotation2d ty) {} + record TargetObservation(Rotation2d tx, Rotation2d ty) {} /** Represents a robot pose sample used for pose estimation. */ - public static record PoseObservation( - double timestamp, - Pose3d pose, - double ambiguity, + record PoseObservation( + double timestamp, // camera capture time (seconds, same timebase as FPGA) + Pose3d pose, // field->robot pose + double ambiguity, // single-tag ambiguity if available int tagCount, double averageTagDistance, - PoseObservationType type) {} + PoseObservationType type, + Set usedTagIds // immutable per observation + ) {} - public static enum PoseObservationType { + enum PoseObservationType { MEGATAG_1, MEGATAG_2, PHOTONVISION } - public default void updateInputs(VisionIOInputs inputs) {} + default void updateInputs(VisionIOInputs inputs) {} } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java index a4b1eaf6..07c83c7b 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java @@ -96,7 +96,10 @@ public void updateInputs(VisionIOInputs inputs) { rawSample.value[9], // Observation type - PoseObservationType.MEGATAG_1)); + PoseObservationType.MEGATAG_1, + + // Visible Tag IDs + tagIds)); } for (var rawSample : megatag2Subscriber.readQueue()) { if (rawSample.value.length == 0) continue; @@ -121,7 +124,10 @@ public void updateInputs(VisionIOInputs inputs) { rawSample.value[9], // Observation type - PoseObservationType.MEGATAG_2)); + PoseObservationType.MEGATAG_2, + + // Visible Tag IDs + tagIds)); } // Save pose observations to inputs object diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java index f8d9cdd9..14a2867b 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java @@ -19,7 +19,7 @@ import java.util.Set; import org.photonvision.PhotonCamera; -/** IO implementation for real PhotonVision hardware. 
*/ +/** IO implementation for real PhotonVision hardware (pose already solved by PV). */ public class VisionIOPhotonVision implements VisionIO { protected final PhotonCamera camera; protected final Transform3d robotToCamera; @@ -28,10 +28,10 @@ public class VisionIOPhotonVision implements VisionIO { * Creates a new VisionIOPhotonVision. * * @param name The configured name of the camera. - * @param rotationSupplier The 3D position of the camera relative to the robot. + * @param robotToCamera The 3D position of the camera relative to the robot. */ public VisionIOPhotonVision(String name, Transform3d robotToCamera) { - camera = new PhotonCamera(name); + this.camera = new PhotonCamera(name); this.robotToCamera = robotToCamera; } @@ -42,33 +42,30 @@ public void updateInputs(VisionIOInputs inputs) { // Cap the number of unread results processed per loop final int kMaxUnread = 5; - // Use HashSet/ArrayList to avoid LinkedList churn - Set tagIds = new HashSet<>(); - ArrayList poseObservations = new ArrayList<>(kMaxUnread); + final Set unionTagIds = new HashSet<>(); + final ArrayList poseObservations = new ArrayList<>(kMaxUnread); + + double newestTargetTs = Double.NEGATIVE_INFINITY; + Rotation2d bestYaw = Rotation2d.kZero; + Rotation2d bestPitch = Rotation2d.kZero; int processed = 0; for (var result : camera.getAllUnreadResults()) { // Hard cap - if (processed++ >= kMaxUnread) { - break; - } + if (processed++ >= kMaxUnread) break; + + final double ts = result.getTimestampSeconds(); - // Update latest target observation - if (result.hasTargets()) { - inputs.latestTargetObservation = - new TargetObservation( - Rotation2d.fromDegrees(result.getBestTarget().getYaw()), - Rotation2d.fromDegrees(result.getBestTarget().getPitch())); - } else { - inputs.latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); + if (result.hasTargets() && ts >= newestTargetTs) { + newestTargetTs = ts; + bestYaw = Rotation2d.fromDegrees(result.getBestTarget().getYaw()); + bestPitch = Rotation2d.fromDegrees(result.getBestTarget().getPitch()); } - // Add pose observation - if (result.multitagResult.isPresent()) { // Multitag result - var multitagResult = result.multitagResult.get(); + if (result.multitagResult.isPresent()) { + var multitag = result.multitagResult.get(); - // Calculate robot pose - Transform3d fieldToCamera = multitagResult.estimatedPose.best; + Transform3d fieldToCamera = multitag.estimatedPose.best; Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); @@ -77,64 +74,62 @@ public void updateInputs(VisionIOInputs inputs) { for (var target : result.targets) { totalTagDistance += target.bestCameraToTarget.getTranslation().getNorm(); } - - // Add tag IDs - tagIds.addAll(multitagResult.fiducialIDsUsed); - - // Guard against divide-by-zero if targets is empty (defensive) double avgTagDistance = result.targets.isEmpty() ? 
0.0 : (totalTagDistance / result.targets.size()); - // Add observation + Set used = new HashSet<>(); + for (int id : multitag.fiducialIDsUsed) { + used.add(id); + unionTagIds.add(id); + } + poseObservations.add( new PoseObservation( - result.getTimestampSeconds(), // Timestamp - robotPose, // 3D pose estimate - multitagResult.estimatedPose.ambiguity, // Ambiguity - multitagResult.fiducialIDsUsed.size(), // Tag count - avgTagDistance, // Average tag distance - PoseObservationType.PHOTONVISION)); // Observation type - - } else if (!result.targets.isEmpty()) { // Single tag result + ts, + robotPose, + multitag.estimatedPose.ambiguity, + multitag.fiducialIDsUsed.size(), + avgTagDistance, + PoseObservationType.PHOTONVISION, + Set.copyOf(used))); + + } else if (!result.targets.isEmpty()) { var target = result.targets.get(0); - // Calculate robot pose var tagPose = aprilTagLayout.getTagPose(target.fiducialId); - if (tagPose.isPresent()) { - Transform3d fieldToTarget = - new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); - Transform3d cameraToTarget = target.bestCameraToTarget; - Transform3d fieldToCamera = fieldToTarget.plus(cameraToTarget.inverse()); - Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); - Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); - - // Add tag ID - tagIds.add((short) target.fiducialId); - - // Add observation - poseObservations.add( - new PoseObservation( - result.getTimestampSeconds(), // Timestamp - robotPose, // 3D pose estimate - target.poseAmbiguity, // Ambiguity - 1, // Tag count - cameraToTarget.getTranslation().getNorm(), // Average tag distance - PoseObservationType.PHOTONVISION)); // Observation type - } + if (tagPose.isEmpty()) continue; + + Transform3d fieldToTarget = + new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); + Transform3d cameraToTarget = target.bestCameraToTarget; + + Transform3d fieldToCamera = fieldToTarget.plus(cameraToTarget.inverse()); + Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); + Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); + + unionTagIds.add(target.fiducialId); + + poseObservations.add( + new PoseObservation( + ts, + robotPose, + target.poseAmbiguity, + 1, + cameraToTarget.getTranslation().getNorm(), + PoseObservationType.PHOTONVISION, + Set.of(target.fiducialId))); } } - // Save pose observations to inputs object - inputs.poseObservations = new PoseObservation[poseObservations.size()]; - for (int i = 0; i < poseObservations.size(); i++) { - inputs.poseObservations[i] = poseObservations.get(i); - } + inputs.latestTargetObservation = + (newestTargetTs > Double.NEGATIVE_INFINITY) + ? 
new TargetObservation(bestYaw, bestPitch) + : new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); - // Save tag IDs to inputs objects - inputs.tagIds = new int[tagIds.size()]; + inputs.poseObservations = poseObservations.toArray(new PoseObservation[0]); + + inputs.tagIds = new int[unionTagIds.size()]; int i = 0; - for (int id : tagIds) { - inputs.tagIds[i++] = id; - } + for (int id : unionTagIds) inputs.tagIds[i++] = id; } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java index fd4525b6..4e5761b9 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java @@ -22,34 +22,50 @@ public class VisionIOPhotonVisionSim extends VisionIOPhotonVision { private static VisionSystemSim visionSim; private final Supplier poseSupplier; + + @SuppressWarnings("unused") private final PhotonCameraSim cameraSim; /** * Creates a new VisionIOPhotonVisionSim. * - * @param name The name of the camera. - * @param poseSupplier Supplier for the robot pose to use in simulation. + * @param name The name of the camera (PhotonVision camera name). + * @param robotToCamera Camera pose relative to robot frame. + * @param poseSupplier Supplier for the robot pose (field->robot) to use in simulation. */ public VisionIOPhotonVisionSim( String name, Transform3d robotToCamera, Supplier poseSupplier) { super(name, robotToCamera); this.poseSupplier = poseSupplier; - // Initialize vision sim + // Initialize VisionSystemSim once for all cameras if (visionSim == null) { visionSim = new VisionSystemSim("main"); visionSim.addAprilTags(FieldConstants.aprilTagLayout); } - // Add sim camera + // Camera properties: + // - If you have per-camera SimCameraProperties in Constants, pass them here instead. + // - Otherwise keep the default and tune later. var cameraProperties = new SimCameraProperties(); + + // Recommended defaults (feel free to tune) + // cameraProperties.setCalibration(1280, 800, Rotation2d.fromDegrees(100)); + // cameraProperties.setFPS(20); + // cameraProperties.setAvgLatencyMs(35); + // cameraProperties.setLatencyStdDevMs(5); + cameraSim = new PhotonCameraSim(camera, cameraProperties); visionSim.addCamera(cameraSim, robotToCamera); } @Override public void updateInputs(VisionIOInputs inputs) { + // NOTE: This updates the sim world every time a sim camera is polled. + // That's fine (fast enough), but if you want “update once per loop,” see note below. visionSim.update(poseSupplier.get()); + + // Then pull results like normal (and emit PoseObservation + usedTagIds sets) super.updateInputs(inputs); } } diff --git a/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java new file mode 100644 index 00000000..df46efd2 --- /dev/null +++ b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java @@ -0,0 +1,164 @@ +// Copyright (c) 2025 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. + +package frc.robot.util; + +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.interpolation.Interpolatable; +import edu.wpi.first.math.interpolation.Interpolator; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.concurrent.ConcurrentNavigableMap; +import java.util.concurrent.ConcurrentSkipListMap; + +/** + * A concurrent version of WPIlib's TimeInterpolatableBuffer class to avoid the need for explicit + * synchronization in our robot code. + * + * @param The type stored in this buffer. + */ +public final class ConcurrentTimeInterpolatableBuffer { + private final double m_historySize; + private final Interpolator m_interpolatingFunc; + private final ConcurrentNavigableMap m_pastSnapshots = new ConcurrentSkipListMap<>(); + + private ConcurrentTimeInterpolatableBuffer( + Interpolator interpolateFunction, double historySizeSeconds) { + this.m_historySize = historySizeSeconds; + this.m_interpolatingFunc = interpolateFunction; + } + + /** + * Create a new TimeInterpolatableBuffer. + * + * @param interpolateFunction The function used to interpolate between values. + * @param historySizeSeconds The history size of the buffer. + * @param The type of data to store in the buffer. + * @return The new TimeInterpolatableBuffer. + */ + public static ConcurrentTimeInterpolatableBuffer createBuffer( + Interpolator interpolateFunction, double historySizeSeconds) { + return new ConcurrentTimeInterpolatableBuffer<>(interpolateFunction, historySizeSeconds); + } + + /** + * Create a new TimeInterpolatableBuffer that stores a given subclass of {@link Interpolatable}. + * + * @param historySizeSeconds The history size of the buffer. + * @param The type of {@link Interpolatable} to store in the buffer. + * @return The new TimeInterpolatableBuffer. + */ + public static > ConcurrentTimeInterpolatableBuffer createBuffer( + double historySizeSeconds) { + return new ConcurrentTimeInterpolatableBuffer<>( + Interpolatable::interpolate, historySizeSeconds); + } + + /** + * Create a new TimeInterpolatableBuffer to store Double values. + * + * @param historySizeSeconds The history size of the buffer. + * @return The new TimeInterpolatableBuffer. + */ + public static ConcurrentTimeInterpolatableBuffer createDoubleBuffer( + double historySizeSeconds) { + return new ConcurrentTimeInterpolatableBuffer<>(MathUtil::interpolate, historySizeSeconds); + } + + /** + * Add a sample to the buffer. + * + * @param timeSeconds The timestamp of the sample. + * @param sample The sample object. + */ + public void addSample(double timeSeconds, T sample) { + m_pastSnapshots.put(timeSeconds, sample); + cleanUp(timeSeconds); + } + + /** + * Removes samples older than our current history size. + * + * @param time The current timestamp. + */ + private void cleanUp(double time) { + m_pastSnapshots.headMap(time - m_historySize, false).clear(); + } + + /** Clear all old samples. */ + public void clear() { + m_pastSnapshots.clear(); + } + + /** + * Sample the buffer at the given time. If the buffer is empty, an empty Optional is returned. 
+ * + * @param timeSeconds The time at which to sample. + * @return The interpolated value at that timestamp or an empty Optional. + */ + public Optional getSample(double timeSeconds) { + if (m_pastSnapshots.isEmpty()) { + return Optional.empty(); + } + + // Special case for when the requested time is the same as a sample + var nowEntry = m_pastSnapshots.get(timeSeconds); + if (nowEntry != null) { + return Optional.of(nowEntry); + } + + var bottomBound = m_pastSnapshots.floorEntry(timeSeconds); + var topBound = m_pastSnapshots.ceilingEntry(timeSeconds); + + // Return null if neither sample exists, and the opposite bound if the other is + // null + if (topBound == null && bottomBound == null) { + return Optional.empty(); + } else if (topBound == null) { + return Optional.of(bottomBound.getValue()); + } else if (bottomBound == null) { + return Optional.of(topBound.getValue()); + } else { + // Otherwise, interpolate. Because T is between [0, 1], we want the ratio of + // (the difference + // between the current time and bottom bound) and (the difference between top + // and bottom + // bounds). + return Optional.of( + m_interpolatingFunc.interpolate( + bottomBound.getValue(), + topBound.getValue(), + (timeSeconds - bottomBound.getKey()) / (topBound.getKey() - bottomBound.getKey()))); + } + } + + public Entry getLatest() { + return m_pastSnapshots.lastEntry(); + } + + /** + * Grant access to the internal sample buffer. Used in Pose Estimation to replay odometry inputs + * stored within this buffer. + * + * @return The internal sample buffer. + */ + public ConcurrentNavigableMap getInternalBuffer() { + return m_pastSnapshots; + } +} diff --git a/src/main/java/frc/robot/util/GeomUtil.java b/src/main/java/frc/robot/util/GeomUtil.java deleted file mode 100644 index f77c8b2f..00000000 --- a/src/main/java/frc/robot/util/GeomUtil.java +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright (c) 2024-2026 Az-FIRST -// http://github.com/AZ-First -// Copyright (c) 2021-2026 Littleton Robotics -// http://github.com/Mechanical-Advantage -// -// Use of this source code is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. - -package frc.robot.util; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.geometry.Twist2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; - -/** Geometry utilities for working with translations, rotations, transforms, and poses. 
*/ -public class GeomUtil { - /** - * Creates a pure translating transform - * - * @param translation The translation to create the transform with - * @return The resulting transform - */ - public static Transform2d toTransform2d(Translation2d translation) { - return new Transform2d(translation, Rotation2d.kZero); - } - - /** - * Creates a pure translating transform - * - * @param x The x coordinate of the translation - * @param y The y coordinate of the translation - * @return The resulting transform - */ - public static Transform2d toTransform2d(double x, double y) { - return new Transform2d(x, y, Rotation2d.kZero); - } - - /** - * Creates a pure rotating transform - * - * @param rotation The rotation to create the transform with - * @return The resulting transform - */ - public static Transform2d toTransform2d(Rotation2d rotation) { - return new Transform2d(Translation2d.kZero, rotation); - } - - /** - * Converts a Pose2d to a Transform2d to be used in a kinematic chain - * - * @param pose The pose that will represent the transform - * @return The resulting transform - */ - public static Transform2d toTransform2d(Pose2d pose) { - return new Transform2d(pose.getTranslation(), pose.getRotation()); - } - - public static Pose2d inverse(Pose2d pose) { - Rotation2d rotationInverse = pose.getRotation().unaryMinus(); - return new Pose2d( - pose.getTranslation().unaryMinus().rotateBy(rotationInverse), rotationInverse); - } - - /** - * Converts a Transform2d to a Pose2d to be used as a position or as the start of a kinematic - * chain - * - * @param transform The transform that will represent the pose - * @return The resulting pose - */ - public static Pose2d toPose2d(Transform2d transform) { - return new Pose2d(transform.getTranslation(), transform.getRotation()); - } - - /** - * Creates a pure translated pose - * - * @param translation The translation to create the pose with - * @return The resulting pose - */ - public static Pose2d toPose2d(Translation2d translation) { - return new Pose2d(translation, Rotation2d.kZero); - } - - /** - * Creates a pure rotated pose - * - * @param rotation The rotation to create the pose with - * @return The resulting pose - */ - public static Pose2d toPose2d(Rotation2d rotation) { - return new Pose2d(Translation2d.kZero, rotation); - } - - /** - * Multiplies a twist by a scaling factor - * - * @param twist The twist to multiply - * @param factor The scaling factor for the twist components - * @return The new twist - */ - public static Twist2d multiply(Twist2d twist, double factor) { - return new Twist2d(twist.dx * factor, twist.dy * factor, twist.dtheta * factor); - } - - /** - * Converts a Pose3d to a Transform3d to be used in a kinematic chain - * - * @param pose The pose that will represent the transform - * @return The resulting transform - */ - public static Transform3d toTransform3d(Pose3d pose) { - return new Transform3d(pose.getTranslation(), pose.getRotation()); - } - - /** - * Converts a Transform3d to a Pose3d to be used as a position or as the start of a kinematic - * chain - * - * @param transform The transform that will represent the pose - * @return The resulting pose - */ - public static Pose3d toPose3d(Transform3d transform) { - return new Pose3d(transform.getTranslation(), transform.getRotation()); - } - - /** - * Converts a ChassisSpeeds to a Twist2d by extracting two dimensions (Y and Z). 
chain - * - * @param speeds The original translation - * @return The resulting translation - */ - public static Twist2d toTwist2d(ChassisSpeeds speeds) { - return new Twist2d( - speeds.vxMetersPerSecond, speeds.vyMetersPerSecond, speeds.omegaRadiansPerSecond); - } - - /** - * Creates a new pose from an existing one using a different translation value. - * - * @param pose The original pose - * @param translation The new translation to use - * @return The new pose with the new translation and original rotation - */ - public static Pose2d withTranslation(Pose2d pose, Translation2d translation) { - return new Pose2d(translation, pose.getRotation()); - } - - /** - * Creates a new pose from an existing one using a different rotation value. - * - * @param pose The original pose - * @param rotation The new rotation to use - * @return The new pose with the original translation and new rotation - */ - public static Pose2d withRotation(Pose2d pose, Rotation2d rotation) { - return new Pose2d(pose.getTranslation(), rotation); - } -} From 330b07d967196e7d1cd82b5ba112682add402def Mon Sep 17 00:00:00 2001 From: Timothy Ellsworth Bowers Date: Tue, 3 Feb 2026 18:03:44 -0700 Subject: [PATCH 02/19] Code Hygiene (#139) * Clean up the Accelerometer / IMU subsystems -- Clean up the code, but also specifically allow the RIO or IMU to be "on its side" (i.e., Z is not necessarily "up"). * Clean up Drive.java * Clean the Moudule modules a bit * Clean non-ASCII characters * Reduce drivebase periodic cycle time by using primitives * Clean up the Vision module --- src/main/java/frc/robot/Constants.java | 25 +- src/main/java/frc/robot/RobotContainer.java | 23 +- .../accelerometer/Accelerometer.java | 82 +--- .../frc/robot/subsystems/drive/Drive.java | 453 ++++++++++-------- .../subsystems/drive/DriveSimPhysics.java | 5 +- .../frc/robot/subsystems/drive/Module.java | 38 +- .../frc/robot/subsystems/drive/ModuleIO.java | 1 + .../subsystems/drive/ModuleIOTalonFX.java | 74 ++- .../subsystems/drive/SwerveConstants.java | 5 +- .../java/frc/robot/subsystems/imu/Imu.java | 12 +- .../java/frc/robot/subsystems/imu/ImuIO.java | 11 +- .../frc/robot/subsystems/imu/ImuIONavX.java | 53 +- .../robot/subsystems/imu/ImuIOPigeon2.java | 44 +- .../frc/robot/subsystems/imu/ImuIOSim.java | 26 +- .../vision/CameraSweepEvaluator.java | 6 +- .../frc/robot/subsystems/vision/Vision.java | 121 +++-- .../frc/robot/subsystems/vision/VisionIO.java | 2 +- .../vision/VisionIOPhotonVisionSim.java | 2 +- 18 files changed, 519 insertions(+), 464 deletions(-) diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 0b39dcc5..7d30eaed 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -45,6 +45,7 @@ import frc.robot.util.RBSIEnum.SwerveType; import frc.robot.util.RBSIEnum.VisionType; import frc.robot.util.RobotDeviceId; +import java.util.Set; import org.photonvision.simulation.SimCameraProperties; import swervelib.math.Matter; @@ -126,19 +127,18 @@ public static final class RobotConstants { // Insert here the orientation (CCW == +) of the Rio and IMU from the robot // An angle of "0." means the x-y-z markings on the device match the robot's intrinsic reference // frame. 
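A note on units for the orientation constants changed just below: WPILib's Rotation3d(roll, pitch, yaw) constructor takes radians, so a yaw offset intended in degrees has to be converted explicitly. A minimal sketch of that conversion (the -90 degree value mirrors the COMPBOT case below; the variable name is illustrative only):

    import edu.wpi.first.math.geometry.Rotation3d;

    // Rotation3d interprets its arguments as radians, not degrees.
    Rotation3d rioOrientation = new Rotation3d(0.0, 0.0, Math.toRadians(-90.0));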
-  // NOTE: It is assumed that both the Rio and the IMU are mounted such that +Z is UP
-  public static final Rotation2d kRioOrientation =
+  public static final Rotation3d kRioOrientation =
       switch (getRobot()) {
-        case COMPBOT -> Rotation2d.fromDegrees(-90.);
-        case DEVBOT -> Rotation2d.fromDegrees(0.);
-        default -> Rotation2d.fromDegrees(0.);
+        case COMPBOT -> new Rotation3d(0.0, 0.0, Math.toRadians(-90.0));
+        case DEVBOT -> Rotation3d.kZero;
+        default -> Rotation3d.kZero;
       };
   // IMU can be one of Pigeon2 or NavX
-  public static final Rotation2d kIMUOrientation =
+  public static final Rotation3d kIMUOrientation =
       switch (getRobot()) {
-        case COMPBOT -> Rotation2d.fromDegrees(0.);
-        case DEVBOT -> Rotation2d.fromDegrees(0.);
-        default -> Rotation2d.fromDegrees(0.);
+        case COMPBOT -> Rotation3d.kZero;
+        case DEVBOT -> Rotation3d.kZero;
+        default -> Rotation3d.kZero;
       };
 }
@@ -324,6 +324,9 @@ public static final class DrivebaseConstants {
         SwerveConstants.kDriveGearRatio / DCMotor.getKrakenX60Foc(1).KtNMPerAmp;
   public static final double kSteerP = 400.0;
   public static final double kSteerD = 20.0;
+
+    // Odometry-related constants
+    public static final double kHistorySize = 1.5; // seconds
 }
 /************************************************************************* */
@@ -416,8 +419,8 @@ public static final class AutoConstants {
 /** Vision Constants (Assuming PhotonVision) ***************************** */
 public static class VisionConstants {
-    public static final java.util.Set kTrustedTags =
-        java.util.Set.of(2, 3, 4, 5, 8, 9, 10, 11, 18, 19, 20, 21, 24, 25, 26, 27); // HUB AprilTags
+    public static final Set kTrustedTags =
+        Set.of(2, 3, 4, 5, 8, 9, 10, 11, 18, 19, 20, 21, 24, 25, 26, 27); // HUB AprilTags

   // Noise scaling factors (lower = more trusted)
   public static final double kTrustedTagStdDevScale = 0.6; // 40% more weight
diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java
index 9c5a167a..521e09d5 100644
--- a/src/main/java/frc/robot/RobotContainer.java
+++ b/src/main/java/frc/robot/RobotContainer.java
@@ -3,9 +3,15 @@
 // Copyright (c) 2021-2026 Littleton Robotics
 // http://github.com/Mechanical-Advantage
 //
-// Use of this source code is governed by a BSD
-// license that can be found in the AdvantageKit-License.md file
-// at the root directory of this project.
+// This program is free software; you can redistribute it and/or
+// modify it under the terms of the GNU General Public License
+// version 3 as published by the Free Software Foundation or
+// available in the root directory of this project.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
 //
 // Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of @@ -34,6 +40,7 @@ import edu.wpi.first.wpilibj2.command.button.RobotModeTriggers; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; import frc.robot.Constants.CANBuses; +import frc.robot.Constants.Cameras; import frc.robot.Constants.OperatorConstants; import frc.robot.FieldConstants.AprilTagLayoutType; import frc.robot.commands.AutopilotCommands; @@ -98,6 +105,7 @@ public class RobotContainer { // These are "Virtual Subsystems" that report information but have no motors private final Imu m_imu; + private final Vision m_vision; @SuppressWarnings("unused") private final Accelerometer m_accel; @@ -105,9 +113,6 @@ public class RobotContainer { @SuppressWarnings("unused") private final RBSIPowerMonitor m_power; - @SuppressWarnings("unused") - private final Vision m_vision; - @SuppressWarnings("unused") private List canHealth; @@ -167,7 +172,7 @@ public RobotContainer() { m_flywheel = new Flywheel(new FlywheelIOSim()); // ---------------- Vision IOs (robot code) ---------------- - var cams = frc.robot.Constants.Cameras.ALL; + var cams = Cameras.ALL; m_vision = new Vision( m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsSim(m_drivebase)); @@ -520,12 +525,12 @@ private void definesysIdRoutines() { private VisionIO[] buildVisionIOsReal(Drive drive) { return switch (Constants.getVisionType()) { case PHOTON -> - java.util.Arrays.stream(Constants.Cameras.ALL) + Arrays.stream(Constants.Cameras.ALL) .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) .toArray(VisionIO[]::new); case LIMELIGHT -> - java.util.Arrays.stream(Constants.Cameras.ALL) + Arrays.stream(Constants.Cameras.ALL) .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) .toArray(VisionIO[]::new); diff --git a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java index 3bfc5c1f..fcb7dd6c 100644 --- a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java +++ b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java @@ -13,6 +13,7 @@ package frc.robot.subsystems.accelerometer; +import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.wpilibj.Timer; import frc.robot.Constants; import frc.robot.Constants.RobotConstants; @@ -26,30 +27,20 @@ *

This virtual subsystem pulls the acceleration values from both the RoboRIO and the swerve's
 * IMU (either Pigeon2 or NavX) and logs them to AdvantageKit. In addition to the
 * accelerations, the jerk (a-dot or x-triple-dot) is computed from the delta accelerations.
- *
- *

Primitive-only hot path: no WPILib geometry objects or Units objects. */ public class Accelerometer extends VirtualSubsystem { + + // Gravitational acceleration private static final double G_TO_MPS2 = 9.80665; + // Define hardware interfaces private final RioAccelIO rio; private final RioAccelIO.Inputs rioInputs = new RioAccelIO.Inputs(); private final Imu imu; - // Precompute yaw-only rotation terms - private static final double rioCos = Math.cos(RobotConstants.kRioOrientation.getRadians()); - private static final double rioSin = Math.sin(RobotConstants.kRioOrientation.getRadians()); - private static final double imuCos = Math.cos(RobotConstants.kIMUOrientation.getRadians()); - private static final double imuSin = Math.sin(RobotConstants.kIMUOrientation.getRadians()); - - // Previous Rio accel (m/s^2) - private double prevRioAx = 0.0, prevRioAy = 0.0, prevRioAz = 0.0; - - // Reusable arrays for logging - private final double[] rioAccelArr = new double[3]; - private final double[] rioJerkArr = new double[3]; - private final double[] imuAccelArr = new double[3]; - private final double[] imuJerkArr = new double[3]; + // Variables needed during the periodic + private Translation3d rawRio, rioAcc, rioJerk, imuAcc, imuJerk; + private Translation3d prevRioAcc = Translation3d.kZero; // Log decimation private int loopCount = 0; @@ -74,58 +65,27 @@ public void rbsiPeriodic() { rio.updateInputs(rioInputs); // Compute RIO accelerations and jerks - final double rawX = rioInputs.xG; - final double rawY = rioInputs.yG; - final double rawZ = rioInputs.zG; - - final double rioAx = (rioCos * rawX - rioSin * rawY) * G_TO_MPS2; - final double rioAy = (rioSin * rawX + rioCos * rawY) * G_TO_MPS2; - final double rioAz = rawZ * G_TO_MPS2; - - final double rioJx = (rioAx - prevRioAx) / Constants.loopPeriodSecs; - final double rioJy = (rioAy - prevRioAy) / Constants.loopPeriodSecs; - final double rioJz = (rioAz - prevRioAz) / Constants.loopPeriodSecs; + rawRio = new Translation3d(rioInputs.xG, rioInputs.yG, rioInputs.zG); + rioAcc = rawRio.rotateBy(RobotConstants.kRioOrientation).times(G_TO_MPS2); // Acceleration from previous loop - prevRioAx = rioAx; - prevRioAy = rioAy; - prevRioAz = rioAz; - - // IMU rotate is also compute-only (already primitives) - final double imuAx = (imuCos * imuInputs.linearAccelX - imuSin * imuInputs.linearAccelY); - final double imuAy = (imuSin * imuInputs.linearAccelX + imuCos * imuInputs.linearAccelY); - final double imuAz = imuInputs.linearAccelZ; - - final double imuJx = (imuCos * imuInputs.jerkX - imuSin * imuInputs.jerkY); - final double imuJy = (imuSin * imuInputs.jerkX + imuCos * imuInputs.jerkY); - final double imuJz = imuInputs.jerkZ; - - // Fill accel arrays (still math) - rioAccelArr[0] = rioAx; - rioAccelArr[1] = rioAy; - rioAccelArr[2] = rioAz; - imuAccelArr[0] = imuAx; - imuAccelArr[1] = imuAy; - imuAccelArr[2] = imuAz; + prevRioAcc = rioAcc; - final boolean doHeavyLogs = (++loopCount >= LOG_EVERY_N); - if (doHeavyLogs) { - loopCount = 0; - rioJerkArr[0] = rioJx; - rioJerkArr[1] = rioJy; - rioJerkArr[2] = rioJz; - imuJerkArr[0] = imuJx; - imuJerkArr[1] = imuJy; - imuJerkArr[2] = imuJz; - } + // IMU accelerations and jerks + imuAcc = imuInputs.linearAccel.rotateBy(RobotConstants.kIMUOrientation); // Logging - Logger.recordOutput("Accel/Rio/Accel_mps2", rioAccelArr); - Logger.recordOutput("Accel/IMU/Accel_mps2", imuAccelArr); + Logger.recordOutput("Accel/Rio/Accel_mps2", rioAcc); + Logger.recordOutput("Accel/IMU/Accel_mps2", imuAcc); + // Every N loops, compute and log the 
Jerk + final boolean doHeavyLogs = (++loopCount >= LOG_EVERY_N); if (doHeavyLogs) { - Logger.recordOutput("Accel/Rio/Jerk_mps3", rioJerkArr); - Logger.recordOutput("Accel/IMU/Jerk_mps3", imuJerkArr); + loopCount = 0; + rioJerk = rioAcc.minus(prevRioAcc).div(Constants.loopPeriodSecs); + imuJerk = imuInputs.linearJerk.rotateBy(RobotConstants.kIMUOrientation); + Logger.recordOutput("Accel/Rio/Jerk_mps3", rioJerk); + Logger.recordOutput("Accel/IMU/Jerk_mps3", imuJerk); final double[] ts = imuInputs.odometryYawTimestamps; if (ts.length > 0) { diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 46788c2b..aeb71e65 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -60,25 +60,33 @@ import org.littletonrobotics.junction.AutoLogOutput; import org.littletonrobotics.junction.Logger; +/** + * Drive subsystem (RBSISubsystem) + * + *

The Drive subsystem controls the individual swerve Modules and owns the odometry of the robot. + * The odometry is updated from both the swerve modules and (optionally) the vision subsystem. + */ public class Drive extends RBSISubsystem { - // --- buffers for time-aligned pose + yaw + yawRate history --- - private final ConcurrentTimeInterpolatableBuffer poseBuffer = - frc.robot.util.ConcurrentTimeInterpolatableBuffer.createBuffer(1.5); + // Declare Hardware + private final Imu imu; + private final Module[] modules = new Module[4]; // FL, FR, BL, BR + private final SysIdRoutine sysId; + // Buffers for necessary things + private final ConcurrentTimeInterpolatableBuffer poseBuffer = + ConcurrentTimeInterpolatableBuffer.createBuffer(DrivebaseConstants.kHistorySize); private final ConcurrentTimeInterpolatableBuffer yawBuffer = - frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); - + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(DrivebaseConstants.kHistorySize); private final ConcurrentTimeInterpolatableBuffer yawRateBuffer = - frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(DrivebaseConstants.kHistorySize); - static final Lock odometryLock = new ReentrantLock(); - private final Imu imu; - private final Module[] modules = new Module[4]; // FL, FR, BL, BR - private final SysIdRoutine sysId; + // Declare an alert private final Alert gyroDisconnectedAlert = new Alert("Disconnected gyro, using kinematics as fallback.", AlertType.kError); + // Declare odometry and pose-related variables + static final Lock odometryLock = new ReentrantLock(); private SwerveDriveKinematics kinematics = new SwerveDriveKinematics(getModuleTranslations()); private SwerveModulePosition[] lastModulePositions = // For delta tracking new SwerveModulePosition[] { @@ -87,15 +95,24 @@ public class Drive extends RBSISubsystem { new SwerveModulePosition(), new SwerveModulePosition() }; - private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; + private final SwerveModulePosition[] odomPositions = { + new SwerveModulePosition(), + new SwerveModulePosition(), + new SwerveModulePosition(), + new SwerveModulePosition() + }; private SwerveDrivePoseEstimator m_PoseEstimator = new SwerveDrivePoseEstimator(kinematics, Rotation2d.kZero, lastModulePositions, Pose2d.kZero); + // Declare PID controller and siumulation physics private ProfiledPIDController angleController; - private DriveSimPhysics simPhysics; - // Constructor + // Pose reset gate (vision + anything latency-sensitive) + private volatile long poseResetEpoch = 0; // monotonic counter + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; + + /** Constructor */ public Drive(Imu imu) { this.imu = imu; @@ -151,8 +168,8 @@ public Drive(Imu imu) { default: throw new RuntimeException("Invalid Swerve Drive Type"); } - // Start odometry thread (for the real robot) + // Start odometry thread (for the real robot) PhoenixOdometryThread.getInstance().start(); } else { @@ -211,12 +228,16 @@ public Drive(Imu imu) { break; case CHOREO: - // TODO: Probably need to add something here for Choreo autonomous path building + // TODO: If your team is using Choreo, you'll know what to do here... 
+ break; + + case MANUAL: + // Nothing to be done for MANUAL; may just use AutoPilot break; default: } - // Configure SysId + // Configure SysId for drivebase characterization sysId = new SysIdRoutine( new SysIdRoutine.Config( @@ -228,126 +249,188 @@ public Drive(Imu imu) { (voltage) -> runCharacterization(voltage.in(Volts)), null, this)); } + /************************************************************************* */ /** Periodic function that is called each robot cycle by the command scheduler */ @Override public void rbsiPeriodic() { odometryLock.lock(); try { - // Get the IMU inputs + // Ensure IMU inputs are fresh for this cycle final var imuInputs = imu.getInputs(); // Stop modules & log empty setpoint states if disabled if (DriverStation.isDisabled()) { - for (var module : modules) module.stop(); + for (var module : modules) { + module.stop(); + } Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); } - // Module periodic updates, which drains queues this cycle - for (var module : modules) module.periodic(); + // Drain per-module odometry queues for this cycle + for (var module : modules) { + module.periodic(); + } - // Feed historical samples into odometry if REAL robot + // -------- REAL ODOMETRY REPLAY (canonical timestamps from module[0]) -------- if (Constants.getMode() != Mode.SIM) { + final double[] ts = modules[0].getOdometryTimestamps(); + final int n = (ts == null) ? 0 : ts.length; - // ---- Gather per-module histories ---- - final double[][] perModuleTs = new double[4][]; - final SwerveModulePosition[][] perModulePos = new SwerveModulePosition[4][]; - int unionCap = 0; - + // Cache per-module histories ONCE (avoid repeated getters in the loop) + final SwerveModulePosition[][] modHist = new SwerveModulePosition[4][]; for (int m = 0; m < 4; m++) { - perModuleTs[m] = modules[m].getOdometryTimestamps(); - perModulePos[m] = modules[m].getOdometryPositions(); - unionCap += perModuleTs[m].length; + modHist[m] = modules[m].getOdometryPositions(); } - if (unionCap == 0) { + // Determine yaw queue availability + final boolean hasYawQueue = + imuInputs.connected + && imuInputs.odometryYawTimestamps != null + && imuInputs.odometryYawPositionsRad != null + && imuInputs.odometryYawTimestamps.length + == imuInputs.odometryYawPositionsRad.length + && imuInputs.odometryYawTimestamps.length > 0; + + final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; + final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; + + // If we have no module samples, still keep yaw buffers “alive” for gating callers + if (n == 0) { + final double now = Timer.getFPGATimestamp(); + yawBuffer.addSample(now, imuInputs.yawPositionRad); + yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); + gyroDisconnectedAlert.set(!imuInputs.connected); return; } - // ---- Fill yaw buffer from IMU odometry samples (preferred) ---- - // These timestamps are authoritative for yaw interpolation and yaw-rate gating. 
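For context on how the yaw-rate and pose histories kept here are meant to be consumed: a vision-side caller can gate a measurement on them before feeding the estimator. The sketch below is illustrative only; drive, captureTimestamp, visionPose, and visionStdDevs are assumed names and the 720 deg/s threshold is made up, but getLastPoseResetTimestamp, getMaxAbsYawRateRadPerSec, getPoseAtTime, and addVisionMeasurement are the Drive methods added in this patch:

    // Skip frames captured before the most recent pose reset.
    if (captureTimestamp <= drive.getLastPoseResetTimestamp()) {
      return;
    }

    // Reject the frame if the robot was spinning hard around the capture time.
    double windowStart = captureTimestamp - 0.02; // illustrative latency window
    boolean spinningTooFast =
        drive.getMaxAbsYawRateRadPerSec(windowStart, captureTimestamp).orElse(0.0)
            > Math.toRadians(720.0);

    if (!spinningTooFast) {
      // Optionally sanity-check against the buffered odometry pose at the capture time...
      drive.getPoseAtTime(captureTimestamp)
          .ifPresent(odomPoseAtCapture -> {
            // ...then insert the vision pose with its timestamp and standard deviations.
            drive.addVisionMeasurement(visionPose, captureTimestamp, visionStdDevs);
          });
    }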
- if (imuInputs.connected - && imuInputs.odometryYawTimestamps != null - && imuInputs.odometryYawPositionsRad != null - && imuInputs.odometryYawTimestamps.length == imuInputs.odometryYawPositionsRad.length - && imuInputs.odometryYawTimestamps.length > 0) { - - final double[] yts = imuInputs.odometryYawTimestamps; - final double[] yaws = imuInputs.odometryYawPositionsRad; - - for (int i = 0; i < yts.length; i++) { - yawBuffer.addSample(yts[i], yaws[i]); - if (i > 0) { - double dt = yts[i] - yts[i - 1]; + // Decide whether yaw queue is index-aligned with module[0] timestamps. + // We only trust index alignment if BOTH: + // - yaw has at least n samples + // - yawTs[i] ~= ts[i] for i in range (tight epsilon) + boolean yawIndexAligned = false; + if (hasYawQueue && yawTs.length >= n) { + yawIndexAligned = true; + final double eps = 1e-3; // 1 ms + for (int i = 0; i < n; i++) { + if (Math.abs(yawTs[i] - ts[i]) > eps) { + yawIndexAligned = false; + break; + } + } + } + + // If yaw is NOT index-aligned but we have a yaw queue, build yaw/yawRate buffers ONCE. + if (hasYawQueue && !yawIndexAligned) { + for (int k = 0; k < yawTs.length; k++) { + yawBuffer.addSample(yawTs[k], yawPos[k]); + if (k > 0) { + final double dt = yawTs[k] - yawTs[k - 1]; if (dt > 1e-6) { - yawRateBuffer.addSample(yts[i], (yaws[i] - yaws[i - 1]) / dt); + yawRateBuffer.addSample(yawTs[k], (yawPos[k] - yawPos[k - 1]) / dt); } } } - } else { - // fallback: buffer "now" yaw only - double now = Timer.getFPGATimestamp(); + } + + // If NO yaw queue, add a single “now” sample once (don’t do this per replay sample) + if (!hasYawQueue) { + final double now = Timer.getFPGATimestamp(); yawBuffer.addSample(now, imuInputs.yawPositionRad); yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); } - // ---- Build union timeline ---- - final double[] unionTs = new double[Math.max(unionCap, 1)]; - final double EPS = 1e-4; // 0.1 ms - final int unionN = buildUnionTimeline(perModuleTs, unionTs, EPS); - - // ---- Replay at union times ---- - for (int i = 0; i < unionN; i++) { - final double t = unionTs[i]; + // Replay each odometry sample + for (int i = 0; i < n; i++) { + final double t = ts[i]; - // Interpolate each module to union time + // Build module positions at this sample index (clamp defensively) for (int m = 0; m < 4; m++) { - SwerveModulePosition p = interpolateModulePosition(perModuleTs[m], perModulePos[m], t); - odomPositions[m] = p; + final SwerveModulePosition[] hist = modHist[m]; + if (hist == null || hist.length == 0) { + odomPositions[m] = modules[m].getPosition(); + } else if (i < hist.length) { + odomPositions[m] = hist[i]; + } else { + odomPositions[m] = hist[hist.length - 1]; + } } - // Interpolate yaw at union time - final double yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); + // Determine yaw at this timestamp + double yawRad = imuInputs.yawPositionRad; // fallback + + if (hasYawQueue) { + if (yawIndexAligned) { + yawRad = yawPos[i]; + + // Keep yaw/yawRate buffers updated in odometry timebase (good for yaw-gate) + yawBuffer.addSample(t, yawRad); + if (i > 0) { + final double dt = yawTs[i] - yawTs[i - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(t, (yawPos[i] - yawPos[i - 1]) / dt); + } + } + } else { + // yawBuffer was pre-filled above; interpolate here + yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); + } + } + // Feed estimator at this historical timestamp m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); - // keep buffer in the same timebase as 
estimator + // Maintain pose history in SAME timebase as estimator poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); } Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); + gyroDisconnectedAlert.set(!imuInputs.connected); + return; } - gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); + // SIMULATION: Keep sim pose buffer time-aligned, too + double now = Timer.getFPGATimestamp(); + poseBuffer.addSample(now, simPhysics.getPose()); + yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); + yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); + + Logger.recordOutput("Drive/Pose", simPhysics.getPose()); + gyroDisconnectedAlert.set(false); } finally { odometryLock.unlock(); } } - /** Simulation Periodic Method */ + /** + * Simulation Periodic Method + * + *

This function runs only for simulation, but does similar processing to the REAL periodic + * function. Instead of reading back what the modules actually say, use physics to predict where + * the module would have gone. + */ @Override public void simulationPeriodic() { final double dt = Constants.loopPeriodSecs; - // 1) Advance module wheel physics + // Advance module wheel physics for (int i = 0; i < modules.length; i++) { modules[i].simulationPeriodic(); } - // 2) Get module states from modules (authoritative) - NO STREAMS + // Get module states from modules final SwerveModuleState[] moduleStates = new SwerveModuleState[modules.length]; for (int i = 0; i < modules.length; i++) { moduleStates[i] = modules[i].getState(); } - // 3) Update SIM physics (linear + angular) + // Update SIM physics (linear & angular motion of the robot) simPhysics.update(moduleStates, dt); - // 4) Feed IMU from authoritative physics (primitive-only boundary) - final double yawRad = - simPhysics.getYaw().getRadians(); // or simPhysics.getYawRad() if you have it + // Feed the simulated IMU from authoritative physics + final double yawRad = simPhysics.getYaw().getRadians(); final double omegaRadPerSec = simPhysics.getOmegaRadPerSec(); final double ax = simPhysics.getLinearAccel().getX(); @@ -357,17 +440,15 @@ public void simulationPeriodic() { imu.simulationSetOmegaRadPerSec(omegaRadPerSec); imu.simulationSetLinearAccelMps2(ax, ay, 0.0); - // 5) Feed PoseEstimator with authoritative yaw and module positions - // (PoseEstimator still wants objects -> boundary conversion stays here) + // Feed PoseEstimator with authoritative yaw and module positions final SwerveModulePosition[] modulePositions = new SwerveModulePosition[modules.length]; for (int i = 0; i < modules.length; i++) { modulePositions[i] = modules[i].getPosition(); } - m_PoseEstimator.resetPosition( Rotation2d.fromRadians(yawRad), modulePositions, simPhysics.getPose()); - // 6) Optional: inject vision measurement in SIM + // If simulated vision available, inject vision measurement if (simulatedVisionAvailable) { final Pose2d visionPose = getSimulatedVisionPose(); final double visionTimestamp = Timer.getFPGATimestamp(); @@ -377,7 +458,7 @@ public void simulationPeriodic() { poseBuffer.addSample(Timer.getFPGATimestamp(), simPhysics.getPose()); - // 7) Logging + // Logging Logger.recordOutput("Sim/Pose", simPhysics.getPose()); Logger.recordOutput("Sim/YawRad", yawRad); Logger.recordOutput("Sim/OmegaRadPerSec", simPhysics.getOmegaRadPerSec()); @@ -399,7 +480,7 @@ public void setMotorBrake(boolean brake) { } } - /** Stops the drive. */ + /** Stop the drive. */ public void stop() { runVelocity(new ChassisSpeeds()); } @@ -513,37 +594,6 @@ public Pose2d getPose() { return m_PoseEstimator.getEstimatedPosition(); } - /** Returns interpolated odometry pose at a given timestamp. */ - public Optional getPoseAtTime(double timestampSeconds) { - return poseBuffer.getSample(timestampSeconds); - } - - /** Adds a vision measurement safely into the PoseEstimator. */ - public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { - odometryLock.lock(); - try { - m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); - } finally { - odometryLock.unlock(); - } - } - - /** Max abs yaw rate over [t0, t1] using buffered yaw-rate history (no streams). 
*/ - public OptionalDouble getMaxAbsYawRateRadPerSec(double t0, double t1) { - if (t1 < t0) return OptionalDouble.empty(); - var sub = yawRateBuffer.getInternalBuffer().subMap(t0, true, t1, true); - if (sub.isEmpty()) return OptionalDouble.empty(); - - double maxAbs = 0.0; - boolean any = false; - for (double v : sub.values()) { - any = true; - double a = Math.abs(v); - if (a > maxAbs) maxAbs = a; - } - return any ? OptionalDouble.of(maxAbs) : OptionalDouble.empty(); - } - /** Returns the current odometry rotation. */ @AutoLogOutput(key = "Odometry/Yaw") public Rotation2d getHeading() { @@ -553,27 +603,8 @@ public Rotation2d getHeading() { return imu.getYaw(); } - /** Returns an array of module translations. */ - public static Translation2d[] getModuleTranslations() { - return new Translation2d[] { - new Translation2d(kFLXPosMeters, kFLYPosMeters), - new Translation2d(kFRXPosMeters, kFRYPosMeters), - new Translation2d(kBLXPosMeters, kBLYPosMeters), - new Translation2d(kBRXPosMeters, kBRYPosMeters) - }; - } - - /** Returns the position of each module in radians. */ - public double[] getWheelRadiusCharacterizationPositions() { - double[] values = new double[4]; - for (int i = 0; i < 4; i++) { - values[i] = modules[i].getWheelRadiusCharacterizationPosition(); - } - return values; - } - /** - * Returns the measured chassis speeds in FIELD coordinates. + * Returns the measured chassis speeds of the modules in FIELD coordinates. * *

+X = field forward +Y = field left CCW+ = counterclockwise */ @@ -581,7 +612,6 @@ public double[] getWheelRadiusCharacterizationPositions() { public ChassisSpeeds getFieldRelativeSpeeds() { // Robot-relative measured speeds from modules ChassisSpeeds robotRelative = getChassisSpeeds(); - // Convert to field-relative using authoritative yaw return ChassisSpeeds.fromRobotRelativeSpeeds(robotRelative, getHeading()); } @@ -597,13 +627,45 @@ public Translation2d getFieldLinearVelocity() { return new Translation2d(fieldSpeeds.vxMetersPerSecond, fieldSpeeds.vyMetersPerSecond); } - /** Returns the average velocity of the modules in rotations/sec (Phoenix native units). */ - public double getFFCharacterizationVelocity() { - double output = 0.0; - for (int i = 0; i < 4; i++) { - output += modules[i].getFFCharacterizationVelocity() / 4.0; + /** Returns interpolated odometry pose at a given timestamp. */ + public Optional getPoseAtTime(double timestampSeconds) { + return poseBuffer.getSample(timestampSeconds); + } + + /** + * Max abs yaw rate over [t0, t1] using buffered yaw-rate history + * + * @param t0 Interval start + * @param t1 interval end + * @return Maximum yaw rate + */ + public OptionalDouble getMaxAbsYawRateRadPerSec(double t0, double t1) { + // If end before start, return empty + if (t1 < t0) return OptionalDouble.empty(); + + // Get the subset of entries from the buffer + var sub = yawRateBuffer.getInternalBuffer().subMap(t0, true, t1, true); + if (sub.isEmpty()) return OptionalDouble.empty(); + + double maxAbs = 0.0; + boolean any = false; + for (double v : sub.values()) { + any = true; + double a = Math.abs(v); + if (a > maxAbs) maxAbs = a; } - return output; + // Return a value if there's anything to report, else empty + return any ? OptionalDouble.of(maxAbs) : OptionalDouble.empty(); + } + + /** Get the last EPOCH of a pose reset */ + public long getPoseResetEpoch() { + return poseResetEpoch; + } + + /** Get the last TIMESTAMP of a pose reset */ + public double getLastPoseResetTimestamp() { + return lastPoseResetTimestamp; } /** Returns the maximum linear speed in meters per sec. */ @@ -626,6 +688,34 @@ public double getMaxAngularAccelRadPerSecPerSec() { return getMaxLinearAccelMetersPerSecPerSec() / kDriveBaseRadiusMeters; } + /** Returns an array of module translations. */ + public static Translation2d[] getModuleTranslations() { + return new Translation2d[] { + new Translation2d(kFLXPosMeters, kFLYPosMeters), + new Translation2d(kFRXPosMeters, kFRYPosMeters), + new Translation2d(kBLXPosMeters, kBLYPosMeters), + new Translation2d(kBRXPosMeters, kBRYPosMeters) + }; + } + + /** Returns the position of each module in radians. */ + public double[] getWheelRadiusCharacterizationPositions() { + double[] values = new double[4]; + for (int i = 0; i < 4; i++) { + values[i] = modules[i].getWheelRadiusCharacterizationPosition(); + } + return values; + } + + /** Returns the average velocity of the modules in rotations/sec (Phoenix native units). */ + public double getFFCharacterizationVelocity() { + double output = 0.0; + for (int i = 0; i < 4; i++) { + output += modules[i].getFFCharacterizationVelocity() / 4.0; + } + return output; + } + /* Setter Functions ****************************************************** */ /** Resets the current odometry pose. 
*/ @@ -651,18 +741,21 @@ public void zeroHeading() { markPoseReset(Timer.getFPGATimestamp()); } - // ---- Pose reset gate (vision + anything latency-sensitive) ---- - private volatile long poseResetEpoch = 0; // monotonic counter - private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; - - public long getPoseResetEpoch() { - return poseResetEpoch; - } - - public double getLastPoseResetTimestamp() { - return lastPoseResetTimestamp; + /** Adds a vision measurement safely into the PoseEstimator. */ + public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { + odometryLock.lock(); + try { + m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); + } finally { + odometryLock.unlock(); + } } + /** + * Sets the EPOCH and TIMESTAMP for a pose reset + * + * @param fpgaNow The FPGA timestamp of the pose reset + */ private void markPoseReset(double fpgaNow) { lastPoseResetTimestamp = fpgaNow; poseResetEpoch++; @@ -670,7 +763,10 @@ private void markPoseReset(double fpgaNow) { Logger.recordOutput("Drive/PoseResetTimestamp", lastPoseResetTimestamp); } + /** UTILITY FUNCTION SECTION ********************************************* */ + /** CHOREO SECTION (Ignore if AutoType == PATHPLANNER) ******************* */ + /** Choreo: Reset odometry */ public Command resetOdometry(Pose2d orElseGet) { // TODO Auto-generated method stub @@ -725,7 +821,7 @@ public void followTrajectory(SwerveSample sample) { runVelocity(speeds); } - // ---------------- SIM VISION ---------------- + /** SIMULATION VISION FUNCTIONS ****************************************** */ // Vision measurement enabled in simulation private boolean simulatedVisionAvailable = true; @@ -764,69 +860,4 @@ private edu.wpi.first.math.Matrix getSimulatedVisionStdDevs() { stdDevs.set(2, 0, Math.toRadians(2)); // rotation standard deviation (radians) return stdDevs; } - - private static SwerveModulePosition interpolateModulePosition( - double[] ts, SwerveModulePosition[] samples, double t) { - - final int n = ts.length; - if (n == 0) return new SwerveModulePosition(); - - if (t <= ts[0]) return samples[0]; - if (t >= ts[n - 1]) return samples[n - 1]; - - int lo = 0, hi = n - 1; - while (lo < hi) { - int mid = (lo + hi) >>> 1; - if (ts[mid] < t) lo = mid + 1; - else hi = mid; - } - int i1 = lo; - int i0 = i1 - 1; - - double t0 = ts[i0]; - double t1 = ts[i1]; - if (t1 <= t0) return samples[i1]; - - double alpha = (t - t0) / (t1 - t0); - - double dist = - samples[i0].distanceMeters - + (samples[i1].distanceMeters - samples[i0].distanceMeters) * alpha; - - Rotation2d angle = samples[i0].angle.interpolate(samples[i1].angle, alpha); - - return new SwerveModulePosition(dist, angle); - } - - private static int buildUnionTimeline(double[][] perModuleTs, double[] outUnion, double epsSec) { - int[] idx = new int[perModuleTs.length]; - int outN = 0; - - while (true) { - double minT = Double.POSITIVE_INFINITY; - boolean any = false; - - for (int m = 0; m < perModuleTs.length; m++) { - double[] ts = perModuleTs[m]; - if (idx[m] >= ts.length) continue; - any = true; - minT = Math.min(minT, ts[idx[m]]); - } - - if (!any) break; - - if (outN == 0 || Math.abs(minT - outUnion[outN - 1]) > epsSec) { - outUnion[outN++] = minT; - } - - for (int m = 0; m < perModuleTs.length; m++) { - double[] ts = perModuleTs[m]; - while (idx[m] < ts.length && Math.abs(ts[idx[m]] - minT) <= epsSec) { - idx[m]++; - } - } - } - - return outN; - } } diff --git a/src/main/java/frc/robot/subsystems/drive/DriveSimPhysics.java 
b/src/main/java/frc/robot/subsystems/drive/DriveSimPhysics.java index e58a4b33..ff4402ce 100644 --- a/src/main/java/frc/robot/subsystems/drive/DriveSimPhysics.java +++ b/src/main/java/frc/robot/subsystems/drive/DriveSimPhysics.java @@ -45,6 +45,7 @@ public class DriveSimPhysics { private final SwerveDriveKinematics kinematics; + /** Constructor */ public DriveSimPhysics( SwerveDriveKinematics kinematics, double moiKgMetersSq, double maxTorqueNm) { this.kinematics = kinematics; @@ -97,7 +98,7 @@ public void update(SwerveModuleState[] moduleStates, double dtSeconds) { pose = new Pose2d(newTranslation, newYaw); } - /* ================== Getters ================== */ + /** Getter Functions ***************************************************** */ public Pose2d getPose() { return pose; } @@ -114,7 +115,7 @@ public Translation2d getLinearAccel() { return linearAccel; } - /* ================== Reset ================== */ + /** Reset Functions ****************************************************** */ public void reset(Pose2d pose) { this.pose = pose; this.omegaRadPerSec = 0.0; diff --git a/src/main/java/frc/robot/subsystems/drive/Module.java b/src/main/java/frc/robot/subsystems/drive/Module.java index abc40ee2..b57653d8 100644 --- a/src/main/java/frc/robot/subsystems/drive/Module.java +++ b/src/main/java/frc/robot/subsystems/drive/Module.java @@ -18,16 +18,26 @@ import frc.robot.Constants.DrivebaseConstants; import org.littletonrobotics.junction.Logger; +/** + * Module API Class + * + *

The Module class is the API-level class for a single swerve module, of which there are four on + * the robot. This is not a true subsystem, but an abstraction layer. + */ public class Module { + + // Define IO private final ModuleIO io; private final ModuleIOInputsAutoLogged inputs = new ModuleIOInputsAutoLogged(); private final int index; + // Declare alerts here, and only set/unset during the periodic() loop. private final Alert driveDisconnectedAlert; private final Alert turnDisconnectedAlert; private final Alert turnEncoderDisconnectedAlert; private SwerveModulePosition[] odometryPositions = new SwerveModulePosition[] {}; + /** Constructor */ public Module(ModuleIO io, int index) { this.io = io; this.index = index; @@ -44,6 +54,8 @@ public Module(ModuleIO io, int index) { AlertType.kError); } + /************************************************************************* */ + /** Periodic function that is called each robot cycle by the Drive class */ public void periodic() { io.updateInputs(inputs); Logger.processInputs("Drive/Module" + Integer.toString(index), inputs); @@ -64,7 +76,19 @@ public void periodic() { turnEncoderDisconnectedAlert.set(!inputs.turnEncoderConnected); } - /** Runs the module with the specified setpoint state. Mutates the state to optimize it. */ + /** Forwards the simulation periodic call to the IO layer */ + public void simulationPeriodic() { + io.simulationPeriodic(); + } + + /** Module Action Functions ********************************************** */ + /** + * Runs the module with the specified setpoint state + * + *

Mutates the state to optimize it + * + * @param state The requested Swerve Modeule State + */ public void runSetpoint(SwerveModuleState state) { // Optimize velocity setpoint state.optimize(getAngle()); @@ -87,12 +111,17 @@ public void stop() { io.setTurnOpenLoop(0.0); } - /** Sets whether brake mode is enabled. */ + /** + * Sets whether brake mode is enabled + * + * @param enabled Is the brake enabled? + */ public void setBrakeMode(boolean enabled) { io.setDriveBrakeMode(enabled); io.setTurnBrakeMode(enabled); } + /** Getter functions ***************************************************** */ /** Returns the current turn angle of the module. */ public Rotation2d getAngle() { return inputs.turnPosition; @@ -128,11 +157,6 @@ public double[] getOdometryTimestamps() { return inputs.odometryTimestamps; } - /** Forwards the simulation periodic call to the IO layer */ - public void simulationPeriodic() { - io.simulationPeriodic(); - } - /** Returns the module position in radians. */ public double getWheelRadiusCharacterizationPosition() { return inputs.drivePositionRad; diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIO.java b/src/main/java/frc/robot/subsystems/drive/ModuleIO.java index 10055776..8e9c1132 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIO.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIO.java @@ -13,6 +13,7 @@ import org.littletonrobotics.junction.AutoLog; public interface ModuleIO { + @AutoLog public static class ModuleIOInputs { public boolean driveConnected = false; diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java index 6aec53eb..65da2c71 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java @@ -48,6 +48,7 @@ import frc.robot.generated.TunerConstants; import frc.robot.util.PhoenixUtil; import frc.robot.util.RBSICANBusRegistry; +import java.util.Arrays; import java.util.Queue; import org.littletonrobotics.junction.Logger; @@ -129,7 +130,7 @@ public class ModuleIOTalonFX implements ModuleIO { private long lastTimestampNano = System.nanoTime(); /* - * TalonFX I/O + * TalonFX I/O Constructor */ public ModuleIOTalonFX(int module) { // Record the module number for logging purposes @@ -260,17 +261,18 @@ public ModuleIOTalonFX(int module) { ParentDevice.optimizeBusUtilizationForAll(driveTalon, turnTalon); } + /** Input Updating Loop ************************************************** */ @Override public void updateInputs(ModuleIOInputs inputs) { - // Refresh most signals + // -------------------- Refresh Phoenix signals -------------------- var driveStatus = BaseStatusSignal.refreshAll(drivePosition, driveVelocity, driveAppliedVolts, driveCurrent); var turnStatus = BaseStatusSignal.refreshAll(turnPosition, turnVelocity, turnAppliedVolts, turnCurrent); var encStatus = BaseStatusSignal.refreshAll(turnAbsolutePosition); - // Log *which* groups are failing and what the code is + // Log refresh failures explicitly (debug gold) if (!driveStatus.isOK()) { Logger.recordOutput("CAN/Module" + module + "/DriveRefreshStatus", driveStatus.toString()); } @@ -281,40 +283,66 @@ public void updateInputs(ModuleIOInputs inputs) { Logger.recordOutput("CAN/Module" + module + "/EncRefreshStatus", encStatus.toString()); } - // Update drive inputs + // -------------------- Connectivity flags -------------------- inputs.driveConnected = driveConnectedDebounce.calculate(driveStatus.isOK()); + 
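    // Each connected flag reflects whether the latest refreshAll() for that signal group
    // succeeded, passed through a debouncer before driving the disconnect alerts that
    // Module.periodic() raises from these inputs.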
inputs.turnConnected = turnConnectedDebounce.calculate(turnStatus.isOK()); + inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(encStatus.isOK()); + + // -------------------- Instantaneous state -------------------- inputs.drivePositionRad = Units.rotationsToRadians(drivePosition.getValueAsDouble()); inputs.driveVelocityRadPerSec = Units.rotationsToRadians(driveVelocity.getValueAsDouble()); inputs.driveAppliedVolts = driveAppliedVolts.getValueAsDouble(); inputs.driveCurrentAmps = driveCurrent.getValueAsDouble(); - // Update turn inputs - inputs.turnConnected = turnConnectedDebounce.calculate(turnStatus.isOK()); - inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(encStatus.isOK()); inputs.turnAbsolutePosition = Rotation2d.fromRotations(turnAbsolutePosition.getValueAsDouble()); inputs.turnPosition = Rotation2d.fromRotations(turnPosition.getValueAsDouble()); inputs.turnVelocityRadPerSec = Units.rotationsToRadians(turnVelocity.getValueAsDouble()); inputs.turnAppliedVolts = turnAppliedVolts.getValueAsDouble(); inputs.turnCurrentAmps = turnCurrent.getValueAsDouble(); - // Update odometry inputs - inputs.odometryTimestamps = - timestampQueue.stream().mapToDouble((Double value) -> value).toArray(); - inputs.odometryDrivePositionsRad = - drivePositionQueue.stream() - .mapToDouble((Double value) -> Units.rotationsToRadians(value)) - .toArray(); - inputs.odometryTurnPositions = - turnPositionQueue.stream() - .map((Double value) -> Rotation2d.fromRotations(value)) - .toArray(Rotation2d[]::new); - - // Clear the queues - timestampQueue.clear(); - drivePositionQueue.clear(); - turnPositionQueue.clear(); + // -------------------- Odometry queue drain -------------------- + final int tsCount = timestampQueue.size(); + final int driveCount = drivePositionQueue.size(); + final int turnCount = turnPositionQueue.size(); + + // Only consume the common prefix — guarantees alignment + final int sampleCount = Math.min(tsCount, Math.min(driveCount, turnCount)); + + if (sampleCount <= 0) { + inputs.odometryTimestamps = new double[0]; + inputs.odometryDrivePositionsRad = new double[0]; + inputs.odometryTurnPositions = new Rotation2d[0]; + return; + } + + final double[] outTs = new double[sampleCount]; + final double[] outDriveRad = new double[sampleCount]; + final Rotation2d[] outTurn = new Rotation2d[sampleCount]; + + for (int i = 0; i < sampleCount; i++) { + final Double t = timestampQueue.poll(); + final Double driveRot = drivePositionQueue.poll(); + final Double turnRot = turnPositionQueue.poll(); + + // Defensive guard (should never trigger, but keeps us safe) + if (t == null || driveRot == null || turnRot == null) { + inputs.odometryTimestamps = Arrays.copyOf(outTs, i); + inputs.odometryDrivePositionsRad = Arrays.copyOf(outDriveRad, i); + inputs.odometryTurnPositions = Arrays.copyOf(outTurn, i); + return; + } + + outTs[i] = t.doubleValue(); + outDriveRad[i] = Units.rotationsToRadians(driveRot.doubleValue()); + outTurn[i] = Rotation2d.fromRotations(turnRot.doubleValue()); + } + + inputs.odometryTimestamps = outTs; + inputs.odometryDrivePositionsRad = outDriveRad; + inputs.odometryTurnPositions = outTurn; } + /** Module Action Functions ********************************************** */ /** * Set the drive motor to an open-loop voltage, scaled to battery voltage * diff --git a/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java b/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java index a9332cec..ceaff2ba 100644 --- 
a/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java +++ b/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java @@ -22,6 +22,7 @@ import frc.robot.subsystems.imu.ImuIONavX; import frc.robot.subsystems.imu.ImuIOPigeon2; import frc.robot.util.YagslConstants; +import java.util.function.Supplier; /** * Holds the proper set of drive constants given the type of drive @@ -310,9 +311,9 @@ public enum ImuType { NAVX(new String[] {"navx", "navx_spi"}, ImuIONavX::new); private final String[] keys; - public final java.util.function.Supplier factory; + public final Supplier factory; - ImuType(String[] keys, java.util.function.Supplier factory) { + ImuType(String[] keys, Supplier factory) { this.keys = keys; this.factory = factory; } diff --git a/src/main/java/frc/robot/subsystems/imu/Imu.java b/src/main/java/frc/robot/subsystems/imu/Imu.java index 2a7a98f4..f5f63152 100644 --- a/src/main/java/frc/robot/subsystems/imu/Imu.java +++ b/src/main/java/frc/robot/subsystems/imu/Imu.java @@ -19,8 +19,9 @@ import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Translation3d; +import frc.robot.util.VirtualSubsystem; -public class Imu { +public class Imu extends VirtualSubsystem { private final ImuIO io; private final ImuIO.ImuIOInputs inputs = new ImuIO.ImuIOInputs(); @@ -30,11 +31,13 @@ public class Imu { private Translation3d cachedAccel = Translation3d.kZero; private Translation3d cachedJerk = Translation3d.kZero; + // Constructor public Imu(ImuIO io) { this.io = io; } - public void periodic() { + // Periodic function to read inputs + public void rbsiPeriodic() { io.updateInputs(inputs); } @@ -70,12 +73,11 @@ private void refreshCachesIfNeeded() { cacheStampNs = stamp; cachedYaw = Rotation2d.fromRadians(inputs.yawPositionRad); - cachedAccel = new Translation3d(inputs.linearAccelX, inputs.linearAccelY, inputs.linearAccelZ); - cachedJerk = new Translation3d(inputs.jerkX, inputs.jerkY, inputs.jerkZ); + cachedAccel = inputs.linearAccel; + cachedJerk = inputs.linearJerk; } // ---------------- SIM PUSH (primitive-only boundary) ---------------- - /** Simulation: push authoritative yaw (radians) into the IO layer */ public void simulationSetYawRad(double yawRad) { io.simulationSetYawRad(yawRad); diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIO.java b/src/main/java/frc/robot/subsystems/imu/ImuIO.java index 37aa695f..bf350c4f 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIO.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIO.java @@ -17,6 +17,7 @@ package frc.robot.subsystems.imu; +import edu.wpi.first.math.geometry.Translation3d; import frc.robot.util.RBSIIO; import org.littletonrobotics.junction.AutoLog; @@ -43,16 +44,10 @@ class ImuIOInputs { public double yawRateRadPerSec = 0.0; /** Linear acceleration in robot frame (m/s^2) */ - public double linearAccelX = 0.0; - - public double linearAccelY = 0.0; - public double linearAccelZ = 0.0; + public Translation3d linearAccel = Translation3d.kZero; /** Linear jerk in robot frame (m/s^3) */ - public double jerkX = 0.0; - - public double jerkY = 0.0; - public double jerkZ = 0.0; + public Translation3d linearJerk = Translation3d.kZero; /** Time spent in the IO update call (seconds) */ public double latencySeconds = 0.0; diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java b/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java index 4bf1edcd..18fa3c23 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java @@ -19,6 +19,7 
@@ import com.studica.frc.AHRS; import com.studica.frc.AHRS.NavXComType; +import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStation.Alliance; import frc.robot.subsystems.drive.PhoenixOdometryThread; @@ -38,7 +39,7 @@ public class ImuIONavX implements ImuIO { private final Queue yawTimestampQueue; // Previous accel (m/s^2) for jerk - private double prevAx = 0.0, prevAy = 0.0, prevAz = 0.0; + private Translation3d prevAcc = Translation3d.kZero; private long prevTimestampNs = 0L; // Reusable buffers for queue drain @@ -82,29 +83,23 @@ public void updateInputs(ImuIOInputs inputs) { inputs.yawRateRadPerSec = yawRateDegPerSec * DEG_TO_RAD; // World linear accel (NavX returns "g" typically). Convert to m/s^2. - final double ax = navx.getWorldLinearAccelX() * G_TO_MPS2; - final double ay = navx.getWorldLinearAccelY() * G_TO_MPS2; - final double az = navx.getWorldLinearAccelZ() * G_TO_MPS2; - - inputs.linearAccelX = ax; - inputs.linearAccelY = ay; - inputs.linearAccelZ = az; + inputs.linearAccel = + new Translation3d( + navx.getWorldLinearAccelX(), + navx.getWorldLinearAccelY(), + navx.getWorldLinearAccelZ()) + .times(G_TO_MPS2); // Jerk if (prevTimestampNs != 0L) { final double dt = (start - prevTimestampNs) * 1e-9; if (dt > 1e-6) { - final double invDt = 1.0 / dt; - inputs.jerkX = (ax - prevAx) * invDt; - inputs.jerkY = (ay - prevAy) * invDt; - inputs.jerkZ = (az - prevAz) * invDt; + inputs.linearJerk = inputs.linearAccel.minus(prevAcc).div(dt); } } prevTimestampNs = start; - prevAx = ax; - prevAy = ay; - prevAz = az; + prevAcc = inputs.linearAccel; inputs.timestampNs = start; @@ -126,6 +121,11 @@ public void updateInputs(ImuIOInputs inputs) { inputs.latencySeconds = (end - start) * 1e-9; } + /** + * Zero the YAW to this radian value + * + * @param yawRad The radian value to which to zero + */ @Override public void zeroYawRad(double yawRad) { navx.setAngleAdjustment(yawRad / DEG_TO_RAD); @@ -133,7 +133,7 @@ public void zeroYawRad(double yawRad) { // Reset jerk history so you don't spike on the next frame prevTimestampNs = 0L; - prevAx = prevAy = prevAz = 0.0; + prevAcc = Translation3d.kZero; } private int drainOdomQueues() { @@ -174,25 +174,4 @@ private void ensureOdomCapacity(int n) { odomTsBuf = new double[newCap]; odomYawRadBuf = new double[newCap]; } - - // /** - // * Zero the NavX - // * - // *

This method should always rezero the pigeon in ALWAYS-BLUE-ORIGIN orientation. Testing, - // * however, shows that it's not doing what I think it should be doing. There is likely - // * interference with something else in the odometry - // */ - // @Override - // public void zero() { - // // With the Pigeon facing forward, forward depends on the alliance selected. - // // Set Angle Adjustment based on alliance - // if (DriverStation.getAlliance().get() == Alliance.Blue) { - // navx.setAngleAdjustment(0.0); - // } else { - // navx.setAngleAdjustment(180.0); - // } - // System.out.println("Setting YAW to " + navx.getAngleAdjustment()); - // navx.zeroYaw(); - // } - } diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java b/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java index 0e07f02f..0db40ef9 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java @@ -22,6 +22,8 @@ import com.ctre.phoenix6.StatusSignal; import com.ctre.phoenix6.configs.Pigeon2Configuration; import com.ctre.phoenix6.hardware.Pigeon2; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; import edu.wpi.first.units.measure.Angle; import edu.wpi.first.units.measure.AngularVelocity; import edu.wpi.first.units.measure.LinearAcceleration; @@ -31,12 +33,13 @@ import java.util.Iterator; import java.util.Queue; -/** IMU IO for CTRE Pigeon2 (primitive-only hot path) */ +/** IMU IO for CTRE Pigeon2 */ public class ImuIOPigeon2 implements ImuIO { - private static final double DEG_TO_RAD = Math.PI / 180.0; + // Constants private static final double G_TO_MPS2 = 9.80665; + // Define the Pigeon2 private final Pigeon2 pigeon = new Pigeon2( SwerveConstants.kPigeonId, RBSICANBusRegistry.getBus(SwerveConstants.kCANbusName)); @@ -53,13 +56,14 @@ public class ImuIOPigeon2 implements ImuIO { private final Queue odomYawsDeg; // Previous accel for jerk (m/s^2) - private double prevAx = 0.0, prevAy = 0.0, prevAz = 0.0; + private Translation3d prevAcc = Translation3d.kZero; private long prevTimestampNs = 0L; // Reusable buffers for queue-drain (avoid streams) private double[] odomTsBuf = new double[8]; private double[] odomYawRadBuf = new double[8]; + // Constructor public ImuIOPigeon2() { pigeon.getConfigurator().apply(new Pigeon2Configuration()); pigeon.getConfigurator().setYaw(0.0); @@ -77,6 +81,7 @@ public ImuIOPigeon2() { odomYawsDeg = PhoenixOdometryThread.getInstance().registerSignal(yawSignal); } + /** Update the Inputs */ @Override public void updateInputs(ImuIOInputs inputs) { final long start = System.nanoTime(); @@ -88,33 +93,25 @@ public void updateInputs(ImuIOInputs inputs) { final double yawDeg = yawSignal.getValueAsDouble(); final double yawRateDegPerSec = yawRateSignal.getValueAsDouble(); - inputs.yawPositionRad = yawDeg * DEG_TO_RAD; - inputs.yawRateRadPerSec = yawRateDegPerSec * DEG_TO_RAD; + inputs.yawPositionRad = Units.degreesToRadians(yawDeg); + inputs.yawRateRadPerSec = Units.degreesToRadians(yawRateDegPerSec); // Accel: Phoenix returns "g" for these signals (common for Pigeon2). 
Convert to m/s^2 - final double ax = accelX.getValueAsDouble() * G_TO_MPS2; - final double ay = accelY.getValueAsDouble() * G_TO_MPS2; - final double az = accelZ.getValueAsDouble() * G_TO_MPS2; - - inputs.linearAccelX = ax; - inputs.linearAccelY = ay; - inputs.linearAccelZ = az; + inputs.linearAccel = + new Translation3d( + accelX.getValueAsDouble(), accelY.getValueAsDouble(), accelZ.getValueAsDouble()) + .times(G_TO_MPS2); // Jerk from delta accel / dt if (prevTimestampNs != 0L) { final double dt = (start - prevTimestampNs) * 1e-9; if (dt > 1e-6) { - final double invDt = 1.0 / dt; - inputs.jerkX = (ax - prevAx) * invDt; - inputs.jerkY = (ay - prevAy) * invDt; - inputs.jerkZ = (az - prevAz) * invDt; + inputs.linearJerk = inputs.linearAccel.minus(prevAcc).div(dt); } } prevTimestampNs = start; - prevAx = ax; - prevAy = ay; - prevAz = az; + prevAcc = inputs.linearAccel; inputs.timestampNs = start; @@ -136,9 +133,14 @@ public void updateInputs(ImuIOInputs inputs) { inputs.latencySeconds = (end - start) * 1e-9; } + /** + * Zero the YAW to this radian value + * + * @param yawRad The radian value to which to zero + */ @Override public void zeroYawRad(double yawRad) { - pigeon.setYaw(yawRad / DEG_TO_RAD); + pigeon.setYaw(Units.radiansToDegrees(yawRad)); } private int drainOdometryQueuesIntoBuffers() { @@ -160,7 +162,7 @@ private int drainOdometryQueuesIntoBuffers() { int i = 0; while (i < n && itT.hasNext() && itY.hasNext()) { odomTsBuf[i] = itT.next(); - odomYawRadBuf[i] = itY.next() * DEG_TO_RAD; + odomYawRadBuf[i] = Units.degreesToRadians(itY.next()); i++; } diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java b/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java index 080dec1c..a28212ce 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java @@ -17,12 +17,13 @@ package frc.robot.subsystems.imu; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.Timer; import org.littletonrobotics.junction.Logger; -/** Simulated IMU for full robot simulation & replay logging (primitive-only) */ +/** Simulated IMU for full robot simulation & replay logging */ public class ImuIOSim implements ImuIO { - private static final double RAD_TO_DEG = 180.0 / Math.PI; // --- AUTHORITATIVE SIM STATE (PRIMITIVES) --- private double yawRad = 0.0; @@ -39,7 +40,6 @@ public class ImuIOSim implements ImuIO { public ImuIOSim() {} // ---------------- SIMULATION INPUTS (PUSH) ---------------- - @Override public void simulationSetYawRad(double yawRad) { this.yawRad = yawRad; @@ -58,7 +58,6 @@ public void simulationSetLinearAccelMps2(double ax, double ay, double az) { } // ---------------- IO UPDATE (PULL) ---------------- - @Override public void updateInputs(ImuIOInputs inputs) { inputs.connected = true; @@ -67,15 +66,11 @@ public void updateInputs(ImuIOInputs inputs) { // Authoritative sim state inputs.yawPositionRad = yawRad; inputs.yawRateRadPerSec = yawRateRadPerSec; - inputs.linearAccelX = ax; - inputs.linearAccelY = ay; - inputs.linearAccelZ = az; + inputs.linearAccel = new Translation3d(ax, ay, az); - // Jerk: SIM doesn’t have a prior accel here unless you want it; set to 0 by default. + // Jerk: SIM doesn't have a prior accel here unless you want it; set to 0 by default. // If you do want jerk, you can add prevAx/prevAy/prevAz + dt just like the real IO. 
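// (A minimal sketch of that optional path; prevAccel and prevTime are assumed fields, not part of this patch:
//    double now = Timer.getFPGATimestamp();
//    if (prevTime > 0 && now - prevTime > 1e-6) inputs.linearJerk = inputs.linearAccel.minus(prevAccel).div(now - prevTime);
//    prevAccel = inputs.linearAccel; prevTime = now;
//  mirroring the finite-difference jerk computed in ImuIONavX and ImuIOPigeon2 above.)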
- inputs.jerkX = 0.0; - inputs.jerkY = 0.0; - inputs.jerkZ = 0.0; + inputs.linearJerk = Translation3d.kZero; // Maintain odometry history pushOdomSample(Timer.getFPGATimestamp(), yawRad); @@ -101,10 +96,15 @@ public void updateInputs(ImuIOInputs inputs) { // Optional: SIM logging (primitive-friendly) Logger.recordOutput("IMU/YawRad", yawRad); - Logger.recordOutput("IMU/YawDeg", yawRad * RAD_TO_DEG); - Logger.recordOutput("IMU/YawRateDps", yawRateRadPerSec * RAD_TO_DEG); + Logger.recordOutput("IMU/YawDeg", Units.radiansToDegrees(yawRad)); + Logger.recordOutput("IMU/YawRateDps", Units.radiansToDegrees(yawRateRadPerSec)); } + /** + * Zero the YAW to this radian value + * + * @param yawRad The radian value to which to zero + */ @Override public void zeroYawRad(double yawRad) { this.yawRad = yawRad; diff --git a/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java b/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java index 3cdcbb9d..18f56795 100644 --- a/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java +++ b/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java @@ -82,7 +82,7 @@ private static Quaternion quatFromAxisAngle(double ax, double ay, double az, dou return new Quaternion(c, ax * s, ay * s, az * s); } - // Hamilton product: q = a ⊗ b + // Hamilton product: q = a * b (quaternion multiplication) private static Quaternion quatMul(Quaternion a, Quaternion b) { double aw = a.getW(), ax = a.getX(), ay = a.getY(), az = a.getZ(); double bw = b.getW(), bx = b.getX(), by = b.getY(), bz = b.getZ(); @@ -103,8 +103,8 @@ private static Quaternion quatMul(Quaternion a, Quaternion b) { * *

Conventions: yawDeg = rotation about +Z (up), pitchDeg = rotation about +Y. * - *

Order: extra = yaw ⊗ pitch (yaw first, then pitch, both in the camera/base frame) combined = - * base ⊗ extra + *

Order: extra = yaw x pitch (yaw first, then pitch, both in the camera/base frame) combined = + * base x extra */ private static Rotation3d composeCameraExtra(Rotation3d base, double yawDeg, double pitchDeg) { double yaw = Units.degreesToRadians(yawDeg); diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 908cf9c5..74d318aa 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -34,6 +34,7 @@ import frc.robot.util.VirtualSubsystem; import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Arrays; import java.util.Optional; import java.util.OptionalDouble; import java.util.Set; @@ -42,6 +43,7 @@ public class Vision extends VirtualSubsystem { + /** Vision Consumer definition */ @FunctionalInterface public interface VisionConsumer { void accept(Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix stdDevs); @@ -58,18 +60,16 @@ private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} private final Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; - // --- Per-camera monotonic gate --- + // Per-camera monotonic and pose reset gates private final double[] lastAcceptedTsPerCam; - - // --- Pose reset gate --- private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; - // --- Smoothing buffer (recent fused estimates) --- + // Smoothing buffer (recent fused estimates) private final ArrayDeque fusedBuffer = new ArrayDeque<>(); private final double smoothWindowSec = 0.25; private final int smoothMaxSize = 12; - // --- Trusted tags configuration (swappable per event/field) --- + // Trusted tags configuration (swappable per event/field) private final AtomicReference> trustedTags = new AtomicReference<>(Set.of()); private volatile boolean requireTrustedTag = false; @@ -77,11 +77,12 @@ private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} private volatile double trustedTagStdDevScale = 0.70; // < 1 => more trusted private volatile double untrustedTagStdDevScale = 1.40; // > 1 => less trusted - // --- Optional 254-style yaw gate for single-tag --- + // Yaw-rate gate for single-tag measurements private volatile boolean enableSingleTagYawGate = true; private volatile double yawGateLookbackSec = 0.30; private volatile double yawGateLimitRadPerSec = 5.0; + /** Constructor */ public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { this.drive = drive; this.consumer = consumer; @@ -93,7 +94,7 @@ public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { } this.lastAcceptedTsPerCam = new double[io.length]; - java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); + Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); // Log robot->camera transforms if available int n = Math.min(camConfigs.length, io.length); @@ -102,37 +103,7 @@ public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { } } - // -------- Runtime configuration hooks -------- - - /** Call when you reset odometry (e.g. auto start, manual reset, etc). */ - public void resetPoseGate(double fpgaNowSeconds) { - lastPoseResetTimestamp = fpgaNowSeconds; - fusedBuffer.clear(); - java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); - } - - /** Swap trusted tag set per event/field without redeploy. 
*/ - public void setTrustedTags(Set tags) { - trustedTags.set(Set.copyOf(tags)); - } - - public void setRequireTrustedTag(boolean require) { - requireTrustedTag = require; - } - - public void setTrustedTagStdDevScales(double trustedScale, double untrustedScale) { - trustedTagStdDevScale = trustedScale; - untrustedTagStdDevScale = untrustedScale; - } - - public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limitRadPerSec) { - enableSingleTagYawGate = enabled; - yawGateLookbackSec = lookbackSec; - yawGateLimitRadPerSec = limitRadPerSec; - } - - // -------- Core periodic -------- - + /** Periodic Function */ @Override public void rbsiPeriodic() { @@ -145,13 +116,13 @@ public void rbsiPeriodic() { Logger.recordOutput("Vision/PoseGateResetFromDrive", false); } - // 1) Update/log camera inputs + // Update/log camera inputs for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); } - // 2) Pick one best accepted estimate per camera for this loop + // Pick one best accepted estimate per camera for this loop final ArrayList perCamAccepted = new ArrayList<>(io.length); for (int cam = 0; cam < io.length; cam++) { @@ -208,20 +179,20 @@ public void rbsiPeriodic() { if (perCamAccepted.isEmpty()) return; - // 3) Fusion time is the newest timestamp among accepted per-camera samples + // Fusion time is the newest timestamp among accepted per-camera samples double tF = perCamAccepted.stream().mapToDouble(e -> e.ts).max().orElse(Double.NaN); if (!Double.isFinite(tF)) return; - // 4) Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse + // Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse TimedEstimate fused = fuseAtTime(perCamAccepted, tF); if (fused == null) return; - // 5) Smooth by fusing recent fused estimates (also aligned to tF) + // Smooth by fusing recent fused estimates (also aligned to tF) pushFused(fused); TimedEstimate smoothed = smoothAtTime(tF); if (smoothed == null) return; - // 6) Inject + // Inject the pose consumer.accept(smoothed.pose, smoothed.ts, smoothed.stdDevs); Logger.recordOutput("Vision/FusedPose", fused.pose); @@ -229,8 +200,62 @@ public void rbsiPeriodic() { Logger.recordOutput("Vision/FusedTimestamp", tF); } - // -------- Gating + scoring -------- + /** Runtime configuration hooks ****************************************** */ + /** + * Call when you reset odometry (e.g. auto start, manual reset, etc). 
+ * + * @param fpgaNowSeconds Timestamp for the pose gate reset + */ + public void resetPoseGate(double fpgaNowSeconds) { + lastPoseResetTimestamp = fpgaNowSeconds; + fusedBuffer.clear(); + Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); + } + + /** + * Swap trusted tag set per event/field without redeploy + * + * @param tags Set of trusted tags to use + */ + public void setTrustedTags(Set tags) { + trustedTags.set(Set.copyOf(tags)); + } + + /** + * Set whether to requite trusted tags + * + * @param require Boolean + */ + public void setRequireTrustedTag(boolean require) { + requireTrustedTag = require; + } + + /** + * Set the (un)trusted standard deviation scales + * + * @param trustedScale The scale for trusted tags + * @param untrustedScale The scale for untrusted tags + */ + public void setTrustedTagStdDevScales(double trustedScale, double untrustedScale) { + trustedTagStdDevScale = trustedScale; + untrustedTagStdDevScale = untrustedScale; + } + + /** + * Set the yaw gate for single-tag measurements + * + * @param enabled Enable the gate? + * @param lookbackSec Lookback time + * @param limitRadPerSec Yaw rate above which single-tag measurements will be ignored + */ + public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limitRadPerSec) { + enableSingleTagYawGate = enabled; + yawGateLookbackSec = lookbackSec; + yawGateLimitRadPerSec = limitRadPerSec; + } + + /** Gating + Scoring ***************************************************** */ private static final class GateResult { final boolean accepted; @@ -343,8 +368,7 @@ private boolean isBetter(TimedEstimate a, TimedEstimate b) { return ta < tb; } - // -------- Time alignment + fusion -------- - + /** Time alignment & fusion ********************************************** */ private TimedEstimate fuseAtTime(ArrayList estimates, double tF) { final ArrayList aligned = new ArrayList<>(estimates.size()); for (var e : estimates) { @@ -410,8 +434,7 @@ private TimedEstimate inverseVarianceFuse(ArrayList alignedAtTF, return new TimedEstimate(fused, tF, fusedStd); } - // -------- Smoothing buffer -------- - + /** Smoothing buffer ***************************************************** */ private void pushFused(TimedEstimate fused) { fusedBuffer.addLast(fused); diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java index fad294d2..7bc61ed2 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ -19,7 +19,7 @@ public interface VisionIO { class VisionIOInputs { public boolean connected = false; - /** Latest "servo to target" observation (optional) */ + /** Latest "camera to target" observation (optional) */ public TargetObservation latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java index 4e5761b9..f554cc4c 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java @@ -62,7 +62,7 @@ public VisionIOPhotonVisionSim( @Override public void updateInputs(VisionIOInputs inputs) { // NOTE: This updates the sim world every time a sim camera is polled. - // That's fine (fast enough), but if you want “update once per loop,” see note below. 
+ // That's fine (fast enough), but if you want "update once per loop," see note below. visionSim.update(poseSupplier.get()); // Then pull results like normal (and emit PoseObservation + usedTagIds sets) From f594d2e95970800c65b9c6ad6846b6c3958e0bd6 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Sat, 7 Feb 2026 13:50:54 -0700 Subject: [PATCH 03/19] Was double-adding pose estimates The SIM mode was not separately gated behind an `else`, so REAL mode was double-adding pose estimates. --- .../java/frc/robot/subsystems/drive/Drive.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index aeb71e65..a6045156 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -388,17 +388,17 @@ public void rbsiPeriodic() { Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); gyroDisconnectedAlert.set(!imuInputs.connected); return; - } - - // SIMULATION: Keep sim pose buffer time-aligned, too - double now = Timer.getFPGATimestamp(); - poseBuffer.addSample(now, simPhysics.getPose()); - yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); - yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); + } else { - Logger.recordOutput("Drive/Pose", simPhysics.getPose()); - gyroDisconnectedAlert.set(false); + // SIMULATION: Keep sim pose buffer time-aligned, too + double now = Timer.getFPGATimestamp(); + poseBuffer.addSample(now, simPhysics.getPose()); + yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); + yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); + Logger.recordOutput("Drive/Pose", simPhysics.getPose()); + gyroDisconnectedAlert.set(false); + } } finally { odometryLock.unlock(); } From e2d8fad903cde1d96a7ab96f433877a1fd1ecc72 Mon Sep 17 00:00:00 2001 From: "Timothy P.
Ellsworth Bowers" Date: Sat, 7 Feb 2026 16:07:03 -0700 Subject: [PATCH 04/19] Fix double-count of pose estimation; add kS for swerve turn --- src/main/java/frc/robot/Constants.java | 39 ++++++++++--------- .../frc/robot/subsystems/drive/Drive.java | 1 + .../subsystems/drive/ModuleIOBlended.java | 3 +- .../robot/subsystems/drive/ModuleIOSpark.java | 3 +- .../drive/ModuleIOSparkCANcoder.java | 3 +- .../subsystems/drive/ModuleIOTalonFX.java | 2 +- .../subsystems/drive/ModuleIOTalonFXS.java | 2 +- 7 files changed, 29 insertions(+), 24 deletions(-) diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 7d30eaed..1887d604 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -74,7 +74,7 @@ public final class Constants { private static SwerveType swerveType = SwerveType.PHOENIX6; // PHOENIX6, YAGSL private static CTREPro phoenixPro = CTREPro.UNLICENSED; // LICENSED, UNLICENSED private static AutoType autoType = AutoType.MANUAL; // MANUAL, PATHPLANNER, CHOREO - private static VisionType visionType = VisionType.NONE; // PHOTON, LIMELIGHT, NONE + private static VisionType visionType = VisionType.PHOTON; // PHOTON, LIMELIGHT, NONE /** Enumerate the robot types (name your robots here) */ public static enum RobotType { @@ -324,6 +324,7 @@ public static final class DrivebaseConstants { SwerveConstants.kDriveGearRatio / DCMotor.getKrakenX60Foc(1).KtNMPerAmp; public static final double kSteerP = 400.0; public static final double kSteerD = 20.0; + public static final double kSteerS = 2.0; // Odometry-related constants public static final double kHistorySize = 1.5; // seconds @@ -466,7 +467,7 @@ public record CameraConfig( // Example Cameras are mounted in the back corners, 18" up from the floor, facing sideways public static final CameraConfig[] ALL = { new CameraConfig( - "camera_0", + "Photon_BW7", new Transform3d( Inches.of(-13.0), Inches.of(13.0), @@ -483,23 +484,23 @@ public record CameraConfig( } }), // - new CameraConfig( - "camera_1", - new Transform3d( - Inches.of(-13.0), - Inches.of(-13.0), - Inches.of(12.0), - new Rotation3d(0.0, 0.0, -Math.PI / 2)), - 1.0, - new SimCameraProperties() { - { - setCalibration(1280, 800, Rotation2d.fromDegrees(120)); - setCalibError(0.25, 0.08); - setFPS(30); - setAvgLatencyMs(20); - setLatencyStdDevMs(5); - } - }), + // new CameraConfig( + // "camera_1", + // new Transform3d( + // Inches.of(-13.0), + // Inches.of(-13.0), + // Inches.of(12.0), + // new Rotation3d(0.0, 0.0, -Math.PI / 2)), + // 1.0, + // new SimCameraProperties() { + // { + // setCalibration(1280, 800, Rotation2d.fromDegrees(120)); + // setCalibError(0.25, 0.08); + // setFPS(30); + // setAvgLatencyMs(20); + // setLatencyStdDevMs(5); + // } + // }), // ... 
And more, if needed }; diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index a6045156..2dcc9591 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -388,6 +388,7 @@ public void rbsiPeriodic() { Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); gyroDisconnectedAlert.set(!imuInputs.connected); return; + } else { // SIMULATION: Keep sim pose buffer time-aligned, too diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java index 5c68481c..01d0ffc2 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java @@ -243,7 +243,8 @@ public ModuleIOBlended(int module) { SwerveConstants.turnPIDMinInput, SwerveConstants.turnPIDMaxInput) .pid(DrivebaseConstants.kSteerP, 0.0, DrivebaseConstants.kSteerD) .feedForward - .kV(0.0); + .kV(0.0) + .kS(DrivebaseConstants.kSteerS); turnConfig .signals .absoluteEncoderPositionAlwaysOn(true) diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java index 75738618..1c92c68d 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java @@ -183,7 +183,8 @@ public ModuleIOSpark(int module) { SwerveConstants.turnPIDMinInput, SwerveConstants.turnPIDMaxInput) .pid(DrivebaseConstants.kSteerP, 0.0, DrivebaseConstants.kSteerD) .feedForward - .kV(0.0); + .kV(0.0) + .kS(DrivebaseConstants.kSteerS); turnConfig .signals .absoluteEncoderPositionAlwaysOn(true) diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java index fc76b593..587f8ec8 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java @@ -202,7 +202,8 @@ public ModuleIOSparkCANcoder(int module) { SwerveConstants.turnPIDMinInput, SwerveConstants.turnPIDMaxInput) .pid(DrivebaseConstants.kSteerP, 0.0, DrivebaseConstants.kSteerD) .feedForward - .kV(0.0); + .kV(0.0) + .kS(DrivebaseConstants.kSteerS); turnConfig .signals .absoluteEncoderPositionAlwaysOn(true) diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java index 65da2c71..df569b4b 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java @@ -192,7 +192,7 @@ public ModuleIOTalonFX(int module) { .withKP(DrivebaseConstants.kSteerP) .withKI(0.0) .withKD(DrivebaseConstants.kSteerD) - .withKS(0.0) + .withKS(DrivebaseConstants.kSteerS) .withKV(0.0) .withKA(0.0) .withStaticFeedforwardSign(StaticFeedforwardSignValue.UseClosedLoopSign); diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java index 23f2885c..f3cc029a 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java @@ -180,7 +180,7 @@ public ModuleIOTalonFXS( .withKP(DrivebaseConstants.kSteerP) .withKI(0.0) .withKD(DrivebaseConstants.kSteerD) - .withKS(0.0) + .withKS(DrivebaseConstants.kSteerS) .withKV(0.0) 
.withKA(0.0) .withStaticFeedforwardSign(StaticFeedforwardSignValue.UseClosedLoopSign); From ecff3ea3f965eca3e2fd808705694bc66f3b2726 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Sat, 7 Feb 2026 16:50:25 -0700 Subject: [PATCH 05/19] Clean up the consumer syntax --- .../frc/robot/subsystems/drive/Drive.java | 7 +- .../frc/robot/subsystems/vision/Vision.java | 145 +++++++++++------- src/main/java/frc/robot/util/RBSIPose.java | 21 +++ 3 files changed, 111 insertions(+), 62 deletions(-) create mode 100644 src/main/java/frc/robot/util/RBSIPose.java diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 2dcc9591..041a0e61 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -22,7 +22,6 @@ import edu.wpi.first.hal.FRCNetComm.tInstances; import edu.wpi.first.hal.FRCNetComm.tResourceType; import edu.wpi.first.hal.HAL; -import edu.wpi.first.math.Matrix; import edu.wpi.first.math.controller.PIDController; import edu.wpi.first.math.controller.ProfiledPIDController; import edu.wpi.first.math.estimator.SwerveDrivePoseEstimator; @@ -52,6 +51,7 @@ import frc.robot.util.LocalADStarAK; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; +import frc.robot.util.RBSIPose; import frc.robot.util.RBSISubsystem; import java.util.Optional; import java.util.OptionalDouble; @@ -743,10 +743,11 @@ public void zeroHeading() { } /** Adds a vision measurement safely into the PoseEstimator. */ - public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { + public void addVisionMeasurement(RBSIPose rbsiPose) { odometryLock.lock(); try { - m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); + m_PoseEstimator.addVisionMeasurement( + rbsiPose.pose(), rbsiPose.timestampSeconds(), rbsiPose.stdDevs()); } finally { odometryLock.unlock(); } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 74d318aa..e37b6a72 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -25,12 +25,14 @@ import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.geometry.Translation2d; import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; import frc.robot.Constants; import frc.robot.FieldConstants; import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; +import frc.robot.util.RBSIPose; import frc.robot.util.VirtualSubsystem; import java.util.ArrayDeque; import java.util.ArrayList; @@ -45,13 +47,11 @@ public class Vision extends VirtualSubsystem { /** Vision Consumer definition */ @FunctionalInterface - public interface VisionConsumer { - void accept(Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix stdDevs); + public interface PoseMeasurementConsumer { + void accept(RBSIPose measurement); } - private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} - - private final VisionConsumer consumer; + private final PoseMeasurementConsumer consumer; private final Drive drive; private long lastSeenPoseResetEpoch = -1; @@ -65,7 +65,7 @@ private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; // 
Smoothing buffer (recent fused estimates) - private final ArrayDeque fusedBuffer = new ArrayDeque<>(); + private final ArrayDeque fusedBuffer = new ArrayDeque<>(); private final double smoothWindowSec = 0.25; private final int smoothMaxSize = 12; @@ -82,8 +82,10 @@ private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} private volatile double yawGateLookbackSec = 0.30; private volatile double yawGateLimitRadPerSec = 5.0; + private static final double kMinVariance = 1e-12; // prevents divide-by-zero explosions + /** Constructor */ - public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { + public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... io) { this.drive = drive; this.consumer = consumer; this.io = io; @@ -123,14 +125,14 @@ public void rbsiPeriodic() { } // Pick one best accepted estimate per camera for this loop - final ArrayList perCamAccepted = new ArrayList<>(io.length); + final ArrayList perCamAccepted = new ArrayList<>(io.length); for (int cam = 0; cam < io.length; cam++) { int seen = 0; int accepted = 0; int rejected = 0; - TimedEstimate best = null; + RBSIPose best = null; double bestTrustScale = Double.NaN; int bestTrustedCount = 0; int bestTagCount = 0; @@ -150,7 +152,7 @@ public void rbsiPeriodic() { continue; } - TimedEstimate est = built.estimate; + RBSIPose est = built.estimate; if (best == null || isBetter(est, best)) { best = est; bestTrustScale = built.trustScale; @@ -161,12 +163,12 @@ public void rbsiPeriodic() { if (best != null) { accepted++; - lastAcceptedTsPerCam[cam] = best.ts; + lastAcceptedTsPerCam[cam] = best.timestampSeconds(); perCamAccepted.add(best); - Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose); - Logger.recordOutput("Vision/Camera" + cam + "/InjectedTimestamp", best.ts); - Logger.recordOutput("Vision/Camera" + cam + "/InjectedStdDevs", best.stdDevs); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose()); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedTimestamp", best.timestampSeconds()); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedStdDevs", best.stdDevs()); Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustScale", bestTrustScale); Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustedCount", bestTrustedCount); Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTagCount", bestTagCount); @@ -180,23 +182,24 @@ public void rbsiPeriodic() { if (perCamAccepted.isEmpty()) return; // Fusion time is the newest timestamp among accepted per-camera samples - double tF = perCamAccepted.stream().mapToDouble(e -> e.ts).max().orElse(Double.NaN); + double tF = + perCamAccepted.stream().mapToDouble(e -> e.timestampSeconds()).max().orElse(Double.NaN); if (!Double.isFinite(tF)) return; // Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse - TimedEstimate fused = fuseAtTime(perCamAccepted, tF); + RBSIPose fused = fuseAtTime(perCamAccepted, tF); if (fused == null) return; // Smooth by fusing recent fused estimates (also aligned to tF) pushFused(fused); - TimedEstimate smoothed = smoothAtTime(tF); + RBSIPose smoothed = smoothAtTime(tF); if (smoothed == null) return; - // Inject the pose - consumer.accept(smoothed.pose, smoothed.ts, smoothed.stdDevs); + // Inject the pose -- commented out for now... 
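+ // (Consumer wiring, for reference: the call below is live. RobotContainer passes
+ // Drive::addVisionMeasurement as this PoseMeasurementConsumer, so the smoothed RBSIPose is
+ // forwarded to m_PoseEstimator.addVisionMeasurement(pose(), timestampSeconds(), stdDevs())
+ // under the Drive odometry lock.)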
+ consumer.accept(smoothed); - Logger.recordOutput("Vision/FusedPose", fused.pose); - Logger.recordOutput("Vision/SmoothedPose", smoothed.pose); + Logger.recordOutput("Vision/FusedPose", fused.pose()); + Logger.recordOutput("Vision/SmoothedPose", smoothed.pose()); Logger.recordOutput("Vision/FusedTimestamp", tF); } @@ -301,11 +304,11 @@ private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation o } private static final class BuiltEstimate { - final TimedEstimate estimate; + final RBSIPose estimate; final double trustScale; final int trustedCount; - BuiltEstimate(TimedEstimate estimate, double trustScale, int trustedCount) { + BuiltEstimate(RBSIPose estimate, double trustScale, int trustedCount) { this.estimate = estimate; this.trustScale = trustScale; this.trustedCount = trustedCount; @@ -353,7 +356,7 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { Logger.recordOutput("Vision/Camera" + cam + "/InjectedFracTrusted", fracTrusted); return new BuiltEstimate( - new TimedEstimate( + new RBSIPose( obs.pose().toPose2d(), obs.timestamp(), VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)), @@ -361,20 +364,20 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { trustedCount); } - private boolean isBetter(TimedEstimate a, TimedEstimate b) { + private boolean isBetter(RBSIPose a, RBSIPose b) { // Lower trace of stddev vector (x+y+theta) is better - double ta = a.stdDevs.get(0, 0) + a.stdDevs.get(1, 0) + a.stdDevs.get(2, 0); - double tb = b.stdDevs.get(0, 0) + b.stdDevs.get(1, 0) + b.stdDevs.get(2, 0); + double ta = a.stdDevs().get(0, 0) + a.stdDevs().get(1, 0) + a.stdDevs().get(2, 0); + double tb = b.stdDevs().get(0, 0) + b.stdDevs().get(1, 0) + b.stdDevs().get(2, 0); return ta < tb; } /** Time alignment & fusion ********************************************** */ - private TimedEstimate fuseAtTime(ArrayList estimates, double tF) { - final ArrayList aligned = new ArrayList<>(estimates.size()); + private RBSIPose fuseAtTime(ArrayList estimates, double tF) { + final ArrayList aligned = new ArrayList<>(estimates.size()); for (var e : estimates) { - Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); if (alignedPose == null) return null; - aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + aligned.add(new RBSIPose(alignedPose, tF, e.stdDevs())); } return inverseVarianceFuse(aligned, tF); } @@ -394,48 +397,70 @@ private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tF) { return visionPoseAtTs.transformBy(ts_T_tf); } - private TimedEstimate inverseVarianceFuse(ArrayList alignedAtTF, double tF) { - if (alignedAtTF.isEmpty()) return null; + private RBSIPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) { + if (alignedAtTF == null || alignedAtTF.isEmpty()) return null; if (alignedAtTF.size() == 1) return alignedAtTF.get(0); - double sumWx = 0, sumWy = 0, sumWth = 0; - double sumX = 0, sumY = 0; - double sumCos = 0, sumSin = 0; + double sumWx = 0.0, sumWy = 0.0, sumWth = 0.0; + double sumX = 0.0, sumY = 0.0; + double sumCos = 0.0, sumSin = 0.0; + + for (int i = 0; i < alignedAtTF.size(); i++) { + final RBSIPose e = alignedAtTF.get(i); + final Pose2d p = e.pose(); + final Matrix s = e.stdDevs(); + + final double sx = s.get(0, 0); + final double sy = s.get(1, 0); + final double sth = s.get(2, 0); - for (var e : alignedAtTF) { - double sx = e.stdDevs.get(0, 0); - double sy = e.stdDevs.get(1, 0); - double sth = 
e.stdDevs.get(2, 0); + // variance = std^2, clamp away from 0 + final double vx = Math.max(sx * sx, kMinVariance); + final double vy = Math.max(sy * sy, kMinVariance); + final double vth = Math.max(sth * sth, kMinVariance); - double vx = sx * sx; - double vy = sy * sy; - double vth = sth * sth; + final double wx = 1.0 / vx; + final double wy = 1.0 / vy; + final double wth = 1.0 / vth; - double wx = 1.0 / vx; - double wy = 1.0 / vy; - double wth = 1.0 / vth; + // If any weight goes NaN/Inf, skip this measurement rather than poisoning the fuse. + if (!Double.isFinite(wx) || !Double.isFinite(wy) || !Double.isFinite(wth)) { + continue; + } sumWx += wx; sumWy += wy; sumWth += wth; - sumX += e.pose.getX() * wx; - sumY += e.pose.getY() * wy; + sumX += p.getX() * wx; + sumY += p.getY() * wy; - sumCos += e.pose.getRotation().getCos() * wth; - sumSin += e.pose.getRotation().getSin() * wth; + final Rotation2d rot = p.getRotation(); + sumCos += rot.getCos() * wth; + sumSin += rot.getSin() * wth; } - Pose2d fused = new Pose2d(sumX / sumWx, sumY / sumWy, new Rotation2d(sumCos, sumSin)); + // If everything got skipped, fail closed. + if (sumWx <= 0.0 || sumWy <= 0.0 || sumWth <= 0.0) return null; + + final Translation2d fusedTranslation = new Translation2d(sumX / sumWx, sumY / sumWy); + + // Rotation2d(cos, sin) will normalize internally; if both are ~0, fall back to zero. + final Rotation2d fusedRotation = + (Math.abs(sumCos) < 1e-12 && Math.abs(sumSin) < 1e-12) + ? Rotation2d.kZero + : new Rotation2d(sumCos, sumSin); + + final Pose2d fusedPose = new Pose2d(fusedTranslation, fusedRotation); - Matrix fusedStd = + final Matrix fusedStdDevs = VecBuilder.fill(Math.sqrt(1.0 / sumWx), Math.sqrt(1.0 / sumWy), Math.sqrt(1.0 / sumWth)); - return new TimedEstimate(fused, tF, fusedStd); + return new RBSIPose(fusedPose, tF, fusedStdDevs); } /** Smoothing buffer ***************************************************** */ - private void pushFused(TimedEstimate fused) { + private void pushFused(RBSIPose fused) { fusedBuffer.addLast(fused); while (fusedBuffer.size() > smoothMaxSize) { @@ -443,20 +468,22 @@ private void pushFused(TimedEstimate fused) { } // Trim by time window relative to newest - while (!fusedBuffer.isEmpty() && fused.ts - fusedBuffer.peekFirst().ts > smoothWindowSec) { + while (!fusedBuffer.isEmpty() + && fused.timestampSeconds() - fusedBuffer.peekFirst().timestampSeconds() + > smoothWindowSec) { fusedBuffer.removeFirst(); } } - private TimedEstimate smoothAtTime(double tF) { + private RBSIPose smoothAtTime(double tF) { if (fusedBuffer.isEmpty()) return null; if (fusedBuffer.size() == 1) return fusedBuffer.peekLast(); - final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); + final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); for (var e : fusedBuffer) { - Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); if (alignedPose == null) continue; - aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + aligned.add(new RBSIPose(alignedPose, tF, e.stdDevs())); } if (aligned.isEmpty()) return fusedBuffer.peekLast(); diff --git a/src/main/java/frc/robot/util/RBSIPose.java b/src/main/java/frc/robot/util/RBSIPose.java new file mode 100644 index 00000000..4a4df2fc --- /dev/null +++ b/src/main/java/frc/robot/util/RBSIPose.java @@ -0,0 +1,21 @@ +// Copyright (c) 2024-2026 Az-FIRST +// http://github.com/AZ-First +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the 
GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.util; + +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; + +public record RBSIPose(Pose2d pose, double timestampSeconds, Matrix stdDevs) {} From e4b0e3e1f8f4dfaa17cb7bdbf165651eaeeb392b Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 9 Feb 2026 10:08:40 -0700 Subject: [PATCH 06/19] Add extra logging for when the vision pose doesn't work --- .../frc/robot/subsystems/vision/Vision.java | 45 ++++++++++++------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index e37b6a72..c7849e31 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -82,7 +82,8 @@ public interface PoseMeasurementConsumer { private volatile double yawGateLookbackSec = 0.30; private volatile double yawGateLimitRadPerSec = 5.0; - private static final double kMinVariance = 1e-12; // prevents divide-by-zero explosions + // Variance minimum for fusing poses to prevent divide-by-zero explosions + private static final double kMinVariance = 1e-12; /** Constructor */ public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... io) { @@ -109,43 +110,52 @@ public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... 
io) { @Override public void rbsiPeriodic() { + // Pose reset logic and logging long epoch = drive.getPoseResetEpoch(); if (epoch != lastSeenPoseResetEpoch) { lastSeenPoseResetEpoch = epoch; - resetPoseGate(drive.getLastPoseResetTimestamp()); // your existing method + resetPoseGate(drive.getLastPoseResetTimestamp()); Logger.recordOutput("Vision/PoseGateResetFromDrive", true); } else { Logger.recordOutput("Vision/PoseGateResetFromDrive", false); } - // Update/log camera inputs + // Update & log camera inputs for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); } - // Pick one best accepted estimate per camera for this loop + // Pick the one best accepted estimate per camera for this loop final ArrayList perCamAccepted = new ArrayList<>(io.length); + // Loop over cameras for (int cam = 0; cam < io.length; cam++) { + + // Instantiate variables for this camera int seen = 0; int accepted = 0; int rejected = 0; - RBSIPose best = null; double bestTrustScale = Double.NaN; int bestTrustedCount = 0; int bestTagCount = 0; + // Loop over observations for this camera this loop for (var obs : inputs[cam].poseObservations) { + + // Increment seen++; + // Check the gating criteria; move on if bad GateResult gate = passesHardGatesAndYawGate(cam, obs); + Logger.recordOutput("Vision/Camera" + cam + "/GateFail", gate.reason); if (!gate.accepted) { rejected++; continue; } + // Build a pose estimate; move on if bad BuiltEstimate built = buildEstimate(cam, obs); if (built == null) { rejected++; @@ -261,9 +271,11 @@ public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limi /** Gating + Scoring ***************************************************** */ private static final class GateResult { final boolean accepted; + final String reason; - GateResult(boolean accepted) { + GateResult(boolean accepted, String reason) { this.accepted = accepted; + this.reason = reason; } } @@ -271,36 +283,37 @@ private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation o final double ts = obs.timestamp(); // Monotonic per-camera time - if (ts <= lastAcceptedTsPerCam[cam]) return new GateResult(false); + if (ts <= lastAcceptedTsPerCam[cam]) return new GateResult(false, "monotonic time"); // Reject anything older than last pose reset - if (ts < lastPoseResetTimestamp) return new GateResult(false); + if (ts < lastPoseResetTimestamp) return new GateResult(false, "older than pose reset"); // Must have tags - if (obs.tagCount() <= 0) return new GateResult(false); + if (obs.tagCount() <= 0) return new GateResult(false, "no tags"); // Single-tag ambiguity gate - if (obs.tagCount() == 1 && obs.ambiguity() > maxAmbiguity) return new GateResult(false); + if (obs.tagCount() == 1 && obs.ambiguity() > maxAmbiguity) + return new GateResult(false, "highly ambiguous"); // Z sanity - if (Math.abs(obs.pose().getZ()) > maxZError) return new GateResult(false); + if (Math.abs(obs.pose().getZ()) > maxZError) return new GateResult(false, "z not sane"); // Field bounds Pose3d p = obs.pose(); if (p.getX() < 0.0 || p.getX() > FieldConstants.aprilTagLayout.getFieldLength()) - return new GateResult(false); + return new GateResult(false, "out of bounds field X"); if (p.getY() < 0.0 || p.getY() > FieldConstants.aprilTagLayout.getFieldWidth()) - return new GateResult(false); + return new GateResult(false, "out of bounds field Y"); - // Optional 254-style yaw gate: only meaningful for single-tag + // Optional yaw gate: only meaningful for single-tag if 
(enableSingleTagYawGate && obs.tagCount() == 1) { OptionalDouble maxYaw = drive.getMaxAbsYawRateRadPerSec(ts - yawGateLookbackSec, ts); if (maxYaw.isPresent() && maxYaw.getAsDouble() > yawGateLimitRadPerSec) { - return new GateResult(false); + return new GateResult(false, "YAW gate failed"); } } - return new GateResult(true); + return new GateResult(true, ""); } private static final class BuiltEstimate { From 2b8c50e2979b173ebe7c316b05731ff8d03044b4 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 9 Feb 2026 16:33:50 -0700 Subject: [PATCH 07/19] Call the pose estimator "TimedPose" --- .../frc/robot/subsystems/drive/Drive.java | 4 +- .../frc/robot/subsystems/vision/Vision.java | 44 +++++++++---------- .../util/{RBSIPose.java => TimedPose.java} | 2 +- 3 files changed, 25 insertions(+), 25 deletions(-) rename src/main/java/frc/robot/util/{RBSIPose.java => TimedPose.java} (89%) diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 041a0e61..71b9e378 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -51,8 +51,8 @@ import frc.robot.util.LocalADStarAK; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; -import frc.robot.util.RBSIPose; import frc.robot.util.RBSISubsystem; +import frc.robot.util.TimedPose; import java.util.Optional; import java.util.OptionalDouble; import java.util.concurrent.locks.Lock; @@ -743,7 +743,7 @@ public void zeroHeading() { } /** Adds a vision measurement safely into the PoseEstimator. */ - public void addVisionMeasurement(RBSIPose rbsiPose) { + public void addVisionMeasurement(TimedPose rbsiPose) { odometryLock.lock(); try { m_PoseEstimator.addVisionMeasurement( diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index c7849e31..86c10558 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -32,7 +32,7 @@ import frc.robot.FieldConstants; import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; -import frc.robot.util.RBSIPose; +import frc.robot.util.TimedPose; import frc.robot.util.VirtualSubsystem; import java.util.ArrayDeque; import java.util.ArrayList; @@ -48,7 +48,7 @@ public class Vision extends VirtualSubsystem { /** Vision Consumer definition */ @FunctionalInterface public interface PoseMeasurementConsumer { - void accept(RBSIPose measurement); + void accept(TimedPose measurement); } private final PoseMeasurementConsumer consumer; @@ -65,7 +65,7 @@ public interface PoseMeasurementConsumer { private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; // Smoothing buffer (recent fused estimates) - private final ArrayDeque fusedBuffer = new ArrayDeque<>(); + private final ArrayDeque fusedBuffer = new ArrayDeque<>(); private final double smoothWindowSec = 0.25; private final int smoothMaxSize = 12; @@ -127,7 +127,7 @@ public void rbsiPeriodic() { } // Pick the one best accepted estimate per camera for this loop - final ArrayList perCamAccepted = new ArrayList<>(io.length); + final ArrayList perCamAccepted = new ArrayList<>(io.length); // Loop over cameras for (int cam = 0; cam < io.length; cam++) { @@ -136,7 +136,7 @@ public void rbsiPeriodic() { int seen = 0; int accepted = 0; int rejected = 0; - RBSIPose best = null; + TimedPose best = null; double bestTrustScale = 
Double.NaN; int bestTrustedCount = 0; int bestTagCount = 0; @@ -162,7 +162,7 @@ public void rbsiPeriodic() { continue; } - RBSIPose est = built.estimate; + TimedPose est = built.estimate; if (best == null || isBetter(est, best)) { best = est; bestTrustScale = built.trustScale; @@ -197,12 +197,12 @@ public void rbsiPeriodic() { if (!Double.isFinite(tF)) return; // Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse - RBSIPose fused = fuseAtTime(perCamAccepted, tF); + TimedPose fused = fuseAtTime(perCamAccepted, tF); if (fused == null) return; // Smooth by fusing recent fused estimates (also aligned to tF) pushFused(fused); - RBSIPose smoothed = smoothAtTime(tF); + TimedPose smoothed = smoothAtTime(tF); if (smoothed == null) return; // Inject the pose -- commented out for now... @@ -317,11 +317,11 @@ private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation o } private static final class BuiltEstimate { - final RBSIPose estimate; + final TimedPose estimate; final double trustScale; final int trustedCount; - BuiltEstimate(RBSIPose estimate, double trustScale, int trustedCount) { + BuiltEstimate(TimedPose estimate, double trustScale, int trustedCount) { this.estimate = estimate; this.trustScale = trustScale; this.trustedCount = trustedCount; @@ -369,7 +369,7 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { Logger.recordOutput("Vision/Camera" + cam + "/InjectedFracTrusted", fracTrusted); return new BuiltEstimate( - new RBSIPose( + new TimedPose( obs.pose().toPose2d(), obs.timestamp(), VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)), @@ -377,7 +377,7 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { trustedCount); } - private boolean isBetter(RBSIPose a, RBSIPose b) { + private boolean isBetter(TimedPose a, TimedPose b) { // Lower trace of stddev vector (x+y+theta) is better double ta = a.stdDevs().get(0, 0) + a.stdDevs().get(1, 0) + a.stdDevs().get(2, 0); double tb = b.stdDevs().get(0, 0) + b.stdDevs().get(1, 0) + b.stdDevs().get(2, 0); @@ -385,12 +385,12 @@ private boolean isBetter(RBSIPose a, RBSIPose b) { } /** Time alignment & fusion ********************************************** */ - private RBSIPose fuseAtTime(ArrayList estimates, double tF) { - final ArrayList aligned = new ArrayList<>(estimates.size()); + private TimedPose fuseAtTime(ArrayList estimates, double tF) { + final ArrayList aligned = new ArrayList<>(estimates.size()); for (var e : estimates) { Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); if (alignedPose == null) return null; - aligned.add(new RBSIPose(alignedPose, tF, e.stdDevs())); + aligned.add(new TimedPose(alignedPose, tF, e.stdDevs())); } return inverseVarianceFuse(aligned, tF); } @@ -410,7 +410,7 @@ private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tF) { return visionPoseAtTs.transformBy(ts_T_tf); } - private RBSIPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) { + private TimedPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) { if (alignedAtTF == null || alignedAtTF.isEmpty()) return null; if (alignedAtTF.size() == 1) return alignedAtTF.get(0); @@ -419,7 +419,7 @@ private RBSIPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) double sumCos = 0.0, sumSin = 0.0; for (int i = 0; i < alignedAtTF.size(); i++) { - final RBSIPose e = alignedAtTF.get(i); + final TimedPose e = alignedAtTF.get(i); final Pose2d p = e.pose(); final Matrix s = e.stdDevs(); @@ -469,11 +469,11 @@ private 
RBSIPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) final Matrix fusedStdDevs = VecBuilder.fill(Math.sqrt(1.0 / sumWx), Math.sqrt(1.0 / sumWy), Math.sqrt(1.0 / sumWth)); - return new RBSIPose(fusedPose, tF, fusedStdDevs); + return new TimedPose(fusedPose, tF, fusedStdDevs); } /** Smoothing buffer ***************************************************** */ - private void pushFused(RBSIPose fused) { + private void pushFused(TimedPose fused) { fusedBuffer.addLast(fused); while (fusedBuffer.size() > smoothMaxSize) { @@ -488,15 +488,15 @@ private void pushFused(RBSIPose fused) { } } - private RBSIPose smoothAtTime(double tF) { + private TimedPose smoothAtTime(double tF) { if (fusedBuffer.isEmpty()) return null; if (fusedBuffer.size() == 1) return fusedBuffer.peekLast(); - final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); + final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); for (var e : fusedBuffer) { Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); if (alignedPose == null) continue; - aligned.add(new RBSIPose(alignedPose, tF, e.stdDevs())); + aligned.add(new TimedPose(alignedPose, tF, e.stdDevs())); } if (aligned.isEmpty()) return fusedBuffer.peekLast(); diff --git a/src/main/java/frc/robot/util/RBSIPose.java b/src/main/java/frc/robot/util/TimedPose.java similarity index 89% rename from src/main/java/frc/robot/util/RBSIPose.java rename to src/main/java/frc/robot/util/TimedPose.java index 4a4df2fc..440cd315 100644 --- a/src/main/java/frc/robot/util/RBSIPose.java +++ b/src/main/java/frc/robot/util/TimedPose.java @@ -18,4 +18,4 @@ import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; -public record RBSIPose(Pose2d pose, double timestampSeconds, Matrix stdDevs) {} +public record TimedPose(Pose2d pose, double timestampSeconds, Matrix stdDevs) {} From 3bf64b5b63d0e47979c647107add4bdc0267cccf Mon Sep 17 00:00:00 2001 From: "Timothy P. 
Ellsworth Bowers" Date: Tue, 10 Feb 2026 18:07:07 -0700 Subject: [PATCH 08/19] Move the odometry queue draining into a VirtualSubsystem --- src/main/java/frc/robot/RobotContainer.java | 14 +- .../accelerometer/Accelerometer.java | 6 + .../frc/robot/subsystems/drive/Drive.java | 226 +++++-------- .../robot/subsystems/drive/DriveOdometry.java | 297 ++++++++++++++++++ .../java/frc/robot/subsystems/imu/Imu.java | 7 + .../frc/robot/subsystems/vision/Vision.java | 21 +- .../java/frc/robot/util/VirtualSubsystem.java | 22 ++ 7 files changed, 438 insertions(+), 155 deletions(-) create mode 100644 src/main/java/frc/robot/subsystems/drive/DriveOdometry.java diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 521e09d5..8e4fdd9a 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -47,6 +47,7 @@ import frc.robot.commands.DriveCommands; import frc.robot.subsystems.accelerometer.Accelerometer; import frc.robot.subsystems.drive.Drive; +import frc.robot.subsystems.drive.DriveOdometry; import frc.robot.subsystems.drive.SwerveConstants; import frc.robot.subsystems.flywheel_example.Flywheel; import frc.robot.subsystems.flywheel_example.FlywheelIO; @@ -107,6 +108,9 @@ public class RobotContainer { private final Imu m_imu; private final Vision m_vision; + @SuppressWarnings("unused") + private final DriveOdometry m_driveOdometry; + @SuppressWarnings("unused") private final Accelerometer m_accel; @@ -156,10 +160,12 @@ public RobotContainer() { m_imu = new Imu(SwerveConstants.kImu.factory.get()); m_drivebase = new Drive(m_imu); - m_flywheel = new Flywheel(new FlywheelIOSim()); // new Flywheel(new FlywheelIOTalonFX()); + m_driveOdometry = + new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); // see note m_vision = new Vision( m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); + m_flywheel = new Flywheel(new FlywheelIOSim()); // new Flywheel(new FlywheelIOTalonFX()); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -169,6 +175,9 @@ public RobotContainer() { m_imu = new Imu(new ImuIOSim()); m_drivebase = new Drive(m_imu); + m_driveOdometry = + new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); // see note + m_flywheel = new Flywheel(new FlywheelIOSim()); // ---------------- Vision IOs (robot code) ---------------- @@ -208,6 +217,9 @@ public RobotContainer() { RBSICANBusRegistry.initSim(CANBuses.RIO, CANBuses.DRIVE); m_imu = new Imu(new ImuIOSim() {}); m_drivebase = new Drive(m_imu); + m_driveOdometry = + new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); // see note + m_flywheel = new Flywheel(new FlywheelIO() {}); m_vision = new Vision(m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); diff --git a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java index fcb7dd6c..0b5ff652 100644 --- a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java +++ b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java @@ -55,6 +55,12 @@ public Accelerometer(Imu imu) { this.rio = new RioAccelIORoboRIO(200.0); // 200 Hz is a good start } + // Priority value for this virtual subsystem + @Override + protected int getPeriodPriority() { + return +10; + } + @Override public void rbsiPeriodic() { final boolean doProfile = (++profileCount >= PROFILE_EVERY_N); diff --git 
a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 71b9e378..fda4cde7 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -253,156 +253,20 @@ public Drive(Imu imu) { /** Periodic function that is called each robot cycle by the command scheduler */ @Override public void rbsiPeriodic() { - odometryLock.lock(); - try { - // Ensure IMU inputs are fresh for this cycle - final var imuInputs = imu.getInputs(); - - // Stop modules & log empty setpoint states if disabled - if (DriverStation.isDisabled()) { - for (var module : modules) { - module.stop(); - } - Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); - Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); - } - - // Drain per-module odometry queues for this cycle - for (var module : modules) { - module.periodic(); - } - - // -------- REAL ODOMETRY REPLAY (canonical timestamps from module[0]) -------- - if (Constants.getMode() != Mode.SIM) { - final double[] ts = modules[0].getOdometryTimestamps(); - final int n = (ts == null) ? 0 : ts.length; - - // Cache per-module histories ONCE (avoid repeated getters in the loop) - final SwerveModulePosition[][] modHist = new SwerveModulePosition[4][]; - for (int m = 0; m < 4; m++) { - modHist[m] = modules[m].getOdometryPositions(); - } - - // Determine yaw queue availability - final boolean hasYawQueue = - imuInputs.connected - && imuInputs.odometryYawTimestamps != null - && imuInputs.odometryYawPositionsRad != null - && imuInputs.odometryYawTimestamps.length - == imuInputs.odometryYawPositionsRad.length - && imuInputs.odometryYawTimestamps.length > 0; - - final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; - final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; - - // If we have no module samples, still keep yaw buffers “alive” for gating callers - if (n == 0) { - final double now = Timer.getFPGATimestamp(); - yawBuffer.addSample(now, imuInputs.yawPositionRad); - yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); - - gyroDisconnectedAlert.set(!imuInputs.connected); - return; - } - - // Decide whether yaw queue is index-aligned with module[0] timestamps. - // We only trust index alignment if BOTH: - // - yaw has at least n samples - // - yawTs[i] ~= ts[i] for i in range (tight epsilon) - boolean yawIndexAligned = false; - if (hasYawQueue && yawTs.length >= n) { - yawIndexAligned = true; - final double eps = 1e-3; // 1 ms - for (int i = 0; i < n; i++) { - if (Math.abs(yawTs[i] - ts[i]) > eps) { - yawIndexAligned = false; - break; - } - } - } - - // If yaw is NOT index-aligned but we have a yaw queue, build yaw/yawRate buffers ONCE. 
- if (hasYawQueue && !yawIndexAligned) { - for (int k = 0; k < yawTs.length; k++) { - yawBuffer.addSample(yawTs[k], yawPos[k]); - if (k > 0) { - final double dt = yawTs[k] - yawTs[k - 1]; - if (dt > 1e-6) { - yawRateBuffer.addSample(yawTs[k], (yawPos[k] - yawPos[k - 1]) / dt); - } - } - } - } - - // If NO yaw queue, add a single “now” sample once (don’t do this per replay sample) - if (!hasYawQueue) { - final double now = Timer.getFPGATimestamp(); - yawBuffer.addSample(now, imuInputs.yawPositionRad); - yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); - } - - // Replay each odometry sample - for (int i = 0; i < n; i++) { - final double t = ts[i]; - - // Build module positions at this sample index (clamp defensively) - for (int m = 0; m < 4; m++) { - final SwerveModulePosition[] hist = modHist[m]; - if (hist == null || hist.length == 0) { - odomPositions[m] = modules[m].getPosition(); - } else if (i < hist.length) { - odomPositions[m] = hist[i]; - } else { - odomPositions[m] = hist[hist.length - 1]; - } - } - - // Determine yaw at this timestamp - double yawRad = imuInputs.yawPositionRad; // fallback - - if (hasYawQueue) { - if (yawIndexAligned) { - yawRad = yawPos[i]; - - // Keep yaw/yawRate buffers updated in odometry timebase (good for yaw-gate) - yawBuffer.addSample(t, yawRad); - if (i > 0) { - final double dt = yawTs[i] - yawTs[i - 1]; - if (dt > 1e-6) { - yawRateBuffer.addSample(t, (yawPos[i] - yawPos[i - 1]) / dt); - } - } - } else { - // yawBuffer was pre-filled above; interpolate here - yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); - } - } - - // Feed estimator at this historical timestamp - m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); - - // Maintain pose history in SAME timebase as estimator - poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); - } - - Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); - gyroDisconnectedAlert.set(!imuInputs.connected); - return; - - } else { + // NO odometryLock needed unless you touch poseEstimator directly here. + // Keep this focused on control / setpoints / characterization. - // SIMULATION: Keep sim pose buffer time-aligned, too - double now = Timer.getFPGATimestamp(); - poseBuffer.addSample(now, simPhysics.getPose()); - yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); - yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); - - Logger.recordOutput("Drive/Pose", simPhysics.getPose()); - gyroDisconnectedAlert.set(false); - } - } finally { - odometryLock.unlock(); + if (DriverStation.isDisabled()) { + for (var module : modules) module.stop(); + Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); + Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); } + + // If you need module refresh for control, you can either: + // A) leave it in DriveOdometry (recommended) and here just use cached inputs, OR + // B) keep a lightweight "module.controlPeriodic()" that doesn't drain queues. + // + // For now: do nothing here re: odometry. } /** @@ -559,6 +423,9 @@ public Command sysIdDynamic(SysIdRoutine.Direction direction) { } /** Getter Functions ***************************************************** */ + public Module[] getModules() { + return modules; + } /** Returns the module states (turn angles and drive velocities) for all of the modules. 
*/ @AutoLogOutput(key = "SwerveStates/Measured") @@ -862,4 +729,67 @@ private edu.wpi.first.math.Matrix getSimulatedVisionStdDevs() { stdDevs.set(2, 0, Math.toRadians(2)); // rotation standard deviation (radians) return stdDevs; } + + // --- Helpers used by DriveOdometry (package-private) --- + + Pose2d poseEstimatorGetPose() { + return m_PoseEstimator.getEstimatedPosition(); + } + + void poseEstimatorUpdateWithTime(double t, Rotation2d yaw, SwerveModulePosition[] positions) { + m_PoseEstimator.updateWithTime(t, yaw, positions); + } + + void poseBufferAddSample(double t, Pose2d pose) { + poseBuffer.addSample(t, pose); + } + + // Yaw buffer helpers (assuming you already have yawBuffer + yawRateBuffer) + double yawBufferSampleOr(double t, double fallbackYawRad) { + return yawBuffer.getSample(t).orElse(fallbackYawRad); + } + + void yawBuffersAddSample(double t, double yawRad, double yawRateRadPerSec) { + yawBuffer.addSample(t, yawRad); + yawRateBuffer.addSample(t, yawRateRadPerSec); + } + + void yawBuffersFillFromQueue(double[] yawTs, double[] yawPosRad) { + for (int k = 0; k < yawTs.length; k++) { + yawBuffer.addSample(yawTs[k], yawPosRad[k]); + if (k > 0) { + double dt = yawTs[k] - yawTs[k - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(yawTs[k], (yawPosRad[k] - yawPosRad[k - 1]) / dt); + } + } + } + } + + void yawBuffersAddSampleIndexAligned(double t, double[] yawTs, double[] yawPos, int i) { + yawBuffer.addSample(t, yawPos[i]); + if (i > 0) { + double dt = yawTs[i] - yawTs[i - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(t, (yawPos[i] - yawPos[i - 1]) / dt); + } + } + } + + void setGyroDisconnectedAlert(boolean disconnected) { + gyroDisconnectedAlert.set(disconnected); + } + + // Drive.java + public Pose2d getSimPose() { + return simPhysics.getPose(); + } + + public double getSimYawRad() { + return simPhysics.getYaw().getRadians(); + } + + public double getSimYawRateRadPerSec() { + return simPhysics.getOmegaRadPerSec(); + } } diff --git a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java new file mode 100644 index 00000000..494e0ab2 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java @@ -0,0 +1,297 @@ +package frc.robot.subsystems.drive; + +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.kinematics.SwerveModulePosition; +import edu.wpi.first.wpilibj.Timer; +import frc.robot.Constants; +import frc.robot.subsystems.imu.Imu; +import frc.robot.util.RBSIEnum.Mode; +import frc.robot.util.VirtualSubsystem; +import org.littletonrobotics.junction.Logger; + +public final class DriveOdometry extends VirtualSubsystem { + private final Drive drive; + private final Imu imu; + private final Module[] modules; + + // Scratch (no per-loop allocations) + private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; + + public DriveOdometry(Drive drive, Imu imu, Module[] modules) { + this.drive = drive; + this.imu = imu; + this.modules = modules; + } + + // Priority value for this virtual subsystem + @Override + protected int getPeriodPriority() { + return -20; + } + + @Override + public void rbsiPeriodic() { + Drive.odometryLock.lock(); + try { + final var imuInputs = imu.getInputs(); + + // Drain per-module odometry queues ONCE per loop (this also refreshes motor signals) + for (var module : modules) { + module.periodic(); // if you later split, this becomes module.updateInputs() + } + + if (Constants.getMode() == Mode.SIM) { + final double now = 
Timer.getFPGATimestamp(); + drive.poseBufferAddSample(now, drive.getSimPose()); + drive.yawBuffersAddSample(now, drive.getSimYawRad(), drive.getSimYawRateRadPerSec()); + Logger.recordOutput("Drive/Pose", drive.getSimPose()); + return; + } + + // Canonical timestamp queue from module[0] + final double[] ts = modules[0].getOdometryTimestamps(); + final int n = (ts == null) ? 0 : ts.length; + + // Always keep yaw buffers “alive” even if no samples + if (n == 0) { + final double now = Timer.getFPGATimestamp(); + drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); + drive.setGyroDisconnectedAlert(!imuInputs.connected); + return; + } + + // Cache module histories once + final SwerveModulePosition[][] modHist = new SwerveModulePosition[4][]; + for (int m = 0; m < 4; m++) { + modHist[m] = modules[m].getOdometryPositions(); + } + + // Yaw queue availability + final boolean hasYawQueue = + imuInputs.connected + && imuInputs.odometryYawTimestamps != null + && imuInputs.odometryYawPositionsRad != null + && imuInputs.odometryYawTimestamps.length == imuInputs.odometryYawPositionsRad.length + && imuInputs.odometryYawTimestamps.length > 0; + + final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; + final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; + + // Determine index alignment (cheap + deterministic) + boolean yawIndexAligned = false; + if (hasYawQueue && yawTs.length >= n) { + yawIndexAligned = true; + final double eps = 1e-3; // 1ms + for (int i = 0; i < n; i++) { + if (Math.abs(yawTs[i] - ts[i]) > eps) { + yawIndexAligned = false; + break; + } + } + } + + // If yaw not aligned, pre-fill yaw buffers once and interpolate later + if (hasYawQueue && !yawIndexAligned) { + drive.yawBuffersFillFromQueue(yawTs, yawPos); + } else if (!hasYawQueue) { + // Single “now” sample once (not per replay) + final double now = Timer.getFPGATimestamp(); + drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); + } + + // Replay + for (int i = 0; i < n; i++) { + final double t = ts[i]; + + // Build module positions at sample i (clamp defensively) + for (int m = 0; m < 4; m++) { + final SwerveModulePosition[] hist = modHist[m]; + if (hist == null || hist.length == 0) { + odomPositions[m] = modules[m].getPosition(); + } else if (i < hist.length) { + odomPositions[m] = hist[i]; + } else { + odomPositions[m] = hist[hist.length - 1]; + } + } + + double yawRad = imuInputs.yawPositionRad; + if (hasYawQueue) { + if (yawIndexAligned) { + yawRad = yawPos[i]; + // Keep yaw buffers aligned to replay timeline (good for yaw-rate gate) + drive.yawBuffersAddSampleIndexAligned(t, yawTs, yawPos, i); + } else { + yawRad = drive.yawBufferSampleOr(t, imuInputs.yawPositionRad); + } + } + + drive.poseEstimatorUpdateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + drive.poseBufferAddSample(t, drive.poseEstimatorGetPose()); + } + + Logger.recordOutput("Drive/Pose", drive.poseEstimatorGetPose()); + drive.setGyroDisconnectedAlert(!imuInputs.connected); + + } finally { + Drive.odometryLock.unlock(); + } + } + + // /************************************************************************* */ + // /** Periodic function that is called each robot cycle by the command scheduler */ + // @Override + // public void rbsiPeriodic() { + // odometryLock.lock(); + // try { + // // Ensure IMU inputs are fresh for this cycle + // final var imuInputs = imu.getInputs(); + + // // Stop modules & log empty setpoint states if disabled + // 
if (DriverStation.isDisabled()) { + // for (var module : modules) { + // module.stop(); + // } + // Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); + // Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); + // } + + // // Drain per-module odometry queues for this cycle + // for (var module : modules) { + // module.periodic(); + // } + + // // -------- REAL ODOMETRY REPLAY (canonical timestamps from module[0]) -------- + // if (Constants.getMode() != Mode.SIM) { + // final double[] ts = modules[0].getOdometryTimestamps(); + // final int n = (ts == null) ? 0 : ts.length; + + // // Cache per-module histories ONCE (avoid repeated getters in the loop) + // final SwerveModulePosition[][] modHist = new SwerveModulePosition[4][]; + // for (int m = 0; m < 4; m++) { + // modHist[m] = modules[m].getOdometryPositions(); + // } + + // // Determine yaw queue availability + // final boolean hasYawQueue = + // imuInputs.connected + // && imuInputs.odometryYawTimestamps != null + // && imuInputs.odometryYawPositionsRad != null + // && imuInputs.odometryYawTimestamps.length + // == imuInputs.odometryYawPositionsRad.length + // && imuInputs.odometryYawTimestamps.length > 0; + + // final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; + // final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; + + // // If we have no module samples, still keep yaw buffers “alive” for gating callers + // if (n == 0) { + // final double now = Timer.getFPGATimestamp(); + // yawBuffer.addSample(now, imuInputs.yawPositionRad); + // yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); + + // gyroDisconnectedAlert.set(!imuInputs.connected); + // return; + // } + + // // Decide whether yaw queue is index-aligned with module[0] timestamps. + // // We only trust index alignment if BOTH: + // // - yaw has at least n samples + // // - yawTs[i] ~= ts[i] for i in range (tight epsilon) + // boolean yawIndexAligned = false; + // if (hasYawQueue && yawTs.length >= n) { + // yawIndexAligned = true; + // final double eps = 1e-3; // 1 ms + // for (int i = 0; i < n; i++) { + // if (Math.abs(yawTs[i] - ts[i]) > eps) { + // yawIndexAligned = false; + // break; + // } + // } + // } + + // // If yaw is NOT index-aligned but we have a yaw queue, build yaw/yawRate buffers ONCE. 
+ // if (hasYawQueue && !yawIndexAligned) { + // for (int k = 0; k < yawTs.length; k++) { + // yawBuffer.addSample(yawTs[k], yawPos[k]); + // if (k > 0) { + // final double dt = yawTs[k] - yawTs[k - 1]; + // if (dt > 1e-6) { + // yawRateBuffer.addSample(yawTs[k], (yawPos[k] - yawPos[k - 1]) / dt); + // } + // } + // } + // } + + // // If NO yaw queue, add a single “now” sample once (don’t do this per replay sample) + // if (!hasYawQueue) { + // final double now = Timer.getFPGATimestamp(); + // yawBuffer.addSample(now, imuInputs.yawPositionRad); + // yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); + // } + + // // Replay each odometry sample + // for (int i = 0; i < n; i++) { + // final double t = ts[i]; + + // // Build module positions at this sample index (clamp defensively) + // for (int m = 0; m < 4; m++) { + // final SwerveModulePosition[] hist = modHist[m]; + // if (hist == null || hist.length == 0) { + // odomPositions[m] = modules[m].getPosition(); + // } else if (i < hist.length) { + // odomPositions[m] = hist[i]; + // } else { + // odomPositions[m] = hist[hist.length - 1]; + // } + // } + + // // Determine yaw at this timestamp + // double yawRad = imuInputs.yawPositionRad; // fallback + + // if (hasYawQueue) { + // if (yawIndexAligned) { + // yawRad = yawPos[i]; + + // // Keep yaw/yawRate buffers updated in odometry timebase (good for yaw-gate) + // yawBuffer.addSample(t, yawRad); + // if (i > 0) { + // final double dt = yawTs[i] - yawTs[i - 1]; + // if (dt > 1e-6) { + // yawRateBuffer.addSample(t, (yawPos[i] - yawPos[i - 1]) / dt); + // } + // } + // } else { + // // yawBuffer was pre-filled above; interpolate here + // yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); + // } + // } + + // // Feed estimator at this historical timestamp + // m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + + // // Maintain pose history in SAME timebase as estimator + // poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); + // } + + // Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); + // gyroDisconnectedAlert.set(!imuInputs.connected); + // return; + + // } else { + + // // SIMULATION: Keep sim pose buffer time-aligned, too + // double now = Timer.getFPGATimestamp(); + // poseBuffer.addSample(now, simPhysics.getPose()); + // yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); + // yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); + + // Logger.recordOutput("Drive/Pose", simPhysics.getPose()); + // gyroDisconnectedAlert.set(false); + // } + // } finally { + // odometryLock.unlock(); + // } + // } + +} diff --git a/src/main/java/frc/robot/subsystems/imu/Imu.java b/src/main/java/frc/robot/subsystems/imu/Imu.java index f5f63152..c0a1ba6e 100644 --- a/src/main/java/frc/robot/subsystems/imu/Imu.java +++ b/src/main/java/frc/robot/subsystems/imu/Imu.java @@ -36,7 +36,14 @@ public Imu(ImuIO io) { this.io = io; } + // Priority value for this virtual subsystem + @Override + protected int getPeriodPriority() { + return -30; + } + // Periodic function to read inputs + @Override public void rbsiPeriodic() { io.updateInputs(inputs); } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 86c10558..0928d659 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -28,7 +28,7 @@ import edu.wpi.first.math.geometry.Translation2d; import 
edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; -import frc.robot.Constants; +import frc.robot.Constants.Cameras; import frc.robot.FieldConstants; import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; @@ -45,20 +45,23 @@ public class Vision extends VirtualSubsystem { + // Declare the Vision IO + private final VisionIO[] io; + private final VisionIOInputsAutoLogged[] inputs; + /** Vision Consumer definition */ @FunctionalInterface public interface PoseMeasurementConsumer { void accept(TimedPose measurement); } + // Declare pose consumer, drivebase, and epoch reset private final PoseMeasurementConsumer consumer; private final Drive drive; private long lastSeenPoseResetEpoch = -1; - private final VisionIO[] io; - private final VisionIOInputsAutoLogged[] inputs; - - private final Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; + // Declare the camera configurations + private final Cameras.CameraConfig[] camConfigs = Cameras.ALL; // Per-camera monotonic and pose reset gates private final double[] lastAcceptedTsPerCam; @@ -106,6 +109,12 @@ public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... io) { } } + // Priority value for this virtual subsystem + @Override + protected int getPeriodPriority() { + return -10; + } + /** Periodic Function */ @Override public void rbsiPeriodic() { @@ -205,7 +214,7 @@ public void rbsiPeriodic() { TimedPose smoothed = smoothAtTime(tF); if (smoothed == null) return; - // Inject the pose -- commented out for now... + // Inject the pose consumer.accept(smoothed); Logger.recordOutput("Vision/FusedPose", fused.pose()); diff --git a/src/main/java/frc/robot/util/VirtualSubsystem.java b/src/main/java/frc/robot/util/VirtualSubsystem.java index 2996aa33..4b1c57e0 100644 --- a/src/main/java/frc/robot/util/VirtualSubsystem.java +++ b/src/main/java/frc/robot/util/VirtualSubsystem.java @@ -10,6 +10,7 @@ package frc.robot.util; import java.util.ArrayList; +import java.util.Comparator; import java.util.List; import org.littletonrobotics.junction.Logger; @@ -18,14 +19,35 @@ */ public abstract class VirtualSubsystem { private static final List subsystems = new ArrayList<>(); + private static boolean needsSort = false; + private final String name = getClass().getSimpleName(); // Load all defined virtual subsystems into a list public VirtualSubsystem() { subsystems.add(this); + needsSort = true; // a new subsystem changed ordering + } + + /** + * Override to control ordering. Lower runs earlier. + * + *

Example: IMU inputs -30, Drive odometry -20, Vision -10, Coordinator 0. + */ + protected int getPeriodPriority() { + return 0; } public static void periodicAll() { + // Sort once (and again only if new subsystems are constructed) + if (needsSort) { + subsystems.sort( + Comparator.comparingInt(VirtualSubsystem::getPeriodPriority) + // deterministic tie-break to avoid “random” order when priorities match + .thenComparingInt(System::identityHashCode)); + needsSort = false; + } + // Call each virtual subsystem during robotPeriodic() for (VirtualSubsystem subsystem : subsystems) { subsystem.periodic(); From 592314512ded7009357c65a194c79ab51f1d28ac Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Fri, 13 Feb 2026 14:53:30 -0700 Subject: [PATCH 09/19] Update AprilTag definitions --- src/main/deploy/apriltags/2024-crescendo.json | 296 +++++++++ .../apriltags/2025-reefscape-andymark.json | 404 ++++++++++++ ...ficial.json => 2025-reefscape-welded.json} | 0 src/main/deploy/apriltags/2026-none.json | 7 - .../apriltags/2026-rebuilt-andymark.json | 584 ++++++++++++++++++ ...official.json => 2026-rebuilt-welded.json} | 0 src/main/deploy/apriltags/none-andymark.json | 7 + src/main/deploy/apriltags/none-welded.json | 7 + src/main/java/frc/robot/FieldConstants.java | 21 +- src/main/java/frc/robot/RobotContainer.java | 3 +- 10 files changed, 1314 insertions(+), 15 deletions(-) create mode 100644 src/main/deploy/apriltags/2024-crescendo.json create mode 100644 src/main/deploy/apriltags/2025-reefscape-andymark.json rename src/main/deploy/apriltags/{2025-official.json => 2025-reefscape-welded.json} (100%) delete mode 100644 src/main/deploy/apriltags/2026-none.json create mode 100644 src/main/deploy/apriltags/2026-rebuilt-andymark.json rename src/main/deploy/apriltags/{2026-official.json => 2026-rebuilt-welded.json} (100%) create mode 100644 src/main/deploy/apriltags/none-andymark.json create mode 100644 src/main/deploy/apriltags/none-welded.json diff --git a/src/main/deploy/apriltags/2024-crescendo.json b/src/main/deploy/apriltags/2024-crescendo.json new file mode 100644 index 00000000..8cb837a5 --- /dev/null +++ b/src/main/deploy/apriltags/2024-crescendo.json @@ -0,0 +1,296 @@ +{ + "tags": [ + { + "ID": 1, + "pose": { + "translation": { + "x": 15.079471999999997, + "y": 0.24587199999999998, + "z": 1.355852 + }, + "rotation": { + "quaternion": { + "W": 0.5000000000000001, + "X": 0.0, + "Y": 0.0, + "Z": 0.8660254037844386 + } + } + } + }, + { + "ID": 2, + "pose": { + "translation": { + "x": 16.185134, + "y": 0.883666, + "z": 1.355852 + }, + "rotation": { + "quaternion": { + "W": 0.5000000000000001, + "X": 0.0, + "Y": 0.0, + "Z": 0.8660254037844386 + } + } + } + }, + { + "ID": 3, + "pose": { + "translation": { + "x": 16.579342, + "y": 4.982717999999999, + "z": 1.4511020000000001 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 4, + "pose": { + "translation": { + "x": 16.579342, + "y": 5.547867999999999, + "z": 1.4511020000000001 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 5, + "pose": { + "translation": { + "x": 14.700757999999999, + "y": 8.2042, + "z": 1.355852 + }, + "rotation": { + "quaternion": { + "W": -0.7071067811865475, + "X": -0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 6, + "pose": { + "translation": { + "x": 1.8415, + "y": 8.2042, + "z": 1.355852 + }, + "rotation": { + "quaternion": { + 
"W": -0.7071067811865475, + "X": -0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 7, + "pose": { + "translation": { + "x": -0.038099999999999995, + "y": 5.547867999999999, + "z": 1.4511020000000001 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 8, + "pose": { + "translation": { + "x": -0.038099999999999995, + "y": 4.982717999999999, + "z": 1.4511020000000001 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 9, + "pose": { + "translation": { + "x": 0.356108, + "y": 0.883666, + "z": 1.355852 + }, + "rotation": { + "quaternion": { + "W": 0.8660254037844387, + "X": 0.0, + "Y": 0.0, + "Z": 0.49999999999999994 + } + } + } + }, + { + "ID": 10, + "pose": { + "translation": { + "x": 1.4615159999999998, + "y": 0.24587199999999998, + "z": 1.355852 + }, + "rotation": { + "quaternion": { + "W": 0.8660254037844387, + "X": 0.0, + "Y": 0.0, + "Z": 0.49999999999999994 + } + } + } + }, + { + "ID": 11, + "pose": { + "translation": { + "x": 11.904726, + "y": 3.7132259999999997, + "z": 1.3208 + }, + "rotation": { + "quaternion": { + "W": -0.8660254037844387, + "X": -0.0, + "Y": 0.0, + "Z": 0.49999999999999994 + } + } + } + }, + { + "ID": 12, + "pose": { + "translation": { + "x": 11.904726, + "y": 4.49834, + "z": 1.3208 + }, + "rotation": { + "quaternion": { + "W": 0.8660254037844387, + "X": 0.0, + "Y": 0.0, + "Z": 0.49999999999999994 + } + } + } + }, + { + "ID": 13, + "pose": { + "translation": { + "x": 11.220196, + "y": 4.105148, + "z": 1.3208 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 14, + "pose": { + "translation": { + "x": 5.320792, + "y": 4.105148, + "z": 1.3208 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 15, + "pose": { + "translation": { + "x": 4.641342, + "y": 4.49834, + "z": 1.3208 + }, + "rotation": { + "quaternion": { + "W": 0.5000000000000001, + "X": 0.0, + "Y": 0.0, + "Z": 0.8660254037844386 + } + } + } + }, + { + "ID": 16, + "pose": { + "translation": { + "x": 4.641342, + "y": 3.7132259999999997, + "z": 1.3208 + }, + "rotation": { + "quaternion": { + "W": -0.4999999999999998, + "X": -0.0, + "Y": 0.0, + "Z": 0.8660254037844387 + } + } + } + } + ], + "field": { + "length": 16.541, + "width": 8.211 + } +} diff --git a/src/main/deploy/apriltags/2025-reefscape-andymark.json b/src/main/deploy/apriltags/2025-reefscape-andymark.json new file mode 100644 index 00000000..60b60e5e --- /dev/null +++ b/src/main/deploy/apriltags/2025-reefscape-andymark.json @@ -0,0 +1,404 @@ +{ + "tags": [ + { + "ID": 1, + "pose": { + "translation": { + "x": 16.687292, + "y": 0.628142, + "z": 1.4859 + }, + "rotation": { + "quaternion": { + "W": 0.4539904997395468, + "X": 0.0, + "Y": 0.0, + "Z": 0.8910065241883678 + } + } + } + }, + { + "ID": 2, + "pose": { + "translation": { + "x": 16.687292, + "y": 7.414259999999999, + "z": 1.4859 + }, + "rotation": { + "quaternion": { + "W": -0.45399049973954675, + "X": -0.0, + "Y": 0.0, + "Z": 0.8910065241883679 + } + } + } + }, + { + "ID": 3, + "pose": { + "translation": { + "x": 11.49096, + "y": 8.031733999999998, + "z": 1.30175 + }, + "rotation": { + "quaternion": { + "W": -0.7071067811865475, + "X": -0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 4, + "pose": { + "translation": { + "x": 9.276079999999999, + "y": 6.132575999999999, + "z": 
1.8679160000000001 + }, + "rotation": { + "quaternion": { + "W": 0.9659258262890683, + "X": 0.0, + "Y": 0.25881904510252074, + "Z": 0.0 + } + } + } + }, + { + "ID": 5, + "pose": { + "translation": { + "x": 9.276079999999999, + "y": 1.9098259999999998, + "z": 1.8679160000000001 + }, + "rotation": { + "quaternion": { + "W": 0.9659258262890683, + "X": 0.0, + "Y": 0.25881904510252074, + "Z": 0.0 + } + } + } + }, + { + "ID": 6, + "pose": { + "translation": { + "x": 13.474446, + "y": 3.3012379999999997, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": -0.8660254037844387, + "X": -0.0, + "Y": 0.0, + "Z": 0.49999999999999994 + } + } + } + }, + { + "ID": 7, + "pose": { + "translation": { + "x": 13.890498, + "y": 4.0208200000000005, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 8, + "pose": { + "translation": { + "x": 13.474446, + "y": 4.740402, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": 0.8660254037844387, + "X": 0.0, + "Y": 0.0, + "Z": 0.49999999999999994 + } + } + } + }, + { + "ID": 9, + "pose": { + "translation": { + "x": 12.643358, + "y": 4.740402, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": 0.5000000000000001, + "X": 0.0, + "Y": 0.0, + "Z": 0.8660254037844386 + } + } + } + }, + { + "ID": 10, + "pose": { + "translation": { + "x": 12.227305999999999, + "y": 4.0208200000000005, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 11, + "pose": { + "translation": { + "x": 12.643358, + "y": 3.3012379999999997, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": -0.4999999999999998, + "X": -0.0, + "Y": 0.0, + "Z": 0.8660254037844387 + } + } + } + }, + { + "ID": 12, + "pose": { + "translation": { + "x": 0.8613139999999999, + "y": 0.628142, + "z": 1.4859 + }, + "rotation": { + "quaternion": { + "W": 0.8910065241883679, + "X": 0.0, + "Y": 0.0, + "Z": 0.45399049973954675 + } + } + } + }, + { + "ID": 13, + "pose": { + "translation": { + "x": 0.8613139999999999, + "y": 7.414259999999999, + "z": 1.4859 + }, + "rotation": { + "quaternion": { + "W": -0.8910065241883678, + "X": -0.0, + "Y": 0.0, + "Z": 0.45399049973954686 + } + } + } + }, + { + "ID": 14, + "pose": { + "translation": { + "x": 8.272272, + "y": 6.132575999999999, + "z": 1.8679160000000001 + }, + "rotation": { + "quaternion": { + "W": 5.914589856893349e-17, + "X": -0.25881904510252074, + "Y": 1.5848095757158825e-17, + "Z": 0.9659258262890683 + } + } + } + }, + { + "ID": 15, + "pose": { + "translation": { + "x": 8.272272, + "y": 1.9098259999999998, + "z": 1.8679160000000001 + }, + "rotation": { + "quaternion": { + "W": 5.914589856893349e-17, + "X": -0.25881904510252074, + "Y": 1.5848095757158825e-17, + "Z": 0.9659258262890683 + } + } + } + }, + { + "ID": 16, + "pose": { + "translation": { + "x": 6.057646, + "y": 0.010667999999999999, + "z": 1.30175 + }, + "rotation": { + "quaternion": { + "W": 0.7071067811865476, + "X": 0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 17, + "pose": { + "translation": { + "x": 4.073905999999999, + "y": 3.3012379999999997, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": -0.4999999999999998, + "X": -0.0, + "Y": 0.0, + "Z": 0.8660254037844387 + } + } + } + }, + { + "ID": 18, + "pose": { + "translation": { + "x": 3.6576, + "y": 4.0208200000000005, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + 
"Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 19, + "pose": { + "translation": { + "x": 4.073905999999999, + "y": 4.740402, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": 0.5000000000000001, + "X": 0.0, + "Y": 0.0, + "Z": 0.8660254037844386 + } + } + } + }, + { + "ID": 20, + "pose": { + "translation": { + "x": 4.904739999999999, + "y": 4.740402, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": 0.8660254037844387, + "X": 0.0, + "Y": 0.0, + "Z": 0.49999999999999994 + } + } + } + }, + { + "ID": 21, + "pose": { + "translation": { + "x": 5.321046, + "y": 4.0208200000000005, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 22, + "pose": { + "translation": { + "x": 4.904739999999999, + "y": 3.3012379999999997, + "z": 0.308102 + }, + "rotation": { + "quaternion": { + "W": -0.8660254037844387, + "X": -0.0, + "Y": 0.0, + "Z": 0.49999999999999994 + } + } + } + } + ], + "field": { + "length": 17.548, + "width": 8.042 + } +} diff --git a/src/main/deploy/apriltags/2025-official.json b/src/main/deploy/apriltags/2025-reefscape-welded.json similarity index 100% rename from src/main/deploy/apriltags/2025-official.json rename to src/main/deploy/apriltags/2025-reefscape-welded.json diff --git a/src/main/deploy/apriltags/2026-none.json b/src/main/deploy/apriltags/2026-none.json deleted file mode 100644 index 4763fdac..00000000 --- a/src/main/deploy/apriltags/2026-none.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "tags": [], - "field": { - "length": 16.4592, - "width": 8.2296 - } -} diff --git a/src/main/deploy/apriltags/2026-rebuilt-andymark.json b/src/main/deploy/apriltags/2026-rebuilt-andymark.json new file mode 100644 index 00000000..ecc03907 --- /dev/null +++ b/src/main/deploy/apriltags/2026-rebuilt-andymark.json @@ -0,0 +1,584 @@ +{ + "tags": [ + { + "ID": 1, + "pose": { + "translation": { + "x": 11.863959, + "y": 7.411491399999999, + "z": 0.889 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 2, + "pose": { + "translation": { + "x": 11.9013986, + "y": 4.6247558, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 0.7071067811865476, + "X": 0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 3, + "pose": { + "translation": { + "x": 11.2978438, + "y": 4.3769534, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 4, + "pose": { + "translation": { + "x": 11.2978438, + "y": 4.0213534, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 5, + "pose": { + "translation": { + "x": 11.9013986, + "y": 3.417951, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": -0.7071067811865475, + "X": -0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 6, + "pose": { + "translation": { + "x": 11.863959, + "y": 0.6312154, + "z": 0.889 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 7, + "pose": { + "translation": { + "x": 11.9388636, + "y": 0.6312154, + "z": 0.889 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 8, + "pose": { + "translation": { + "x": 12.2569986, + "y": 3.417951, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 
-0.7071067811865475, + "X": -0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 9, + "pose": { + "translation": { + "x": 12.5051566, + "y": 3.6657534, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 10, + "pose": { + "translation": { + "x": 12.5051566, + "y": 4.0213534, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 11, + "pose": { + "translation": { + "x": 12.2569986, + "y": 4.6247558, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 0.7071067811865476, + "X": 0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 12, + "pose": { + "translation": { + "x": 11.9388636, + "y": 7.411491399999999, + "z": 0.889 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 13, + "pose": { + "translation": { + "x": 16.499332, + "y": 7.391907999999999, + "z": 0.55245 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 14, + "pose": { + "translation": { + "x": 16.499332, + "y": 6.960107999999999, + "z": 0.55245 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 15, + "pose": { + "translation": { + "x": 16.4989764, + "y": 4.3124882, + "z": 0.55245 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 16, + "pose": { + "translation": { + "x": 16.4989764, + "y": 3.8806881999999994, + "z": 0.55245 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 17, + "pose": { + "translation": { + "x": 4.6490636, + "y": 0.6312154, + "z": 0.889 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 18, + "pose": { + "translation": { + "x": 4.6115986, + "y": 3.417951, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": -0.7071067811865475, + "X": -0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 19, + "pose": { + "translation": { + "x": 5.2151534, + "y": 3.6657534, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 20, + "pose": { + "translation": { + "x": 5.2151534, + "y": 4.0213534, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 21, + "pose": { + "translation": { + "x": 4.6115986, + "y": 4.6247558, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 0.7071067811865476, + "X": 0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 22, + "pose": { + "translation": { + "x": 4.6490636, + "y": 7.411491399999999, + "z": 0.889 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 23, + "pose": { + "translation": { + "x": 4.574159, + "y": 7.411491399999999, + "z": 0.889 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 24, + "pose": { + "translation": { + "x": 4.2559986, + "y": 4.6247558, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 0.7071067811865476, + "X": 0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + 
} + }, + { + "ID": 25, + "pose": { + "translation": { + "x": 4.007866, + "y": 4.3769534, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 26, + "pose": { + "translation": { + "x": 4.007866, + "y": 4.0213534, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 27, + "pose": { + "translation": { + "x": 4.2559986, + "y": 3.417951, + "z": 1.12395 + }, + "rotation": { + "quaternion": { + "W": -0.7071067811865475, + "X": -0.0, + "Y": 0.0, + "Z": 0.7071067811865476 + } + } + } + }, + { + "ID": 28, + "pose": { + "translation": { + "x": 4.574159, + "y": 0.6312154, + "z": 0.889 + }, + "rotation": { + "quaternion": { + "W": 6.123233995736766e-17, + "X": 0.0, + "Y": 0.0, + "Z": 1.0 + } + } + } + }, + { + "ID": 29, + "pose": { + "translation": { + "x": 0.0136906, + "y": 0.6507734, + "z": 0.55245 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 30, + "pose": { + "translation": { + "x": 0.0136906, + "y": 1.0825734, + "z": 0.55245 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 31, + "pose": { + "translation": { + "x": 0.0140462, + "y": 3.7301932, + "z": 0.55245 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + }, + { + "ID": 32, + "pose": { + "translation": { + "x": 0.0140462, + "y": 4.1619931999999995, + "z": 0.55245 + }, + "rotation": { + "quaternion": { + "W": 1.0, + "X": 0.0, + "Y": 0.0, + "Z": 0.0 + } + } + } + } + ], + "field": { + "length": 16.518, + "width": 8.043 + } +} diff --git a/src/main/deploy/apriltags/2026-official.json b/src/main/deploy/apriltags/2026-rebuilt-welded.json similarity index 100% rename from src/main/deploy/apriltags/2026-official.json rename to src/main/deploy/apriltags/2026-rebuilt-welded.json diff --git a/src/main/deploy/apriltags/none-andymark.json b/src/main/deploy/apriltags/none-andymark.json new file mode 100644 index 00000000..318fdac6 --- /dev/null +++ b/src/main/deploy/apriltags/none-andymark.json @@ -0,0 +1,7 @@ +{ + "tags": [], + "field": { + "length": 16.518, + "width": 8.043 + } +} diff --git a/src/main/deploy/apriltags/none-welded.json b/src/main/deploy/apriltags/none-welded.json new file mode 100644 index 00000000..63337983 --- /dev/null +++ b/src/main/deploy/apriltags/none-welded.json @@ -0,0 +1,7 @@ +{ + "tags": [], + "field": { + "length": 16.541, + "width": 8.069 + } +} diff --git a/src/main/java/frc/robot/FieldConstants.java b/src/main/java/frc/robot/FieldConstants.java index 729b45fe..34c356df 100644 --- a/src/main/java/frc/robot/FieldConstants.java +++ b/src/main/java/frc/robot/FieldConstants.java @@ -36,25 +36,32 @@ */ public class FieldConstants { + /** Specify which type of field your robot is using */ + private static final AprilTagLayoutType fieldType = AprilTagLayoutType.REBUILT_WELDED; + /** AprilTag Field Layout ************************************************ */ public static final double aprilTagWidth = Inches.of(6.50).in(Meters); public static final String aprilTagFamily = "36h11"; - public static final double fieldLength = AprilTagLayoutType.OFFICIAL.getLayout().getFieldLength(); - public static final double fieldWidth = AprilTagLayoutType.OFFICIAL.getLayout().getFieldWidth(); + public static final double fieldLength = fieldType.getLayout().getFieldLength(); + public 
static final double fieldWidth = fieldType.getLayout().getFieldWidth(); - public static final int aprilTagCount = AprilTagLayoutType.OFFICIAL.getLayout().getTags().size(); - public static final AprilTagLayoutType defaultAprilTagType = AprilTagLayoutType.OFFICIAL; + public static final int aprilTagCount = fieldType.getLayout().getTags().size(); + public static final AprilTagLayoutType defaultAprilTagType = fieldType; public static final AprilTagFieldLayout aprilTagLayout = AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); @Getter public enum AprilTagLayoutType { - OFFICIAL("2026-official"), - NONE("2026-none"), - REEFSCAPE("2025-official"); + REBUILT_WELDED("2026-rebuilt-welded"), + REBUILT_ANDYMARK("2026-rebuilt-andymark"), + REEFSCAPE_WELDED("2025-reefscape-welded"), + REEFSCAPE_ANDYMARK("2025-reefscape-andymark"), + CRESCENDO("2024-crescendo"), + NONE_WELDED("none-welded"), + NONE_ANDYMARK("none-andymark"); AprilTagLayoutType(String name) { try {
diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 8e4fdd9a..5b74cf60 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -469,7 +469,8 @@ public void getAutonomousCommandChoreo() { /** Updates the alerts. */ public void updateAlerts() { // AprilTag layout alert - boolean aprilTagAlertActive = Constants.getAprilTagLayoutType() != AprilTagLayoutType.OFFICIAL; + boolean aprilTagAlertActive = + Constants.getAprilTagLayoutType() != AprilTagLayoutType.REBUILT_WELDED; aprilTagLayoutAlert.set(aprilTagAlertActive); if (aprilTagAlertActive) { aprilTagLayoutAlert.setText(
From 1c0875fd57e8dc5cad6f70dfff2ab88862108dec Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Fri, 13 Feb 2026 14:55:03 -0700 Subject: [PATCH 10/19] Clean up Subsystem defs --- .../java/frc/robot/subsystems/drive/ModuleIOTalonFX.java | 2 +- src/main/java/frc/robot/util/RBSISubsystem.java | 4 ++-- src/main/java/frc/robot/util/VirtualSubsystem.java | 5 +++++ 3 files changed, 8 insertions(+), 3 deletions(-)
diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java index df569b4b..c7015bfc 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java @@ -305,7 +305,7 @@ public void updateInputs(ModuleIOInputs inputs) { final int driveCount = drivePositionQueue.size(); final int turnCount = turnPositionQueue.size(); - // Only consume the common prefix — guarantees alignment + // Only consume the common prefix -- guarantees alignment final int sampleCount = Math.min(tsCount, Math.min(driveCount, turnCount)); if (sampleCount <= 0) {
diff --git a/src/main/java/frc/robot/util/RBSISubsystem.java b/src/main/java/frc/robot/util/RBSISubsystem.java index 3aa3b8b9..cb135207 100644 --- a/src/main/java/frc/robot/util/RBSISubsystem.java +++ b/src/main/java/frc/robot/util/RBSISubsystem.java @@ -38,8 +38,8 @@ public abstract class RBSISubsystem extends SubsystemBase { public final void periodic() { long start = System.nanoTime(); rbsiPeriodic(); - long end = System.nanoTime(); - Logger.recordOutput("Loop/Mech/" + name + "_ms", (end - start) / 1e6); + // Log the timing for this subsystem + Logger.recordOutput("Loop/Mech/" + name + "_ms", (System.nanoTime() - start) / 1e6); } /** Subclasses must implement this instead of periodic().
*/ diff --git a/src/main/java/frc/robot/util/VirtualSubsystem.java b/src/main/java/frc/robot/util/VirtualSubsystem.java index 4b1c57e0..36ece7bd 100644 --- a/src/main/java/frc/robot/util/VirtualSubsystem.java +++ b/src/main/java/frc/robot/util/VirtualSubsystem.java @@ -38,6 +38,10 @@ protected int getPeriodPriority() { return 0; } + /** + * Run the periodic functions of each subsystem in the order determined by the getPeriodPriority() + * of each. + */ public static void periodicAll() { // Sort once (and again only if new subsystems are constructed) if (needsSort) { @@ -68,6 +72,7 @@ public static void periodicAll() { public final void periodic() { long start = System.nanoTime(); rbsiPeriodic(); + // Log the timing for this subsystem Logger.recordOutput("Loop/Virtual/" + name + "_ms", (System.nanoTime() - start) / 1e6); }
From 5569b2b367daedc4bcd9c36b9ff1063db13f93e0 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Fri, 13 Feb 2026 17:29:27 -0700 Subject: [PATCH 11/19] Clean up the IMU code a little --- doc/RBSI-PoseBuffer.md | 131 ++++++++++++++++++ doc/RBSI-Vision.md | 2 +- src/main/java/frc/robot/Constants.java | 2 + .../robot/subsystems/drive/DriveOdometry.java | 17 +++ .../java/frc/robot/subsystems/imu/Imu.java | 53 +++++-- .../java/frc/robot/subsystems/imu/ImuIO.java | 26 ++-- .../frc/robot/subsystems/imu/ImuIONavX.java | 56 +++++--- .../robot/subsystems/imu/ImuIOPigeon2.java | 50 ++++--- 8 files changed, 270 insertions(+), 67 deletions(-) create mode 100644 doc/RBSI-PoseBuffer.md
diff --git a/doc/RBSI-PoseBuffer.md b/doc/RBSI-PoseBuffer.md new file mode 100644 index 00000000..efbb828b --- /dev/null +++ b/doc/RBSI-PoseBuffer.md @@ -0,0 +1,131 @@ +# Az-RBSI Pose Buffering
+
+This page contains documentation for the Odometry and Vision Pose Buffering +system included in Az-RBSI. It is intended more as a reference than as a +how-to guide for teams.
+
+--------
+
+### Background for Need
+
+In previous versions of the Az-RBSI (or the base [AdvantageKit templates]( +https://docs.advantagekit.org/getting-started/template-projects)), there was a +temporal disconnect between the "now" state of the drivetrain odometry (wheel +encoders + gyro) and the "delayed" state of vision measurements (by exposure +time, pipeline processing, and network transport). Essentially, **the +different sensors on the robot do not all report data at the same time**. This +delay can be 30–120 ms -- which is huge when your robot can move a foot in that +time. Attempting to correct the "now" odometric pose with a "delayed" vision +estimate introduces systematic error that can cause jitter in the pose estimate +and incorrect downstream computed values.
+
+### What is Pose Buffering
+
+A pose buffer lets you store a time history of your robot’s estimated pose so +that when a delayed vision measurement arrives, you can rewind the state +estimator to the exact timestamp the image was captured, inject the correction, +and then replay forward to the present. In essence, pose buffers enable **time +alignment between subsystems**. Since the Az-RBSI assumes input from multiple +cameras, combined with the IMU yaw queues and high-frequency module +odometry, everything must agree on a common timebase. Introducing a pose +buffer allows us to query, "What did odometry think the robot pose was at time +`t`?" and compute the transform between two timestamps. This is the key to +making latency-aware fusion mathematically valid.
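+
+As a concrete (but simplified) sketch of the idea, the snippet below uses WPILib's
+standard `TimeInterpolatableBuffer` and `SwerveDrivePoseEstimator` to keep a pose
+history and to apply a delayed vision measurement at its capture time. The class and
+method names here (`PoseBufferSketch`, `onOdometrySample`, `onVisionMeasurement`) are
+illustrative assumptions, not the actual Az-RBSI classes, and the real implementation
+differs in detail.
+
+```java
+import edu.wpi.first.math.estimator.SwerveDrivePoseEstimator;
+import edu.wpi.first.math.geometry.Pose2d;
+import edu.wpi.first.math.geometry.Rotation2d;
+import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer;
+import edu.wpi.first.math.kinematics.SwerveModulePosition;
+
+/** Minimal pose-buffer sketch (illustrative only, not the Az-RBSI implementation). */
+class PoseBufferSketch {
+  private final SwerveDrivePoseEstimator estimator;
+  // Keep ~2 seconds of pose history, keyed by FPGA timestamp (seconds)
+  private final TimeInterpolatableBuffer<Pose2d> poseBuffer =
+      TimeInterpolatableBuffer.createBuffer(2.0);
+
+  PoseBufferSketch(SwerveDrivePoseEstimator estimator) {
+    this.estimator = estimator;
+  }
+
+  /** Called for every high-rate odometry sample, in timestamp order. */
+  void onOdometrySample(double t, Rotation2d yaw, SwerveModulePosition[] positions) {
+    estimator.updateWithTime(t, yaw, positions);
+    poseBuffer.addSample(t, estimator.getEstimatedPosition());
+  }
+
+  /** Called when a delayed vision pose arrives, stamped with its capture time. */
+  void onVisionMeasurement(Pose2d visionPose, double captureTime) {
+    // "What did odometry think at time t?" -- reject anything older than our history window.
+    if (poseBuffer.getSample(captureTime).isEmpty()) {
+      return;
+    }
+    // Correct the past; the estimator carries the correction forward to the present.
+    estimator.addVisionMeasurement(visionPose, captureTime);
+  }
+}
+```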
+
+Pose buffers dramatically improve **stability and predictability** as well. +They can prevent feedback oscillations caused by injecting corrections at +inconsistent times and enable smoothing, gating, and replay-based estimators. +These are incredibly important for accurate autonomous paths, reliable +auto-aim, and multi-camera fusion.
+
+### Implementation as Virtual Subsystems
+
+The Az-RBSI pose buffer implementation is based on the principle that **Drive +owns the authoritative pose history** via a `poseBuffer` keyed by FPGA +timestamp, and we make sure that buffer is populated using the *same timebase* +as the estimator. Rather than updating the estimator only “once per loop,” +**all high-rate odometry samples** collected since the last loop are replayed +and inserted into the buffer.
+
+We have a series of three Virtual Subsystems that work together to compute the +estimated position of the robot each loop (20 ms), polled in this order:
+* Imu
+* DriveOdometry
+* Vision
+
+The IMU is treated separately from the rest of the drive odometry because we +use its values in the Accelerometer virtual subsystem to compute the +accelerations the robot undergoes. Its `inputs` snapshot is refreshed before +odometry replay runs, so during the replay we prefer using the IMU’s +**odometry yaw queue** when it exists and is aligned to the drivetrain odometry +timestamps. If not, we fall back to interpolating yaw from `yawBuffer` (or +"now" yaw if we have no queue). This allows for stable yaw-rate gating +(single-camera measurements discarded if the robot is spinning too quickly) +because `yawRateBuffer` is updated in the same timebase as the odometry replay. +When vision asks, "what is the max yaw rate over `[ts-lookback, ts]`," it is +querying a consistent history instead of a mix of current-time samples.
+
+The `DriveOdometry` virtual subsystem drains the PhoenixOdometryThread queues +to get a canonical timestamp array, from which the `SwerveModulePosition` +snapshots for each sample index are built. The yaw from the IMU +is computed for that same sample time, then the module positions and yaw are +added to the pose estimator using the `.updateWithTime()` function for each +timestamp in the queue. At the same time, we add the sample to the pose buffer +so that later consumers (vision alignment, gating, smoothing) can ask, "What +did the robot think at time `t`?" Practically, it runs early in the loop, +drains module odometry queues, performs the `updateWithTime(...)` replay, and +keeps the `poseBuffer`, `yawBuffer`, and `yawRateBuffer` coherent.
+
+Vision measurements get included *after* odometry has advanced and buffered +poses for the relevant timestamps. Vision reads all camera observations, +applies various gates, chooses one best observation per camera, then fuses them +by picking a fusion time `tF` (newest accepted timestamp), and **time-aligning +each camera estimate from its `ts` to `tF` using Drive’s pose buffer**. The +smoothed/fused result is then injected through the `addVisionMeasurement()` +consumer in `Drive` at the correct timestamp. The key is: we never try to +"correct the present" with delayed vision; we correct the past, and the +estimator/pose buffer machinery carries that correction forward coherently.
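+
+To make the time-alignment step concrete, a simplified version of that "align from
+`ts` to `tF`" operation could look like the sketch below. This is illustrative only:
+the real logic lives in `Vision`'s `timeAlignPose(...)` helper and Drive's pose buffer,
+and the class, method, and variable names here are assumptions.
+
+```java
+import edu.wpi.first.math.geometry.Pose2d;
+import edu.wpi.first.math.geometry.Transform2d;
+import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer;
+
+/** Illustrative helper, not the Az-RBSI implementation. */
+final class TimeAlignSketch {
+  private TimeAlignSketch() {}
+
+  /** Shift a camera pose from its capture time ts to the fusion time tF. */
+  static Pose2d alignToFusionTime(
+      TimeInterpolatableBuffer<Pose2d> poseBuffer, Pose2d cameraPoseAtTs, double ts, double tF) {
+    var odomAtTs = poseBuffer.getSample(ts); // what odometry thought at the capture time
+    var odomAtTf = poseBuffer.getSample(tF); // what odometry thinks at the fusion time
+    if (odomAtTs.isEmpty() || odomAtTf.isEmpty()) {
+      return null; // outside the buffered history -- caller should skip this observation
+    }
+    // Robot motion between ts and tF, as measured by odometry
+    Transform2d odomMotion = new Transform2d(odomAtTs.get(), odomAtTf.get());
+    // Carry the camera estimate forward by that same motion
+    return cameraPoseAtTs.plus(odomMotion);
+  }
+}
+```
+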
+To guarantee the right computation order, we implemented a simple +**priority/ordering mechanism for VirtualSubsystems** rather than relying on +construction order. Conceptually: `Imu` runs first (refresh sensor snapshot and +yaw queue), then `DriveOdometry` runs (drain odometry queues, replay estimator, +update buffers), then `Vision` runs (query pose history, fuse, inject +measurements), and finally anything downstream (targeting, coordinators, etc.). +With explicit ordering, vision always sees a pose buffer that is current +through the latest replayed timestamps, and its time alignment transform uses +consistent odometry states.
+
+### Relationship Between Pose Buffering and Hardware Control Subsystems
+
+The `DriveOdometry` virtual subsystem exists to **isolate the heavy, +timing-sensitive replay logic** from the rest of `Drive` "control" behavior. +This separation allows `Drive`’s main subsystem to focus on setpoints/commands, +while `DriveOdometry` guarantees that by the time anything else runs, odometry +state and buffers are already up to date for the current cycle.
+
+The pose buffering system sits cleanly between **raw hardware sampling** and +**high-level control**, acting as a time-synchronized "memory layer" for the +robot’s physical motion. At the bottom, hardware devices are sampled at high +frequency, with measurements timestamped in FPGA time and compensated for CAN +latency. Those timestamped samples are drained and replayed inside +`DriveOdometry`, which feeds the `SwerveDrivePoseEstimator` object. This means +the estimator is not tied to the 20 ms main loop -- it advances according to +the *actual sensor sample times*. The result is a pose estimate that reflects +real drivetrain physics at the rate the hardware can provide, not just the +scheduler tick rate.
+
+On the control side, everything -- heading controllers, auto-alignment, vision +fusion, targeting -- consumes pose data that is both temporally accurate and +historically queryable. Controllers still run at the 20 ms loop cycle, but +they operate on a pose that was built from high-rate, latency-compensated +measurements. When vision injects corrections, it does so at the correct +historical timestamp, and the estimator propagates that correction forward +consistently. The net effect is tighter autonomous path tracking, more stable +aiming, and reduced oscillation under aggressive maneuvers -- because control +decisions are based on a pose model that more faithfully represents the real +robot’s motion and sensor timing rather than a simplified "latest value only" +approximation.
diff --git a/doc/RBSI-Vision.md b/doc/RBSI-Vision.md index 43270986..18892855 100644 --- a/doc/RBSI-Vision.md +++ b/doc/RBSI-Vision.md @@ -72,7 +72,7 @@ using a measuring tape to compare the reported vision-derived distance from each camera to one or more AprilTags with reality. -#### Using PhotonVision for vision simulation +#### Using PhotonVision for Vision Simulation This is an advanced topic, and is therefore in the Restricted Section. (More information about vision simulation to come in a future release.)
diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 23a3dc33..df22b2c7 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -106,6 +106,8 @@ public static void disableHAL() { public static final boolean tuningMode = false; + public static final double G_TO_MPS2 = 9.80665; // Gravitational acceleration in m/s/s + /************************************************************************* */ /** Physical Constants for Robot Operation ******************************* */ public static final class RobotConstants { diff --git a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java index 494e0ab2..f26a07c8 100644 --- a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java +++ b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java @@ -1,3 +1,20 @@ +// Copyright (c) 2024-2026 Az-FIRST +// http://github.com/AZ-First +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. + package frc.robot.subsystems.drive; import edu.wpi.first.math.geometry.Rotation2d; diff --git a/src/main/java/frc/robot/subsystems/imu/Imu.java b/src/main/java/frc/robot/subsystems/imu/Imu.java index c0a1ba6e..854f7182 100644 --- a/src/main/java/frc/robot/subsystems/imu/Imu.java +++ b/src/main/java/frc/robot/subsystems/imu/Imu.java @@ -22,58 +22,95 @@ import frc.robot.util.VirtualSubsystem; public class Imu extends VirtualSubsystem { + + // Declare the io and inputs private final ImuIO io; private final ImuIO.ImuIOInputs inputs = new ImuIO.ImuIOInputs(); - // Optional per-cycle cached objects (avoid repeated allocations) + // Per-cycle cached objects (to avoid repeated allocations) private long cacheStampNs = -1L; private Rotation2d cachedYaw = Rotation2d.kZero; private Translation3d cachedAccel = Translation3d.kZero; private Translation3d cachedJerk = Translation3d.kZero; - // Constructor + /** Constructor */ public Imu(ImuIO io) { this.io = io; } - // Priority value for this virtual subsystem + /** + * Priority value for this virtual subsystem + * + *

See `frc.robot.util.VirtualSubsystem` for a description of the suggested values for various + * virtual subsystems. + */ @Override protected int getPeriodPriority() { return -30; } - // Periodic function to read inputs - @Override + /** Periodic function to read inputs */ public void rbsiPeriodic() { io.updateInputs(inputs); } - /** Hot-path access: primitive-only snapshot */ + /** + * Get the inputs objects + * + *

Hot-path access: primitive-only snapshot + * + * @return The inputs objects + */ public ImuIO.ImuIOInputs getInputs() { return inputs; } - /** Readable boundary: Rotation2d (alloc/cached per timestamp) */ + /** + * Get the current YAW + * + *

This function updates the caches if needed. + * + * @return The current YAW as a Rotation2d object + */ public Rotation2d getYaw() { refreshCachesIfNeeded(); return cachedYaw; } - /** Readable boundary: Translation3d accel (alloc/cached per timestamp) */ + /** + * Get the current linear acceleration + * + *

This function updates the caches as needed. + * + * @return The current linear acceleration as a Translation3d object + */ public Translation3d getLinearAccel() { refreshCachesIfNeeded(); return cachedAccel; } + /** + * Get the current jerk + * + *

This function updates the caches as needed. + * + * @return The current jerk as a Translation3d object + */ public Translation3d getJerk() { refreshCachesIfNeeded(); return cachedJerk; } + /** + * Zero the YAW to this input value + * + * @param yaw Input YAW + */ public void zeroYaw(Rotation2d yaw) { io.zeroYawRad(yaw.getRadians()); } + /** Refresh the caches from the inputs, if needed */ private void refreshCachesIfNeeded() { final long stamp = inputs.timestampNs; if (stamp == cacheStampNs) return; diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIO.java b/src/main/java/frc/robot/subsystems/imu/ImuIO.java index bf350c4f..8ee521e4 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIO.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIO.java @@ -24,9 +24,6 @@ /** * Single IMU interface exposing all relevant state: orientation, rates, linear accel, jerk, and * odometry samples. - * - *

Primitive-only core: NO WPILib geometry objects, NO Units objects. Conversions happen at the - * boundary in the Imu subsystem (wrapper methods). */ public interface ImuIO extends RBSIIO { @@ -34,28 +31,21 @@ public interface ImuIO extends RBSIIO { class ImuIOInputs { public boolean connected = false; - /** FPGA-local timestamp when inputs were captured (ns) */ + // FPGA-local timestamp when inputs were captured (ns) public long timestampNs = 0L; - - /** Yaw angle (robot frame) in radians */ + // Yaw angle (robot frame) in radians public double yawPositionRad = 0.0; - - /** Yaw angular rate in radians/sec */ + // Yaw angular rate in radians/sec public double yawRateRadPerSec = 0.0; - - /** Linear acceleration in robot frame (m/s^2) */ + // Linear acceleration in robot frame (m/s^2) public Translation3d linearAccel = Translation3d.kZero; - - /** Linear jerk in robot frame (m/s^3) */ + // Linear jerk in robot frame (m/s^3) public Translation3d linearJerk = Translation3d.kZero; - - /** Time spent in the IO update call (seconds) */ + // Time spent in the IO update call (seconds) public double latencySeconds = 0.0; - - /** Optional odometry samples (timestamps in seconds) */ + // Odometry samples (timestamps in seconds) public double[] odometryYawTimestamps = new double[] {}; - - /** Optional odometry samples (yaw positions in radians) */ + // Odometry samples (yaw positions in radians) public double[] odometryYawPositionsRad = new double[] {}; } diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java b/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java index 18fa3c23..97d47100 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java @@ -20,8 +20,10 @@ import com.studica.frc.AHRS; import com.studica.frc.AHRS.NavXComType; import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStation.Alliance; +import frc.robot.Constants; import frc.robot.subsystems.drive.PhoenixOdometryThread; import frc.robot.subsystems.drive.SwerveConstants; import java.util.Iterator; @@ -29,23 +31,23 @@ /** IMU IO for NavX. Primitive-only: yaw/rate in radians, accel in m/s^2, jerk in m/s^3. 
*/ public class ImuIONavX implements ImuIO { - private static final double DEG_TO_RAD = Math.PI / 180.0; - private static final double G_TO_MPS2 = 9.80665; + // Define the NavX Hardware private final AHRS navx; - // Phoenix odometry queues (boxed Doubles, but we drain without streams) + // Queues private final Queue yawPositionDegQueue; private final Queue yawTimestampQueue; - // Previous accel (m/s^2) for jerk + // Previous accel for jerk calculation (m/s/s) private Translation3d prevAcc = Translation3d.kZero; private long prevTimestampNs = 0L; - // Reusable buffers for queue drain + // Reusable buffers for queue-drain (to avoid using streams) private double[] odomTsBuf = new double[8]; private double[] odomYawRadBuf = new double[8]; + /** Constructor */ public ImuIONavX() { // Initialize NavX over SPI navx = new AHRS(NavXComType.kMXP_SPI, (byte) SwerveConstants.kOdometryFrequency); @@ -63,10 +65,14 @@ public ImuIONavX() { yawPositionDegQueue = PhoenixOdometryThread.getInstance().registerSignal(navx::getYaw); } + /** Update the Inputs */ @Override public void updateInputs(ImuIOInputs inputs) { final long start = System.nanoTime(); + // Load the nanosecond timestamp + inputs.timestampNs = start; + inputs.connected = navx.isConnected(); // Your original sign convention: @@ -76,21 +82,17 @@ public void updateInputs(ImuIOInputs inputs) { // NavX: // - getAngle() is degrees (continuous) // - getRawGyroZ() is deg/sec - final double yawDeg = -navx.getAngle(); - final double yawRateDegPerSec = -navx.getRawGyroZ(); - - inputs.yawPositionRad = yawDeg * DEG_TO_RAD; - inputs.yawRateRadPerSec = yawRateDegPerSec * DEG_TO_RAD; + inputs.yawPositionRad = Units.degreesToRadians(-navx.getAngle()); + inputs.yawRateRadPerSec = Units.degreesToRadians(-navx.getRawGyroZ()); - // World linear accel (NavX returns "g" typically). Convert to m/s^2. + // World linear accel (NavX returns "g" typically); convert to m/s/s inputs.linearAccel = new Translation3d( - navx.getWorldLinearAccelX(), - navx.getWorldLinearAccelY(), - navx.getWorldLinearAccelZ()) - .times(G_TO_MPS2); + navx.getWorldLinearAccelX() * Constants.G_TO_MPS2, + navx.getWorldLinearAccelY() * Constants.G_TO_MPS2, + navx.getWorldLinearAccelZ() * Constants.G_TO_MPS2); - // Jerk + // Jerk computed as (delta accel) / dt if (prevTimestampNs != 0L) { final double dt = (start - prevTimestampNs) * 1e-9; if (dt > 1e-6) { @@ -98,14 +100,14 @@ public void updateInputs(ImuIOInputs inputs) { } } + // Load "previous values" for the next loop prevTimestampNs = start; prevAcc = inputs.linearAccel; - inputs.timestampNs = start; - - // Odometry history + // Drain odometry queues to primitive arrays (timestamps == doubles; yaws == degrees) final int n = drainOdomQueues(); if (n > 0) { + // If there's anything to drain... 
final double[] tsOut = new double[n]; final double[] yawOut = new double[n]; System.arraycopy(odomTsBuf, 0, tsOut, 0, n); @@ -113,10 +115,12 @@ public void updateInputs(ImuIOInputs inputs) { inputs.odometryYawTimestamps = tsOut; inputs.odometryYawPositionsRad = yawOut; } else { + // ...otherwise return empty arrays inputs.odometryYawTimestamps = new double[] {}; inputs.odometryYawPositionsRad = new double[] {}; } + // Compute how long this took in seconds final long end = System.nanoTime(); inputs.latencySeconds = (end - start) * 1e-9; } @@ -128,7 +132,7 @@ public void updateInputs(ImuIOInputs inputs) { */ @Override public void zeroYawRad(double yawRad) { - navx.setAngleAdjustment(yawRad / DEG_TO_RAD); + navx.setAngleAdjustment(Units.radiansToDegrees(yawRad)); navx.zeroYaw(); // Reset jerk history so you don't spike on the next frame @@ -136,6 +140,11 @@ public void zeroYawRad(double yawRad) { prevAcc = Translation3d.kZero; } + /** + * Drain the Odometry Queues into a Buffer + * + *

Private function that does the heavy lifting of draining the queues + */ private int drainOdomQueues() { final int nTs = yawTimestampQueue.size(); final int nYaw = yawPositionDegQueue.size(); @@ -157,7 +166,7 @@ private int drainOdomQueues() { // queue provides degrees (navx::getYaw). Apply your sign convention (-d) then rad. final double yawDeg = -itY.next(); - odomYawRadBuf[i] = yawDeg * DEG_TO_RAD; + odomYawRadBuf[i] = Units.degreesToRadians(yawDeg); i++; } @@ -167,6 +176,11 @@ private int drainOdomQueues() { return i; } + /** + * Check that buffer is big enough for this queue + * + *

Private function that ensures odometry buffer capacity + */ private void ensureOdomCapacity(int n) { if (odomTsBuf.length >= n) return; int newCap = odomTsBuf.length; diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java b/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java index 0db40ef9..a1f708b0 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java @@ -27,6 +27,7 @@ import edu.wpi.first.units.measure.Angle; import edu.wpi.first.units.measure.AngularVelocity; import edu.wpi.first.units.measure.LinearAcceleration; +import frc.robot.Constants; import frc.robot.subsystems.drive.PhoenixOdometryThread; import frc.robot.subsystems.drive.SwerveConstants; import frc.robot.util.RBSICANBusRegistry; @@ -36,10 +37,7 @@ /** IMU IO for CTRE Pigeon2 */ public class ImuIOPigeon2 implements ImuIO { - // Constants - private static final double G_TO_MPS2 = 9.80665; - - // Define the Pigeon2 + // Define the Pigeon2 Hardware private final Pigeon2 pigeon = new Pigeon2( SwerveConstants.kPigeonId, RBSICANBusRegistry.getBus(SwerveConstants.kCANbusName)); @@ -55,15 +53,15 @@ public class ImuIOPigeon2 implements ImuIO { private final Queue odomTimestamps; private final Queue odomYawsDeg; - // Previous accel for jerk (m/s^2) + // Previous accel for jerk calculation (m/s/s) private Translation3d prevAcc = Translation3d.kZero; private long prevTimestampNs = 0L; - // Reusable buffers for queue-drain (avoid streams) + // Reusable buffers for queue-drain (to avoid using streams) private double[] odomTsBuf = new double[8]; private double[] odomYawRadBuf = new double[8]; - // Constructor + /** Constructor */ public ImuIOPigeon2() { pigeon.getConfigurator().apply(new Pigeon2Configuration()); pigeon.getConfigurator().setYaw(0.0); @@ -86,38 +84,40 @@ public ImuIOPigeon2() { public void updateInputs(ImuIOInputs inputs) { final long start = System.nanoTime(); + // Load the nanosecond timestamp + inputs.timestampNs = start; + StatusCode code = BaseStatusSignal.refreshAll(yawSignal, yawRateSignal, accelX, accelY, accelZ); inputs.connected = code.isOK(); // Yaw / rate: Phoenix returns degrees and deg/s here; convert to radians - final double yawDeg = yawSignal.getValueAsDouble(); - final double yawRateDegPerSec = yawRateSignal.getValueAsDouble(); + inputs.yawPositionRad = Units.degreesToRadians(yawSignal.getValueAsDouble()); + inputs.yawRateRadPerSec = Units.degreesToRadians(yawRateSignal.getValueAsDouble()); - inputs.yawPositionRad = Units.degreesToRadians(yawDeg); - inputs.yawRateRadPerSec = Units.degreesToRadians(yawRateDegPerSec); - - // Accel: Phoenix returns "g" for these signals (common for Pigeon2). Convert to m/s^2 + // Accel: Phoenix returns "g" for these signals; convert to m/s/s inputs.linearAccel = new Translation3d( - accelX.getValueAsDouble(), accelY.getValueAsDouble(), accelZ.getValueAsDouble()) - .times(G_TO_MPS2); + accelX.getValueAsDouble() * Constants.G_TO_MPS2, + accelY.getValueAsDouble() * Constants.G_TO_MPS2, + accelZ.getValueAsDouble() * Constants.G_TO_MPS2); - // Jerk from delta accel / dt + // Jerk computed as (delta accel) / dt if (prevTimestampNs != 0L) { final double dt = (start - prevTimestampNs) * 1e-9; + // Only compute if `dt` is larger than 1 ms. 
if (dt > 1e-6) { inputs.linearJerk = inputs.linearAccel.minus(prevAcc).div(dt); } } + // Load "previous values" for the next loop prevTimestampNs = start; prevAcc = inputs.linearAccel; - inputs.timestampNs = start; - - // Drain odometry queues to primitive arrays (timestamps are already doubles; yaws are degrees) + // Drain odometry queues to primitive arrays (timestamps == doubles; yaws == degrees) final int n = drainOdometryQueuesIntoBuffers(); if (n > 0) { + // If there's anything to drain... final double[] tsOut = new double[n]; final double[] yawOut = new double[n]; System.arraycopy(odomTsBuf, 0, tsOut, 0, n); @@ -125,10 +125,12 @@ public void updateInputs(ImuIOInputs inputs) { inputs.odometryYawTimestamps = tsOut; inputs.odometryYawPositionsRad = yawOut; } else { + // ...otherwise return empty arrays inputs.odometryYawTimestamps = new double[] {}; inputs.odometryYawPositionsRad = new double[] {}; } + // Compute how long this took in seconds final long end = System.nanoTime(); inputs.latencySeconds = (end - start) * 1e-9; } @@ -143,6 +145,11 @@ public void zeroYawRad(double yawRad) { pigeon.setYaw(Units.radiansToDegrees(yawRad)); } + /** + * Drain the Odometry Queues into a Buffer + * + *

Private function that does the heavy lifting of draining the queues + */ private int drainOdometryQueuesIntoBuffers() { final int nTs = odomTimestamps.size(); final int nYaw = odomYawsDeg.size(); @@ -171,6 +178,11 @@ private int drainOdometryQueuesIntoBuffers() { return i; } + /** + * Check that buffer is big enough for this queue + * + *

Private function that ensures odometry buffer capacity + */ private void ensureOdomCapacity(int n) { if (odomTsBuf.length >= n) return; int newCap = odomTsBuf.length; From 5fa0ae4bc22def50ee606d07e798b5003abb37c3 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Sat, 14 Feb 2026 12:47:55 -0700 Subject: [PATCH 12/19] Clean up Drive and DriveOdometry code --- src/main/java/frc/robot/Constants.java | 12 +- .../frc/robot/subsystems/drive/Drive.java | 248 ++++++++---------- .../robot/subsystems/drive/DriveOdometry.java | 186 ++----------- 3 files changed, 131 insertions(+), 315 deletions(-) diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index df22b2c7..7e3af173 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -74,7 +74,7 @@ public final class Constants { private static SwerveType swerveType = SwerveType.PHOENIX6; // PHOENIX6, YAGSL private static CTREPro phoenixPro = CTREPro.UNLICENSED; // LICENSED, UNLICENSED private static AutoType autoType = AutoType.MANUAL; // MANUAL, PATHPLANNER, CHOREO - private static VisionType visionType = VisionType.PHOTON; // PHOTON, LIMELIGHT, NONE + private static VisionType visionType = VisionType.NONE; // PHOTON, LIMELIGHT, NONE /** Enumerate the robot types (name your robots here) */ public static enum RobotType { @@ -313,19 +313,15 @@ public static final class DrivebaseConstants { public static final double kNominalFFVolts = 2.0; // Volts // Default TalonFX Gains (Replaces what's in Phoenix X's Tuner Constants) - // NOTE: Default values from 6328's 2025 Public Code - // - // IMPORTANT:: These values are valid only for CTRE LICENSED operation!! - // Adjust these downward until your modules behave correctly - public static final double kDriveP = 40.0; + public static final double kDriveP = 4.0; public static final double kDriveD = 0.03; public static final double kDriveV = 0.83; public static final double kDriveA = 0.0; public static final double kDriveS = 2.00; public static final double kDriveT = SwerveConstants.kDriveGearRatio / DCMotor.getKrakenX60Foc(1).KtNMPerAmp; - public static final double kSteerP = 400.0; - public static final double kSteerD = 20.0; + public static final double kSteerP = 4.0; + public static final double kSteerD = 0.02; public static final double kSteerS = 2.0; // Odometry-related constants diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index fda4cde7..8ebda99f 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -32,8 +32,6 @@ import edu.wpi.first.math.kinematics.SwerveDriveKinematics; import edu.wpi.first.math.kinematics.SwerveModulePosition; import edu.wpi.first.math.kinematics.SwerveModuleState; -import edu.wpi.first.math.numbers.N1; -import edu.wpi.first.math.numbers.N3; import edu.wpi.first.math.trajectory.TrapezoidProfile; import edu.wpi.first.wpilibj.Alert; import edu.wpi.first.wpilibj.Alert.AlertType; @@ -73,7 +71,7 @@ public class Drive extends RBSISubsystem { private final Module[] modules = new Module[4]; // FL, FR, BL, BR private final SysIdRoutine sysId; - // Buffers for necessary things + // Pose Buffer Declarations private final ConcurrentTimeInterpolatableBuffer poseBuffer = ConcurrentTimeInterpolatableBuffer.createBuffer(DrivebaseConstants.kHistorySize); private final ConcurrentTimeInterpolatableBuffer yawBuffer = @@ -95,12 +93,6 @@ public class Drive extends 
RBSISubsystem { new SwerveModulePosition(), new SwerveModulePosition() }; - private final SwerveModulePosition[] odomPositions = { - new SwerveModulePosition(), - new SwerveModulePosition(), - new SwerveModulePosition(), - new SwerveModulePosition() - }; private SwerveDrivePoseEstimator m_PoseEstimator = new SwerveDrivePoseEstimator(kinematics, Rotation2d.kZero, lastModulePositions, Pose2d.kZero); @@ -250,23 +242,17 @@ public Drive(Imu imu) { } /************************************************************************* */ - /** Periodic function that is called each robot cycle by the command scheduler */ + /** Periodic function that is called each cycle by the command scheduler */ @Override public void rbsiPeriodic() { - // NO odometryLock needed unless you touch poseEstimator directly here. - // Keep this focused on control / setpoints / characterization. + // The only function of the drive periodic() is to stop the modules if the DriverStation is + // diabled. if (DriverStation.isDisabled()) { for (var module : modules) module.stop(); Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); } - - // If you need module refresh for control, you can either: - // A) leave it in DriveOdometry (recommended) and here just use cached inputs, OR - // B) keep a lightweight "module.controlPeriodic()" that doesn't drain queues. - // - // For now: do nothing here re: odometry. } /** @@ -285,7 +271,7 @@ public void simulationPeriodic() { modules[i].simulationPeriodic(); } - // Get module states from modules + // Get module states from modules (ok to allocate; can be cached later if desired) final SwerveModuleState[] moduleStates = new SwerveModuleState[modules.length]; for (int i = 0; i < modules.length; i++) { moduleStates[i] = modules[i].getState(); @@ -305,31 +291,14 @@ public void simulationPeriodic() { imu.simulationSetOmegaRadPerSec(omegaRadPerSec); imu.simulationSetLinearAccelMps2(ax, ay, 0.0); - // Feed PoseEstimator with authoritative yaw and module positions - final SwerveModulePosition[] modulePositions = new SwerveModulePosition[modules.length]; - for (int i = 0; i < modules.length; i++) { - modulePositions[i] = modules[i].getPosition(); - } - m_PoseEstimator.resetPosition( - Rotation2d.fromRadians(yawRad), modulePositions, simPhysics.getPose()); - - // If simulated vision available, inject vision measurement - if (simulatedVisionAvailable) { - final Pose2d visionPose = getSimulatedVisionPose(); - final double visionTimestamp = Timer.getFPGATimestamp(); - final var visionStdDevs = getSimulatedVisionStdDevs(); - m_PoseEstimator.addVisionMeasurement(visionPose, visionTimestamp, visionStdDevs); - } - - poseBuffer.addSample(Timer.getFPGATimestamp(), simPhysics.getPose()); - - // Logging + // Logging ONLY for physics (NOT estimator) Logger.recordOutput("Sim/Pose", simPhysics.getPose()); Logger.recordOutput("Sim/YawRad", yawRad); - Logger.recordOutput("Sim/OmegaRadPerSec", simPhysics.getOmegaRadPerSec()); + Logger.recordOutput("Sim/OmegaRadPerSec", omegaRadPerSec); Logger.recordOutput("Sim/LinearAccelXY_mps2", new double[] {ax, ay}); } + /************************************************************************* */ /** Drive Base Action Functions ****************************************** */ /** @@ -387,7 +356,11 @@ public void runVelocity(ChassisSpeeds speeds) { Logger.recordOutput("SwerveStates/SetpointsOptimized", setpointStates); } - /** Runs the drive in a straight line with the specified 
drive output. */ + /** + * Runs the drive in a straight line with the specified drive output + * + * @param output Specified drive output for characterization + */ public void runCharacterization(double output) { for (int i = 0; i < 4; i++) { modules[i].runCharacterization(output); @@ -408,6 +381,7 @@ public ProfiledPIDController getAngleController() { return angleController; } + /************************************************************************* */ /** SysId Characterization Routines ************************************** */ /** Returns a command to run a quasistatic test in the specified direction. */ @@ -422,7 +396,10 @@ public Command sysIdDynamic(SysIdRoutine.Direction direction) { return run(() -> runCharacterization(0.0)).withTimeout(1.0).andThen(sysId.dynamic(direction)); } + /************************************************************************* */ /** Getter Functions ***************************************************** */ + + /** Returns the module array */ public Module[] getModules() { return modules; } @@ -462,7 +439,7 @@ public Pose2d getPose() { return m_PoseEstimator.getEstimatedPosition(); } - /** Returns the current odometry rotation. */ + /** Returns the current odometry YAW. */ @AutoLogOutput(key = "Odometry/Yaw") public Rotation2d getHeading() { if (Constants.getMode() == Mode.SIM) { @@ -584,9 +561,14 @@ public double getFFCharacterizationVelocity() { return output; } + /************************************************************************* */ /* Setter Functions ****************************************************** */ - /** Resets the current odometry pose. */ + /** + * Resets the current odometry pose + * + * @param pose The specified pose to which to reset the poseEsitmator + */ public void resetPose(Pose2d pose) { m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), pose); markPoseReset(Timer.getFPGATimestamp()); @@ -602,19 +584,23 @@ public void zeroHeadingForAlliance() { markPoseReset(Timer.getFPGATimestamp()); } - /** Zeros the heading */ + /** Zeros the gyro regardless of the alliance */ public void zeroHeading() { imu.zeroYaw(Rotation2d.kZero); resetHeadingController(); markPoseReset(Timer.getFPGATimestamp()); } - /** Adds a vision measurement safely into the PoseEstimator. 
*/ - public void addVisionMeasurement(TimedPose rbsiPose) { + /** + * Adds a vision measurement safely into the PoseEstimator + * + * @param timedPose The pose @ timestamp to add to the pose estimator + */ + public void addVisionMeasurement(TimedPose timedPose) { odometryLock.lock(); try { m_PoseEstimator.addVisionMeasurement( - rbsiPose.pose(), rbsiPose.timestampSeconds(), rbsiPose.stdDevs()); + timedPose.pose(), timedPose.timestampSeconds(), timedPose.stdDevs()); } finally { odometryLock.unlock(); } @@ -632,128 +618,42 @@ private void markPoseReset(double fpgaNow) { Logger.recordOutput("Drive/PoseResetTimestamp", lastPoseResetTimestamp); } - /** UTILITY FUNCTION SECTION ********************************************* */ - - /** CHOREO SECTION (Ignore if AutoType == PATHPLANNER) ******************* */ - - /** Choreo: Reset odometry */ - public Command resetOdometry(Pose2d orElseGet) { - // TODO Auto-generated method stub - throw new UnsupportedOperationException("Unimplemented method 'resetOdometry'"); - } - - /** Swerve request to apply during field-centric path following */ - @SuppressWarnings("unused") - private final SwerveRequest.ApplyFieldSpeeds m_pathApplyFieldSpeeds = - new SwerveRequest.ApplyFieldSpeeds(); - - // Choreo Controller Values - private final PIDController m_pathXController = new PIDController(10, 0, 0); - private final PIDController m_pathYController = new PIDController(10, 0, 0); - private final PIDController m_pathThetaController = new PIDController(7, 0, 0); - + /************************************************************************* */ /** - * Follows the given field-centric path sample with PID for Choreo + * DriveOdometry Helpers (package-private) * - * @param pose Current pose of the robot - * @param sample Sample along the path to follow - */ - public void choreoController(Pose2d pose, SwerveSample sample) { - m_pathThetaController.enableContinuousInput(-Math.PI, Math.PI); - - var targetSpeeds = sample.getChassisSpeeds(); - targetSpeeds.vxMetersPerSecond += m_pathXController.calculate(pose.getX(), sample.x); - targetSpeeds.vyMetersPerSecond += m_pathYController.calculate(pose.getY(), sample.y); - targetSpeeds.omegaRadiansPerSecond += - m_pathThetaController.calculate(pose.getRotation().getRadians(), sample.heading); - - // setControl( - // m_pathApplyFieldSpeeds - // .withSpeeds(targetSpeeds) - // .withWheelForceFeedforwardsX(sample.moduleForcesX()) - // .withWheelForceFeedforwardsY(sample.moduleForcesY())); - } - - public void followTrajectory(SwerveSample sample) { - // Get the current pose of the robot - Pose2d pose = getPose(); - - // Generate the next speeds for the robot - ChassisSpeeds speeds = - new ChassisSpeeds( - sample.vx + m_pathXController.calculate(pose.getX(), sample.x), - sample.vy + m_pathXController.calculate(pose.getX(), sample.y), - sample.omega - + m_pathXController.calculate(pose.getRotation().getRadians(), sample.heading)); - - // Apply the generated speeds - runVelocity(speeds); - } - - /** SIMULATION VISION FUNCTIONS ****************************************** */ - - // Vision measurement enabled in simulation - private boolean simulatedVisionAvailable = true; - - // Maximum simulated noise in meters/radians - private static final double SIM_VISION_POS_NOISE_M = 0.02; // +/- 2cm - private static final double SIM_VISION_YAW_NOISE_RAD = Math.toRadians(2); // +/- 2 degrees - - /** - * Returns a simulated Pose2d for vision in field coordinates. Adds a small random jitter to - * simulate measurement error. + *

The pose estimator and pose buffers are owned by Drive, but DriveOdometry needs access to + * them in order to update and process the odometry. These functions are the appropriate + * pass-throughs to allow this functionality. */ - private Pose2d getSimulatedVisionPose() { - Pose2d truePose = simPhysics.getPose(); // authoritative pose - - // Add small random noise - double dx = (Math.random() * 2 - 1) * SIM_VISION_POS_NOISE_M; - double dy = (Math.random() * 2 - 1) * SIM_VISION_POS_NOISE_M; - double dTheta = (Math.random() * 2 - 1) * SIM_VISION_YAW_NOISE_RAD; - - return new Pose2d( - truePose.getX() + dx, - truePose.getY() + dy, - truePose.getRotation().plus(new Rotation2d(dTheta))); - } - - /** - * Returns the standard deviations for the simulated vision measurement. These values are used by - * the PoseEstimator to weight vision updates. - */ - private edu.wpi.first.math.Matrix getSimulatedVisionStdDevs() { - edu.wpi.first.math.Matrix stdDevs = - new edu.wpi.first.math.Matrix<>(N3.instance, N1.instance); - stdDevs.set(0, 0, 0.02); // X standard deviation (meters) - stdDevs.set(1, 0, 0.02); // Y standard deviation (meters) - stdDevs.set(2, 0, Math.toRadians(2)); // rotation standard deviation (radians) - return stdDevs; - } - - // --- Helpers used by DriveOdometry (package-private) --- + /** Get the pose estimator current pose */ Pose2d poseEstimatorGetPose() { return m_PoseEstimator.getEstimatedPosition(); } + /** Update the pose estimator at a timestamp */ void poseEstimatorUpdateWithTime(double t, Rotation2d yaw, SwerveModulePosition[] positions) { m_PoseEstimator.updateWithTime(t, yaw, positions); } + /** Add a sample to the pose buffer */ void poseBufferAddSample(double t, Pose2d pose) { poseBuffer.addSample(t, pose); } - // Yaw buffer helpers (assuming you already have yawBuffer + yawRateBuffer) + /** Yaw buffer helper */ double yawBufferSampleOr(double t, double fallbackYawRad) { return yawBuffer.getSample(t).orElse(fallbackYawRad); } + /** Yaw buffer helper */ void yawBuffersAddSample(double t, double yawRad, double yawRateRadPerSec) { yawBuffer.addSample(t, yawRad); yawRateBuffer.addSample(t, yawRateRadPerSec); } + /** Yaw buffer helper */ void yawBuffersFillFromQueue(double[] yawTs, double[] yawPosRad) { for (int k = 0; k < yawTs.length; k++) { yawBuffer.addSample(yawTs[k], yawPosRad[k]); @@ -766,6 +666,7 @@ void yawBuffersFillFromQueue(double[] yawTs, double[] yawPosRad) { } } + /** Yaw buffer helper */ void yawBuffersAddSampleIndexAligned(double t, double[] yawTs, double[] yawPos, int i) { yawBuffer.addSample(t, yawPos[i]); if (i > 0) { @@ -776,11 +677,13 @@ void yawBuffersAddSampleIndexAligned(double t, double[] yawTs, double[] yawPos, } } + /** Set the gyroDisconnectedAlert */ void setGyroDisconnectedAlert(boolean disconnected) { gyroDisconnectedAlert.set(disconnected); } - // Drive.java + /************************************************************************* */ + /** Simulation Getter Functions (from simPhysics) */ public Pose2d getSimPose() { return simPhysics.getPose(); } @@ -792,4 +695,61 @@ public double getSimYawRad() { public double getSimYawRateRadPerSec() { return simPhysics.getOmegaRadPerSec(); } + + /************************************************************************* */ + /** CHOREO SECTION (Ignore if AutoType == PATHPLANNER) ******************* */ + + /** Choreo: Reset odometry */ + public Command resetOdometry(Pose2d orElseGet) { + // TODO Auto-generated method stub + throw new UnsupportedOperationException("Unimplemented method 'resetOdometry'"); + } 
+ + /** Swerve request to apply during field-centric path following */ + @SuppressWarnings("unused") + private final SwerveRequest.ApplyFieldSpeeds m_pathApplyFieldSpeeds = + new SwerveRequest.ApplyFieldSpeeds(); + + // Choreo Controller Values + private final PIDController m_pathXController = new PIDController(10, 0, 0); + private final PIDController m_pathYController = new PIDController(10, 0, 0); + private final PIDController m_pathThetaController = new PIDController(7, 0, 0); + + /** + * Follows the given field-centric path sample with PID for Choreo + * + * @param pose Current pose of the robot + * @param sample Sample along the path to follow + */ + public void choreoController(Pose2d pose, SwerveSample sample) { + m_pathThetaController.enableContinuousInput(-Math.PI, Math.PI); + + var targetSpeeds = sample.getChassisSpeeds(); + targetSpeeds.vxMetersPerSecond += m_pathXController.calculate(pose.getX(), sample.x); + targetSpeeds.vyMetersPerSecond += m_pathYController.calculate(pose.getY(), sample.y); + targetSpeeds.omegaRadiansPerSecond += + m_pathThetaController.calculate(pose.getRotation().getRadians(), sample.heading); + + // setControl( + // m_pathApplyFieldSpeeds + // .withSpeeds(targetSpeeds) + // .withWheelForceFeedforwardsX(sample.moduleForcesX()) + // .withWheelForceFeedforwardsY(sample.moduleForcesY())); + } + + public void followTrajectory(SwerveSample sample) { + // Get the current pose of the robot + Pose2d pose = getPose(); + + // Generate the next speeds for the robot + ChassisSpeeds speeds = + new ChassisSpeeds( + sample.vx + m_pathXController.calculate(pose.getX(), sample.x), + sample.vy + m_pathXController.calculate(pose.getX(), sample.y), + sample.omega + + m_pathXController.calculate(pose.getRotation().getRadians(), sample.heading)); + + // Apply the generated speeds + runVelocity(speeds); + } } diff --git a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java index f26a07c8..99250649 100644 --- a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java +++ b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java @@ -27,37 +27,48 @@ import org.littletonrobotics.junction.Logger; public final class DriveOdometry extends VirtualSubsystem { + + // Declare the io and inputs private final Drive drive; private final Imu imu; private final Module[] modules; - // Scratch (no per-loop allocations) + // Per-cycle cached objects (to avoid repeated allocations) private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; + /** Constructor */ public DriveOdometry(Drive drive, Imu imu, Module[] modules) { this.drive = drive; this.imu = imu; this.modules = modules; } - // Priority value for this virtual subsystem + /** + * Priority value for this virtual subsystem + * + *

See `frc.robot.util.VirtualSubsystem` for a description of the suggested values for various + * virtual subsystems. + */ @Override protected int getPeriodPriority() { return -20; } + /** Periodic function to read inputs */ @Override public void rbsiPeriodic() { Drive.odometryLock.lock(); try { + // Ensure IMU inputs are fresh for this cycle final var imuInputs = imu.getInputs(); // Drain per-module odometry queues ONCE per loop (this also refreshes motor signals) for (var module : modules) { - module.periodic(); // if you later split, this becomes module.updateInputs() + module.periodic(); } if (Constants.getMode() == Mode.SIM) { + // SIMULATION: Keep sim pose buffer time-aligned, too final double now = Timer.getFPGATimestamp(); drive.poseBufferAddSample(now, drive.getSimPose()); drive.yawBuffersAddSample(now, drive.getSimYawRad(), drive.getSimYawRateRadPerSec()); @@ -83,7 +94,7 @@ public void rbsiPeriodic() { modHist[m] = modules[m].getOdometryPositions(); } - // Yaw queue availability + // Determine YAW queue availability (everything exists and lines up) final boolean hasYawQueue = imuInputs.connected && imuInputs.odometryYawTimestamps != null @@ -95,6 +106,9 @@ public void rbsiPeriodic() { final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; // Determine index alignment (cheap + deterministic) + // We only trust index alignment if BOTH: + // - yaw has at least n samples + // - yawTs[i] ~= ts[i] for i in range (tight epsilon) boolean yawIndexAligned = false; if (hasYawQueue && yawTs.length >= n) { yawIndexAligned = true; @@ -116,7 +130,7 @@ public void rbsiPeriodic() { drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); } - // Replay + // Replay each odometry sample for (int i = 0; i < n; i++) { final double t = ts[i]; @@ -132,18 +146,21 @@ public void rbsiPeriodic() { } } + // Determine yaw at this timestamp double yawRad = imuInputs.yawPositionRad; if (hasYawQueue) { if (yawIndexAligned) { yawRad = yawPos[i]; - // Keep yaw buffers aligned to replay timeline (good for yaw-rate gate) + // Keep yaw buffers aligned to replay timeline drive.yawBuffersAddSampleIndexAligned(t, yawTs, yawPos, i); } else { yawRad = drive.yawBufferSampleOr(t, imuInputs.yawPositionRad); } } + // Feed estimator at this historical timestamp drive.poseEstimatorUpdateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + // Maintain pose history in SAME timebase as estimator drive.poseBufferAddSample(t, drive.poseEstimatorGetPose()); } @@ -154,161 +171,4 @@ public void rbsiPeriodic() { Drive.odometryLock.unlock(); } } - - // /************************************************************************* */ - // /** Periodic function that is called each robot cycle by the command scheduler */ - // @Override - // public void rbsiPeriodic() { - // odometryLock.lock(); - // try { - // // Ensure IMU inputs are fresh for this cycle - // final var imuInputs = imu.getInputs(); - - // // Stop modules & log empty setpoint states if disabled - // if (DriverStation.isDisabled()) { - // for (var module : modules) { - // module.stop(); - // } - // Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); - // Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); - // } - - // // Drain per-module odometry queues for this cycle - // for (var module : modules) { - // module.periodic(); - // } - - // // -------- REAL ODOMETRY REPLAY (canonical timestamps from module[0]) -------- - // if (Constants.getMode() != Mode.SIM) { - // 
final double[] ts = modules[0].getOdometryTimestamps(); - // final int n = (ts == null) ? 0 : ts.length; - - // // Cache per-module histories ONCE (avoid repeated getters in the loop) - // final SwerveModulePosition[][] modHist = new SwerveModulePosition[4][]; - // for (int m = 0; m < 4; m++) { - // modHist[m] = modules[m].getOdometryPositions(); - // } - - // // Determine yaw queue availability - // final boolean hasYawQueue = - // imuInputs.connected - // && imuInputs.odometryYawTimestamps != null - // && imuInputs.odometryYawPositionsRad != null - // && imuInputs.odometryYawTimestamps.length - // == imuInputs.odometryYawPositionsRad.length - // && imuInputs.odometryYawTimestamps.length > 0; - - // final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; - // final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; - - // // If we have no module samples, still keep yaw buffers “alive” for gating callers - // if (n == 0) { - // final double now = Timer.getFPGATimestamp(); - // yawBuffer.addSample(now, imuInputs.yawPositionRad); - // yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); - - // gyroDisconnectedAlert.set(!imuInputs.connected); - // return; - // } - - // // Decide whether yaw queue is index-aligned with module[0] timestamps. - // // We only trust index alignment if BOTH: - // // - yaw has at least n samples - // // - yawTs[i] ~= ts[i] for i in range (tight epsilon) - // boolean yawIndexAligned = false; - // if (hasYawQueue && yawTs.length >= n) { - // yawIndexAligned = true; - // final double eps = 1e-3; // 1 ms - // for (int i = 0; i < n; i++) { - // if (Math.abs(yawTs[i] - ts[i]) > eps) { - // yawIndexAligned = false; - // break; - // } - // } - // } - - // // If yaw is NOT index-aligned but we have a yaw queue, build yaw/yawRate buffers ONCE. 
- // if (hasYawQueue && !yawIndexAligned) { - // for (int k = 0; k < yawTs.length; k++) { - // yawBuffer.addSample(yawTs[k], yawPos[k]); - // if (k > 0) { - // final double dt = yawTs[k] - yawTs[k - 1]; - // if (dt > 1e-6) { - // yawRateBuffer.addSample(yawTs[k], (yawPos[k] - yawPos[k - 1]) / dt); - // } - // } - // } - // } - - // // If NO yaw queue, add a single “now” sample once (don’t do this per replay sample) - // if (!hasYawQueue) { - // final double now = Timer.getFPGATimestamp(); - // yawBuffer.addSample(now, imuInputs.yawPositionRad); - // yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); - // } - - // // Replay each odometry sample - // for (int i = 0; i < n; i++) { - // final double t = ts[i]; - - // // Build module positions at this sample index (clamp defensively) - // for (int m = 0; m < 4; m++) { - // final SwerveModulePosition[] hist = modHist[m]; - // if (hist == null || hist.length == 0) { - // odomPositions[m] = modules[m].getPosition(); - // } else if (i < hist.length) { - // odomPositions[m] = hist[i]; - // } else { - // odomPositions[m] = hist[hist.length - 1]; - // } - // } - - // // Determine yaw at this timestamp - // double yawRad = imuInputs.yawPositionRad; // fallback - - // if (hasYawQueue) { - // if (yawIndexAligned) { - // yawRad = yawPos[i]; - - // // Keep yaw/yawRate buffers updated in odometry timebase (good for yaw-gate) - // yawBuffer.addSample(t, yawRad); - // if (i > 0) { - // final double dt = yawTs[i] - yawTs[i - 1]; - // if (dt > 1e-6) { - // yawRateBuffer.addSample(t, (yawPos[i] - yawPos[i - 1]) / dt); - // } - // } - // } else { - // // yawBuffer was pre-filled above; interpolate here - // yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); - // } - // } - - // // Feed estimator at this historical timestamp - // m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); - - // // Maintain pose history in SAME timebase as estimator - // poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); - // } - - // Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); - // gyroDisconnectedAlert.set(!imuInputs.connected); - // return; - - // } else { - - // // SIMULATION: Keep sim pose buffer time-aligned, too - // double now = Timer.getFPGATimestamp(); - // poseBuffer.addSample(now, simPhysics.getPose()); - // yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); - // yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); - - // Logger.recordOutput("Drive/Pose", simPhysics.getPose()); - // gyroDisconnectedAlert.set(false); - // } - // } finally { - // odometryLock.unlock(); - // } - // } - } From 01d0cc2c5a16b72fc96f65a5558445175c68fb0b Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Sat, 14 Feb 2026 14:15:24 -0700 Subject: [PATCH 13/19] Commenting the Vision.java file --- .../frc/robot/subsystems/vision/Vision.java | 134 ++++++++++++++---- 1 file changed, 104 insertions(+), 30 deletions(-) diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 0928d659..271904be 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -109,7 +109,12 @@ public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... io) { } } - // Priority value for this virtual subsystem + /** + * Priority value for this virtual subsystem + * + *

See `frc.robot.util.VirtualSubsystem` for a description of the suggested values for various + * virtual subsystems. + */ @Override protected int getPeriodPriority() { return -10; @@ -171,6 +176,7 @@ public void rbsiPeriodic() { continue; } + // If this estimate is better than the existing "best", update this -> best TimedPose est = built.estimate; if (best == null || isBetter(est, best)) { best = est; @@ -180,6 +186,7 @@ public void rbsiPeriodic() { } } + // Add an accepted measurement, and update if (best != null) { accepted++; lastAcceptedTsPerCam[cam] = best.timestampSeconds(); @@ -193,39 +200,46 @@ public void rbsiPeriodic() { Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTagCount", bestTagCount); } + // Log everything from this camera Logger.recordOutput("Vision/Camera" + cam + "/ObsSeen", seen); Logger.recordOutput("Vision/Camera" + cam + "/ObsAccepted", accepted); Logger.recordOutput("Vision/Camera" + cam + "/ObsRejected", rejected); } + // If no "accepted" pose measurements from this loop, return now if (perCamAccepted.isEmpty()) return; - // Fusion time is the newest timestamp among accepted per-camera samples - double tF = + // Fusion time is the newest timestamp among accepted per-camera samples; return if NaN + double tFusion = perCamAccepted.stream().mapToDouble(e -> e.timestampSeconds()).max().orElse(Double.NaN); - if (!Double.isFinite(tF)) return; + if (!Double.isFinite(tFusion)) return; - // Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse - TimedPose fused = fuseAtTime(perCamAccepted, tF); + // Time-align camera estimates to tFusion using odometry buffer, then inverse-variance fuse; + // return if null + TimedPose fused = fuseAtTime(perCamAccepted, tFusion); if (fused == null) return; - // Smooth by fusing recent fused estimates (also aligned to tF) + // Smooth by fusing recent fused estimates (also aligned to tFusion); return if null + // NOTE: THIS IS WHERE THE PROBLEM LIES, I THINK. THE FUSED POSE IS OKAY, BUT THE SMOOTHED POSE + // IS NOT!!! pushFused(fused); - TimedPose smoothed = smoothAtTime(tF); + TimedPose smoothed = smoothAtTime(tFusion); if (smoothed == null) return; // Inject the pose consumer.accept(smoothed); + // Log the fused and smoothed poses along with tFusion Logger.recordOutput("Vision/FusedPose", fused.pose()); Logger.recordOutput("Vision/SmoothedPose", smoothed.pose()); - Logger.recordOutput("Vision/FusedTimestamp", tF); + Logger.recordOutput("Vision/FusedTimestamp", tFusion); } + /************************************************************************* */ /** Runtime configuration hooks ****************************************** */ /** - * Call when you reset odometry (e.g. auto start, manual reset, etc). + * Call when odometry is reset (e.g. auto start, manual reset, etc). 
* * @param fpgaNowSeconds Timestamp for the pose gate reset */ @@ -245,7 +259,7 @@ public void setTrustedTags(Set tags) { } /** - * Set whether to requite trusted tags + * Set whether to require trusted tags * * @param require Boolean */ @@ -277,7 +291,10 @@ public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limi yawGateLimitRadPerSec = limitRadPerSec; } + /************************************************************************* */ /** Gating + Scoring ***************************************************** */ + + /** GateResult Class */ private static final class GateResult { final boolean accepted; final String reason; @@ -288,6 +305,12 @@ private static final class GateResult { } } + /** + * Gating Function -- checks all sorts of things! + * + * @param cam Camera index + * @param obs PoseObservation + */ private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation obs) { final double ts = obs.timestamp(); @@ -325,6 +348,7 @@ private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation o return new GateResult(true, ""); } + /** Built Estimate Class */ private static final class BuiltEstimate { final TimedPose estimate; final double trustScale; @@ -337,12 +361,19 @@ private static final class BuiltEstimate { } } + /** + * Build a pose esitmate + * + * @param cam Camera Index + * @param obs PoseObservation + */ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { - // Base uncertainty grows with distance^2 / tagCount (your 2486-style) + // Base uncertainty grows with distance^2 / tagCount final double tagCount = Math.max(1, obs.tagCount()); final double avgDist = Math.max(0.0, obs.averageTagDistance()); final double distFactor = (avgDist * avgDist) / tagCount; + // Camera uncertainty factor final double camFactor = (cam < camConfigs.length) ? camConfigs[cam].stdDevFactor() : 1.0; double linearStdDev = linearStdDevBaseline * camFactor * distFactor; @@ -361,13 +392,14 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { if (kTrusted.contains(id)) trustedCount++; } + // If no trusted tags, return null if (requireTrustedTag && trustedCount == 0) { return null; } + // Build the trust scale final int usedCount = obs.usedTagIds().size(); final double fracTrusted = (usedCount == 0) ? 
0.0 : ((double) trustedCount / usedCount); - final double trustScale = untrustedTagStdDevScale + fracTrusted * (trustedTagStdDevScale - untrustedTagStdDevScale); @@ -386,6 +418,12 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { trustedCount); } + /** + * Evaluate whether a is better than b + * + * @param a Base pose + * @param b Competitor pose + */ private boolean isBetter(TimedPose a, TimedPose b) { // Lower trace of stddev vector (x+y+theta) is better double ta = a.stdDevs().get(0, 0) + a.stdDevs().get(1, 0) + a.stdDevs().get(2, 0); @@ -393,40 +431,67 @@ private boolean isBetter(TimedPose a, TimedPose b) { return ta < tb; } + /************************************************************************* */ /** Time alignment & fusion ********************************************** */ - private TimedPose fuseAtTime(ArrayList estimates, double tF) { + + /** + * Fuse poses at a specified timestamp + * + * @param estimates Array of TimedPose esitmates + * @param tFusion The timestamp at which to fuse the measurements + */ + private TimedPose fuseAtTime(ArrayList estimates, double tFusion) { final ArrayList aligned = new ArrayList<>(estimates.size()); for (var e : estimates) { - Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); + Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tFusion); if (alignedPose == null) return null; - aligned.add(new TimedPose(alignedPose, tF, e.stdDevs())); + aligned.add(new TimedPose(alignedPose, tFusion, e.stdDevs())); } - return inverseVarianceFuse(aligned, tF); + return inverseVarianceFuse(aligned, tFusion); } - private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tF) { + /** + * Align a pose to where it would have been at the fusion time + * + *

Gets the odometric poses at ts and tFusion from the drivebase PoseEstimator, computes the + * transform between them, and applies that to the vision pose. + * + * @param visionPoseAtTs The pose at ts + * @param ts Timestamp of the pose + * @param tFusion Fusion timestamp + * @return Transformed Pose2d + */ + private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tFusion) { Optional odomAtTsOpt = drive.getPoseAtTime(ts); - Optional odomAtTFOpt = drive.getPoseAtTime(tF); + Optional odomAtTFOpt = drive.getPoseAtTime(tFusion); + // If empty, return null if (odomAtTsOpt.isEmpty() || odomAtTFOpt.isEmpty()) return null; - Pose2d odomAtTs = odomAtTsOpt.get(); - Pose2d odomAtTF = odomAtTFOpt.get(); - // Transform that takes odomAtTs -> odomAtTF - Transform2d ts_T_tf = odomAtTF.minus(odomAtTs); + Transform2d ts_T_tf = odomAtTFOpt.get().minus(odomAtTsOpt.get()); // Apply same motion to vision pose to bring it forward return visionPoseAtTs.transformBy(ts_T_tf); } - private TimedPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) { + /** + * Fuse a list of poses using inverse variable weighting + * + * @param alignedAtTF List of aligned poses + * @param tFusion Fusion timestamp + * @return Fuesed TimedPose object + */ + private TimedPose inverseVarianceFuse(ArrayList alignedAtTF, double tFusion) { + // If size of alignedAtTF is 0 or 1, return null or the only value if (alignedAtTF == null || alignedAtTF.isEmpty()) return null; if (alignedAtTF.size() == 1) return alignedAtTF.get(0); + // Define summing values double sumWx = 0.0, sumWy = 0.0, sumWth = 0.0; double sumX = 0.0, sumY = 0.0; double sumCos = 0.0, sumSin = 0.0; + // Loop over poses in the list for (int i = 0; i < alignedAtTF.size(); i++) { final TimedPose e = alignedAtTF.get(i); final Pose2d p = e.pose(); @@ -462,9 +527,10 @@ private TimedPose inverseVarianceFuse(ArrayList alignedAtTF, double t sumSin += rot.getSin() * wth; } - // If everything got skipped, fail closed. + // If everything got skipped; return null if (sumWx <= 0.0 || sumWy <= 0.0 || sumWth <= 0.0) return null; + // Construct the fused translation final Translation2d fusedTranslation = new Translation2d(sumX / sumWx, sumY / sumWy); // Rotation2d(cos, sin) will normalize internally; if both are ~0, fall back to zero. @@ -473,15 +539,23 @@ private TimedPose inverseVarianceFuse(ArrayList alignedAtTF, double t ? 
Rotation2d.kZero : new Rotation2d(sumCos, sumSin); + + // The fused pose is the combination of translation and rotation final Pose2d fusedPose = new Pose2d(fusedTranslation, fusedRotation); + // Fused standard deviations final Matrix fusedStdDevs = VecBuilder.fill(Math.sqrt(1.0 / sumWx), Math.sqrt(1.0 / sumWy), Math.sqrt(1.0 / sumWth)); - return new TimedPose(fusedPose, tF, fusedStdDevs); + // Construct and return the TimedPose object + return new TimedPose(fusedPose, tFusion, fusedStdDevs); } + /************************************************************************* */ /** Smoothing buffer ***************************************************** */ + + /** THIS LIKELY HAS PROBLEMS */ + + /** Push the fused pose */ private void pushFused(TimedPose fused) { fusedBuffer.addLast(fused); @@ -497,18 +571,18 @@ private void pushFused(TimedPose fused) { } } - private TimedPose smoothAtTime(double tF) { + private TimedPose smoothAtTime(double tFusion) { if (fusedBuffer.isEmpty()) return null; if (fusedBuffer.size() == 1) return fusedBuffer.peekLast(); final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); for (var e : fusedBuffer) { - Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); + Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tFusion); if (alignedPose == null) continue; - aligned.add(new TimedPose(alignedPose, tF, e.stdDevs())); + aligned.add(new TimedPose(alignedPose, tFusion, e.stdDevs())); } if (aligned.isEmpty()) return fusedBuffer.peekLast(); - return inverseVarianceFuse(aligned, tF); + return inverseVarianceFuse(aligned, tFusion); } } From f6a938e526e4ad84c784f581f690a8d416a52631 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 16 Feb 2026 10:01:06 -0700 Subject: [PATCH 14/19] PoseBuffer: Correctly align timestamps and poses MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit That plot (purple exploding to tens of thousands of feet) is exactly what you see when the time-alignment math is applying the odometry delta in the wrong reference frame. The fused pose stays sane because it’s only aligning raw camera estimates (which are usually already close to odom), but the smoother repeatedly re-aligns already-fused field poses across a sliding window — and a small frame mistake turns into a huge “ping-pong” once you do it over and over.
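For reference, here is a minimal sketch of the frame-consistent alignment this patch is after, assuming standard WPILib geometry semantics (`Pose2d.minus()` returns the relative `Transform2d` between two poses, and `transformBy()` re-applies it); the names `visionAtTs`, `odomAtTs`, and `odomAtTf` are placeholders, not the actual fields in `Vision.java`.

```java
// Sketch of frame-consistent time alignment, assuming WPILib geometry semantics:
// Pose2d.minus() yields the relative Transform2d between two odometry poses, and
// transformBy() applies that same robot-relative motion to a vision pose. The
// names visionAtTs, odomAtTs, and odomAtTf are placeholders, not fields in Vision.java.
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Transform2d;

public final class TimeAlignSketch {
  private TimeAlignSketch() {}

  /** Carry a camera estimate captured at time ts forward to the fusion time tf. */
  public static Pose2d alignForward(Pose2d visionAtTs, Pose2d odomAtTs, Pose2d odomAtTf) {
    // Odometry motion from ts to tf, expressed relative to the pose at ts
    Transform2d tsToTf = odomAtTf.minus(odomAtTs);
    // Apply that motion once, to the raw camera estimate only; re-applying it to
    // already-aligned (fused) poses is what produces the runaway behavior described above
    return visionAtTs.transformBy(tsToTf);
  }
}
```

The key point matching the commit description is that the odometry delta is applied exactly once per raw camera estimate, never repeatedly to already-fused field poses.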
--- doc/RBSI-PoseBuffer.md | 19 + src/main/java/frc/robot/Constants.java | 9 +- src/main/java/frc/robot/Robot.java | 3 +- src/main/java/frc/robot/RobotContainer.java | 21 +- .../accelerometer/Accelerometer.java | 4 +- .../frc/robot/subsystems/drive/Drive.java | 73 +++- .../robot/subsystems/drive/DriveOdometry.java | 69 ++- .../robot/subsystems/drive/ModuleIOSim.java | 4 +- .../frc/robot/subsystems/imu/ImuIOSim.java | 4 +- .../frc/robot/subsystems/vision/Vision.java | 401 +++++++++++++----- .../frc/robot/subsystems/vision/VisionIO.java | 9 +- .../subsystems/vision/VisionIOLimelight.java | 81 ++-- .../vision/VisionIOPhotonVision.java | 16 +- src/main/java/frc/robot/util/Alert.java | 3 +- .../ConcurrentTimeInterpolatableBuffer.java | 59 ++- src/main/java/frc/robot/util/TimeUtil.java | 32 ++ 16 files changed, 619 insertions(+), 188 deletions(-) create mode 100644 src/main/java/frc/robot/util/TimeUtil.java diff --git a/doc/RBSI-PoseBuffer.md b/doc/RBSI-PoseBuffer.md index efbb828b..60823c4a 100644 --- a/doc/RBSI-PoseBuffer.md +++ b/doc/RBSI-PoseBuffer.md @@ -129,3 +129,22 @@ aiming, and reduced oscillation under aggressive maneuvers -- because control decisions are based on a pose model that more faithfully represents the real robot’s motion and sensor timing rather than a simplified "latest value only" approximation. + + +### Behavior when the Robot is Disabled + +In normal operation (robot enabled), vision measurements are incorporated using standard Kalman fusion via addVisionMeasurement(). This is a probabilistic update: the estimator weighs the vision measurement against the predicted state based on covariance, producing a smooth, statistically optimal correction. Small errors are gently nudged toward vision; large discrepancies are handled proportionally according to the reported measurement uncertainty. This is the correct and intended behavior for a moving robot. + +When the robot is disabled, however, the estimator is no longer operating in a dynamic system. Wheel odometry is effectively static, process noise collapses, and repeated vision corrections can cause pathological estimator behavior (particularly in translation). Instead of performing another Kalman update in this regime, the system switches to a controlled pose blending strategy. Each accepted vision pose is blended toward the current estimate using a fixed interpolation factor (e.g., 10–20%), and the estimator is explicitly reset to that blended pose. This produces a gradual convergence toward the vision solution without allowing covariance collapse or runaway corrections. + +The result is a stable and intuitive pre-match behavior: while disabled, the robot will slowly “walk” its pose estimate toward the vision solution if needed, but it will not snap violently or diverge numerically. Once enabled, the system seamlessly returns to proper Kalman fusion, where vision and odometry interact in a fully dynamic and statistically grounded manner. + +### Design Rationale – Split Enabled/Disabled Vision Handling + +The core reason for separating enabled and disabled behavior is that the Kalman filter assumes a **dynamic system model**. When the robot is enabled, the drivetrain is actively moving and the estimator continuously predicts forward using wheel odometry and gyro inputs. Vision measurements then act as bounded corrections against that prediction. In this regime, Kalman fusion is mathematically appropriate: process noise and measurement noise are balanced, covariance evolves realistically, and corrections remain stable. 
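To make the blending step concrete, here is a minimal sketch assuming a stock `SwerveDrivePoseEstimator` and limits in the spirit of the `kDisabledVisionBlendAlpha` and `kDisabledVisionMaxJumpM` constants added by this patch; the gating details and method wiring are illustrative, not the exact subsystem code.

```java
// Sketch of the disabled-mode blending described above. Assumptions: the estimator is a
// stock SwerveDrivePoseEstimator, the caller supplies the current gyro yaw and module
// positions, and the hard-coded limits mirror kDisabledVisionBlendAlpha /
// kDisabledVisionMaxJumpM from this patch; the real subsystem wiring may differ.
import edu.wpi.first.math.estimator.SwerveDrivePoseEstimator;
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.kinematics.SwerveModulePosition;

public final class DisabledBlendSketch {
  private DisabledBlendSketch() {}

  /** Nudge the current estimate a fraction of the way toward an accepted vision pose. */
  public static void blendWhileDisabled(
      SwerveDrivePoseEstimator estimator,
      Pose2d visionPose,
      Rotation2d gyroYaw,
      SwerveModulePosition[] modulePositions,
      double alpha) { // e.g. 0.15, cf. kDisabledVisionBlendAlpha
    Pose2d current = estimator.getEstimatedPosition();

    // Reject obviously inconsistent measurements instead of walking toward them
    double jumpMeters = visionPose.getTranslation().getDistance(current.getTranslation());
    if (jumpMeters > 2.0) { // cf. kDisabledVisionMaxJumpM
      return;
    }

    // Interpolate a fixed fraction of the way from the current estimate to the vision pose
    Pose2d blended = current.interpolate(visionPose, alpha);

    // Explicitly reset to the blended pose; no Kalman measurement update while static
    estimator.resetPosition(gyroYaw, modulePositions, blended);
  }
}
```

Because the reset bypasses the Kalman measurement update entirely, covariance cannot collapse around a stationary measurement stream, and each correction stays bounded by the blend factor.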
+ +When the robot is disabled, however, the system is no longer dynamic. Wheel distances stop changing, gyro rate is near zero, and process noise effectively collapses. If vision continues injecting measurements into the estimator with timestamps that are slightly offset from the estimator’s internal state, the filter can enter a degenerate regime. Because translational covariance may shrink aggressively while no true motion exists, even small inconsistencies between time-aligned vision and odometry can produce disproportionately large corrections. This is why translation can numerically “explode” while rotation often remains stable—rotation is typically better constrained by the gyro and wraps naturally, while translation depends more directly on integrated wheel deltas and covariance scaling. + +The disabled blending pattern avoids this pathological case by temporarily stepping outside strict Kalman fusion. Instead of applying repeated measurement updates against a near-zero process model, we treat vision as a slowly converging reference and explicitly blend the current estimate toward it. This maintains numerical stability, prevents covariance collapse artifacts, and still allows the pose to settle accurately before a match begins. + +Once the robot transitions back to enabled, the estimator resumes normal probabilistic fusion with a healthy process model. The split-mode approach therefore preserves mathematical correctness while the robot is moving, and guarantees numerical stability while it is stationary. diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 7e3af173..59647ab7 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -30,6 +30,7 @@ import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.system.plant.DCMotor; +import edu.wpi.first.math.util.Units; import edu.wpi.first.units.measure.Mass; import edu.wpi.first.wpilibj.PowerDistribution; import edu.wpi.first.wpilibj.RobotBase; @@ -324,8 +325,14 @@ public static final class DrivebaseConstants { public static final double kSteerD = 0.02; public static final double kSteerS = 2.0; - // Odometry-related constants + // Odometry-related constants ================================== public static final double kHistorySize = 1.5; // seconds + // How aggressively to pull pose toward vision while DISABLED. + // 0.10 = gentle, 0.25 = fairly quick, 1.0 = full snap. + public static final double kDisabledVisionBlendAlpha = 0.15; + // Optional: ignore obviously insane measurements while disabled. 
+ public static final double kDisabledVisionMaxJumpM = 2.0; // meters + public static final double kDisabledVisionMaxJumpRad = Units.degreesToRadians(20.0); } /************************************************************************* */ diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index b9668ae3..92196f64 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -25,6 +25,7 @@ import edu.wpi.first.wpilibj2.command.Command; import edu.wpi.first.wpilibj2.command.CommandScheduler; import frc.robot.Constants.PowerConstants; +import frc.robot.util.TimeUtil; import frc.robot.util.VirtualSubsystem; import org.littletonrobotics.junction.LogFileUtil; import org.littletonrobotics.junction.LoggedPowerDistribution; @@ -192,7 +193,7 @@ public void autonomousInit() { CommandScheduler.getInstance().cancelAll(); m_robotContainer.getDrivebase().setMotorBrake(true); m_robotContainer.getDrivebase().resetHeadingController(); - m_robotContainer.getVision().resetPoseGate(Timer.getFPGATimestamp()); + m_robotContainer.getVision().resetPoseGate(TimeUtil.now()); // TODO: Make sure Gyro inits here with whatever is in the path planning thingie switch (Constants.getAutoType()) { diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 5b74cf60..e80e242d 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -222,7 +222,9 @@ public RobotContainer() { m_flywheel = new Flywheel(new FlywheelIO() {}); m_vision = - new Vision(m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); + new Vision( + m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReplay(m_drivebase)); + m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -563,8 +565,21 @@ private VisionIO[] buildVisionIOsSim(Drive drive) { } // Vision Factories (REPLAY) - private VisionIO[] buildVisionIOsReplay() { - return new VisionIO[] {}; // simplest: Vision does nothing during replay + private VisionIO[] buildVisionIOsReplay(Drive drive) { + var cams = Constants.Cameras.ALL; + + VisionIO[] ios = new VisionIO[cams.length]; + for (int i = 0; i < cams.length; i++) { + ios[i] = + new VisionIO() { + @Override + public void updateInputs(VisionIOInputs inputs) { + // Intentionally empty. + // Logger.processInputs("Vision/Camera" + i, inputs) will populate these from the log. 
+ } + }; + } + return ios; } /** diff --git a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java index 0b5ff652..205edd44 100644 --- a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java +++ b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java @@ -14,10 +14,10 @@ package frc.robot.subsystems.accelerometer; import edu.wpi.first.math.geometry.Translation3d; -import edu.wpi.first.wpilibj.Timer; import frc.robot.Constants; import frc.robot.Constants.RobotConstants; import frc.robot.subsystems.imu.Imu; +import frc.robot.util.TimeUtil; import frc.robot.util.VirtualSubsystem; import org.littletonrobotics.junction.Logger; @@ -95,7 +95,7 @@ public void rbsiPeriodic() { final double[] ts = imuInputs.odometryYawTimestamps; if (ts.length > 0) { - Logger.recordOutput("IMU/OdometryLatencySec", Timer.getFPGATimestamp() - ts[ts.length - 1]); + Logger.recordOutput("IMU/OdometryLatencySec", TimeUtil.now() - ts[ts.length - 1]); } } } diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 8ebda99f..30981328 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -37,7 +37,6 @@ import edu.wpi.first.wpilibj.Alert.AlertType; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStation.Alliance; -import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj2.command.Command; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; import frc.robot.Constants; @@ -50,6 +49,7 @@ import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; import frc.robot.util.RBSISubsystem; +import frc.robot.util.TimeUtil; import frc.robot.util.TimedPose; import java.util.Optional; import java.util.OptionalDouble; @@ -264,6 +264,10 @@ public void rbsiPeriodic() { */ @Override public void simulationPeriodic() { + + // IMPORTANT: do not run sim physics during REPLAY + if (Constants.getMode() != Mode.SIM) return; + final double dt = Constants.loopPeriodSecs; // Advance module wheel physics @@ -477,6 +481,14 @@ public Optional getPoseAtTime(double timestampSeconds) { return poseBuffer.getSample(timestampSeconds); } + public double getPoseBufferOldestTime() { + return poseBuffer.getOldestTimestamp().getAsDouble(); + } + + public double getPoseBufferNewestTime() { + return poseBuffer.getNewestTimestamp().getAsDouble(); + } + /** * Max abs yaw rate over [t0, t1] using buffered yaw-rate history * @@ -571,7 +583,7 @@ public double getFFCharacterizationVelocity() { */ public void resetPose(Pose2d pose) { m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), pose); - markPoseReset(Timer.getFPGATimestamp()); + markPoseReset(TimeUtil.now()); } /** Zeros the gyro based on alliance color */ @@ -581,28 +593,69 @@ public void zeroHeadingForAlliance() { ? 
Rotation2d.kZero : Rotation2d.k180deg); resetHeadingController(); - markPoseReset(Timer.getFPGATimestamp()); + markPoseReset(TimeUtil.now()); } /** Zeros the gyro regardless of the alliance */ public void zeroHeading() { imu.zeroYaw(Rotation2d.kZero); resetHeadingController(); - markPoseReset(Timer.getFPGATimestamp()); + markPoseReset(TimeUtil.now()); } /** * Adds a vision measurement safely into the PoseEstimator * - * @param timedPose The pose @ timestamp to add to the pose estimator + * @param measurement The pose @ timestamp to add to the pose estimator */ - public void addVisionMeasurement(TimedPose timedPose) { - odometryLock.lock(); + // Called by Vision via consumer.accept(TimedPose) + public void addVisionMeasurement(TimedPose meas) { + Drive.odometryLock.lock(); try { - m_PoseEstimator.addVisionMeasurement( - timedPose.pose(), timedPose.timestampSeconds(), timedPose.stdDevs()); + // Always use measurement timestamp when fusing (enabled path) + final double t = meas.timestampSeconds(); + + if (!DriverStation.isDisabled()) { + // ENABLED: normal soft fusion + m_PoseEstimator.addVisionMeasurement(meas.pose(), t, meas.stdDevs()); + return; + } + + // DISABLED: blend toward vision, then reset estimator to blended pose + final Pose2d current = m_PoseEstimator.getEstimatedPosition(); + final Pose2d vision = meas.pose(); + + // Optional sanity gate while disabled (prevents one bad frame from wrecking you) + final double dTrans = current.getTranslation().getDistance(vision.getTranslation()); + final double dRot = Math.abs(current.getRotation().minus(vision.getRotation()).getRadians()); + if (dTrans > DrivebaseConstants.kDisabledVisionMaxJumpM + || dRot > DrivebaseConstants.kDisabledVisionMaxJumpRad) { + Logger.recordOutput("Vision/DisabledReject", true); + Logger.recordOutput("Vision/DisabledReject_dTransM", dTrans); + Logger.recordOutput("Vision/DisabledReject_dRotRad", dRot); + return; + } + Logger.recordOutput("Vision/DisabledReject", false); + + // Controlled “soft snap” + final Pose2d blended = + current.interpolate(vision, DrivebaseConstants.kDisabledVisionBlendAlpha); + + // Reset estimator to blended pose (heading/modules are "now", but robot can't move while + // disabled) + m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), blended); + + // Mark reset so Vision gates old samples, etc. 
+ markPoseReset(TimeUtil.now()); + + // Keep pose buffer in sync immediately (helps timeAlignPose calls) + poseBufferAddSample(TimeUtil.now(), blended); + + Logger.recordOutput( + "Vision/DisabledBlendAlpha", DrivebaseConstants.kDisabledVisionBlendAlpha); + Logger.recordOutput("Vision/DisabledBlendedPose", blended); } finally { - odometryLock.unlock(); + Drive.odometryLock.unlock(); } } diff --git a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java index 99250649..900bd15b 100644 --- a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java +++ b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java @@ -19,10 +19,11 @@ import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.kinematics.SwerveModulePosition; -import edu.wpi.first.wpilibj.Timer; +import edu.wpi.first.wpilibj.DriverStation; import frc.robot.Constants; import frc.robot.subsystems.imu.Imu; import frc.robot.util.RBSIEnum.Mode; +import frc.robot.util.TimeUtil; import frc.robot.util.VirtualSubsystem; import org.littletonrobotics.junction.Logger; @@ -59,31 +60,55 @@ protected int getPeriodPriority() { public void rbsiPeriodic() { Drive.odometryLock.lock(); try { - // Ensure IMU inputs are fresh for this cycle final var imuInputs = imu.getInputs(); - // Drain per-module odometry queues ONCE per loop (this also refreshes motor signals) + // Drain per-module odometry queues ONCE per loop (refresh signals). for (var module : modules) { module.periodic(); } - if (Constants.getMode() == Mode.SIM) { - // SIMULATION: Keep sim pose buffer time-aligned, too - final double now = Timer.getFPGATimestamp(); + final boolean isReplayActive = Logger.hasReplaySource(); + + // Pure SIM (not replaying a log): use sim pose/yaw + if (Constants.getMode() == Mode.SIM && !isReplayActive) { + final double now = TimeUtil.now(); drive.poseBufferAddSample(now, drive.getSimPose()); drive.yawBuffersAddSample(now, drive.getSimYawRad(), drive.getSimYawRateRadPerSec()); Logger.recordOutput("Drive/Pose", drive.getSimPose()); return; } + // DISABLED (REAL or REPLAY): minimal ticking — keep buffers alive, do NOT integrate module + // deltas. + // Exception: if you *want* replay odometry integration while disabled, remove the + // DriverStation + // guard and keep the original replay loop. + if (DriverStation.isDisabled() && !isReplayActive) { + final double now = TimeUtil.now(); + + // keep yaw buffers alive + if (imuInputs.connected) { + drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); + } + + // keep pose buffer alive with the *current estimator pose* (which can be blended by + // disabled vision) + drive.poseBufferAddSample(now, drive.poseEstimatorGetPose()); + Logger.recordOutput("Drive/Pose", drive.poseEstimatorGetPose()); + drive.setGyroDisconnectedAlert(!imuInputs.connected); + return; + } + // Canonical timestamp queue from module[0] final double[] ts = modules[0].getOdometryTimestamps(); final int n = (ts == null) ? 
0 : ts.length; // Always keep yaw buffers “alive” even if no samples if (n == 0) { - final double now = Timer.getFPGATimestamp(); - drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); + if (Constants.getMode() != Mode.REPLAY) { + final double now = TimeUtil.now(); + drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); + } drive.setGyroDisconnectedAlert(!imuInputs.connected); return; } @@ -126,7 +151,7 @@ public void rbsiPeriodic() { drive.yawBuffersFillFromQueue(yawTs, yawPos); } else if (!hasYawQueue) { // Single “now” sample once (not per replay) - final double now = Timer.getFPGATimestamp(); + final double now = TimeUtil.now(); drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); } @@ -158,6 +183,32 @@ public void rbsiPeriodic() { } } + // Debugging + Logger.recordOutput("Odometry/Debug/timestamp", t); + Logger.recordOutput("Odometry/Debug/now", TimeUtil.now()); + if (i > 0) { + Logger.recordOutput("Odometry/Debug/timeNowDiff", t - ts[i - 1]); + } + + Logger.recordOutput("Odometry/Debug/replay_t", t); + Logger.recordOutput("Odometry/Debug/replay_yawRad", yawRad); + + double[] lastDist = new double[4]; + boolean firstSample = true; + for (int m = 0; m < 4; m++) { + SwerveModulePosition pos = odomPositions[m]; + double dist = pos.distanceMeters; + + Logger.recordOutput("Odometry/Debug/mod" + m + "_distanceMeters", dist); + Logger.recordOutput("Odometry/Debug/mod" + m + "_angleRad", pos.angle.getRadians()); + if (!firstSample) { + double delta = dist - lastDist[m]; + Logger.recordOutput("Odometry/Debug/mod" + m + "_deltaMeters", delta); + } + + lastDist[m] = dist; + } + firstSample = false; // Feed estimator at this historical timestamp drive.poseEstimatorUpdateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); // Maintain pose history in SAME timebase as estimator diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOSim.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOSim.java index 8fb64fe4..ec87f59e 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOSim.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOSim.java @@ -15,9 +15,9 @@ import edu.wpi.first.math.system.plant.DCMotor; import edu.wpi.first.math.system.plant.LinearSystemId; import edu.wpi.first.math.util.Units; -import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.simulation.DCMotorSim; import frc.robot.Constants; +import frc.robot.util.TimeUtil; /** * Physics sim implementation of module IO. 
The sim models are configured using a set of module @@ -116,7 +116,7 @@ public void updateInputs(ModuleIOInputs inputs) { inputs.turnCurrentAmps = Math.abs(turnSim.getCurrentDrawAmps()); // Odometry (single sample per loop is fine) - inputs.odometryTimestamps = new double[] {Timer.getFPGATimestamp()}; + inputs.odometryTimestamps = new double[] {TimeUtil.now()}; inputs.odometryDrivePositionsRad = new double[] {mechanismPositionRad}; inputs.odometryTurnPositions = new Rotation2d[] {inputs.turnPosition}; } diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java b/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java index a28212ce..102b60e7 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java @@ -19,7 +19,7 @@ import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.util.Units; -import edu.wpi.first.wpilibj.Timer; +import frc.robot.util.TimeUtil; import org.littletonrobotics.junction.Logger; /** Simulated IMU for full robot simulation & replay logging */ @@ -73,7 +73,7 @@ public void updateInputs(ImuIOInputs inputs) { inputs.linearJerk = Translation3d.kZero; // Maintain odometry history - pushOdomSample(Timer.getFPGATimestamp(), yawRad); + pushOdomSample(TimeUtil.now(), yawRad); // Export odometry arrays (copy out in chronological order) final int n = odomSize; diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 271904be..0753b67a 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -88,6 +88,13 @@ public interface PoseMeasurementConsumer { // Variance minimum for fusing poses to prevent divide-by-zero explosions private static final double kMinVariance = 1e-12; + // Fields + private Pose2d lastFusedPose = new Pose2d(); + private Pose2d lastSmoothedPose = new Pose2d(); + private double lastFusedTs = Double.NaN; + private boolean lastFusedValid = false; + private boolean lastSmoothedValid = false; + /** Constructor */ public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... 
io) { this.drive = drive; @@ -124,115 +131,182 @@ protected int getPeriodPriority() { @Override public void rbsiPeriodic() { - // Pose reset logic and logging - long epoch = drive.getPoseResetEpoch(); - if (epoch != lastSeenPoseResetEpoch) { - lastSeenPoseResetEpoch = epoch; - resetPoseGate(drive.getLastPoseResetTimestamp()); - Logger.recordOutput("Vision/PoseGateResetFromDrive", true); - } else { - Logger.recordOutput("Vision/PoseGateResetFromDrive", false); - } + boolean hasAcceptedThisLoop = false; + boolean hasFusedThisLoop = false; + boolean hasSmoothedThisLoop = false; + + // Default debug outputs (so keys exist even if we return early) + double dbgAlignDt = Double.NaN; + double dbgDeltaTranslation = Double.NaN; + double dbgDeltaRotation = Double.NaN; + boolean dbgAlignFinite = false; + + try { + + lastAlignDbg.reset(); + // ---------------------------------------------------------------------- + // 1) Pose reset gate (clears smoothing state, resets per-cam monotonic gates) + // ---------------------------------------------------------------------- + long epoch = drive.getPoseResetEpoch(); + if (epoch != lastSeenPoseResetEpoch) { + lastSeenPoseResetEpoch = epoch; + resetPoseGate(drive.getLastPoseResetTimestamp()); + Logger.recordOutput("Vision/PoseGateResetFromDrive", true); + } else { + Logger.recordOutput("Vision/PoseGateResetFromDrive", false); + } - // Update & log camera inputs - for (int i = 0; i < io.length; i++) { - io[i].updateInputs(inputs[i]); - Logger.processInputs("Vision/Camera" + i, inputs[i]); - } + // ---------------------------------------------------------------------- + // 2) Read camera inputs (REAL/SIM/REPLAY all go through IO inputs) + // ---------------------------------------------------------------------- + for (int i = 0; i < io.length; i++) { + io[i].updateInputs(inputs[i]); + Logger.processInputs("Vision/Camera" + i, inputs[i]); + } - // Pick the one best accepted estimate per camera for this loop - final ArrayList perCamAccepted = new ArrayList<>(io.length); + // Optional always-on “health” debug + Logger.recordOutput("Vision/Debug/ioLength", io.length); + int totalObs = 0; + for (int i = 0; i < io.length; i++) { + totalObs += (inputs[i].poseObservations != null) ? 
inputs[i].poseObservations.length : 0; + } + Logger.recordOutput("Vision/Debug/totalObsThisLoop", totalObs); - // Loop over cameras - for (int cam = 0; cam < io.length; cam++) { + // ---------------------------------------------------------------------- + // 3) Choose best observation per camera for THIS loop + // ---------------------------------------------------------------------- + final ArrayList perCamAccepted = new ArrayList<>(io.length); - // Instantiate variables for this camera - int seen = 0; - int accepted = 0; - int rejected = 0; - TimedPose best = null; - double bestTrustScale = Double.NaN; - int bestTrustedCount = 0; - int bestTagCount = 0; + for (int cam = 0; cam < io.length; cam++) { - // Loop over observations for this camera this loop - for (var obs : inputs[cam].poseObservations) { + int seen = 0; + int accepted = 0; + int rejected = 0; - // Increment - seen++; + TimedPose best = null; + double bestTrustScale = Double.NaN; + int bestTrustedCount = 0; + int bestTagCount = 0; - // Check the gating criteria; move on if bad - GateResult gate = passesHardGatesAndYawGate(cam, obs); - Logger.recordOutput("Vision/Camera" + cam + "/GateFail", gate.reason); - if (!gate.accepted) { - rejected++; + final var obsArr = inputs[cam].poseObservations; + if (obsArr == null) { + Logger.recordOutput("Vision/Camera" + cam + "/ObsSeen", 0); + Logger.recordOutput("Vision/Camera" + cam + "/ObsAccepted", 0); + Logger.recordOutput("Vision/Camera" + cam + "/ObsRejected", 0); continue; } - // Build a pose estimate; move on if bad - BuiltEstimate built = buildEstimate(cam, obs); - if (built == null) { - rejected++; - continue; + for (var obs : obsArr) { + seen++; + + GateResult gate = passesHardGatesAndYawGate(cam, obs); + Logger.recordOutput("Vision/Camera" + cam + "/GateFail", gate.reason); + if (!gate.accepted) { + rejected++; + continue; + } + + BuiltEstimate built = buildEstimate(cam, obs); + if (built == null) { + rejected++; + continue; + } + + // Prefer “best” by your scoring function + if (best == null || isBetter(built.estimate, best)) { + best = built.estimate; + bestTrustScale = built.trustScale; + bestTrustedCount = built.trustedCount; + bestTagCount = obs.tagCount(); + } } - // If this estimate is better than the existing "best", update this -> best - TimedPose est = built.estimate; - if (best == null || isBetter(est, best)) { - best = est; - bestTrustScale = built.trustScale; - bestTrustedCount = built.trustedCount; - bestTagCount = obs.tagCount(); + if (best != null) { + accepted++; + lastAcceptedTsPerCam[cam] = best.timestampSeconds(); + perCamAccepted.add(best); + + Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose()); + Logger.recordOutput( + "Vision/Camera" + cam + "/InjectedTimestamp", best.timestampSeconds()); + Logger.recordOutput( + "Vision/Camera" + cam + "/InjectedStdDevs", stdDevsToArray(best.stdDevs())); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustScale", bestTrustScale); + Logger.recordOutput( + "Vision/Camera" + cam + "/LastAcceptedTrustedCount", bestTrustedCount); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTagCount", bestTagCount); } - } - // Add an accepted measurement, and update - if (best != null) { - accepted++; - lastAcceptedTsPerCam[cam] = best.timestampSeconds(); - perCamAccepted.add(best); - - Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose()); - Logger.recordOutput("Vision/Camera" + cam + "/InjectedTimestamp", best.timestampSeconds()); - Logger.recordOutput("Vision/Camera" 
+ cam + "/InjectedStdDevs", best.stdDevs()); - Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustScale", bestTrustScale); - Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustedCount", bestTrustedCount); - Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTagCount", bestTagCount); + Logger.recordOutput("Vision/Camera" + cam + "/ObsSeen", seen); + Logger.recordOutput("Vision/Camera" + cam + "/ObsAccepted", accepted); + Logger.recordOutput("Vision/Camera" + cam + "/ObsRejected", rejected); } - // Log everything from this camera - Logger.recordOutput("Vision/Camera" + cam + "/ObsSeen", seen); - Logger.recordOutput("Vision/Camera" + cam + "/ObsAccepted", accepted); - Logger.recordOutput("Vision/Camera" + cam + "/ObsRejected", rejected); - } + Logger.recordOutput("Vision/Debug/perCamAcceptedSize", perCamAccepted.size()); - // If no "accepted" pose measurements from this loop, return now - if (perCamAccepted.isEmpty()) return; - - // Fusion time is the newest timestamp among accepted per-camera samples; return if NaN - double tFusion = - perCamAccepted.stream().mapToDouble(e -> e.timestampSeconds()).max().orElse(Double.NaN); - if (!Double.isFinite(tFusion)) return; - - // Time-align camera estimates to tFusion using odometry buffer, then inverse-variance fuse; - // return if null - TimedPose fused = fuseAtTime(perCamAccepted, tFusion); - if (fused == null) return; - - // Smooth by fusing recent fused estimates (also aligned to tFusion); return if null - // NOTE: THIS IS WHERE THE PROBLEM LIES, I THINK. THE FUSED POSE IS OKAY, BUT THE SMOOTHED POSE - // IS NOT!!! - pushFused(fused); - TimedPose smoothed = smoothAtTime(tFusion); - if (smoothed == null) return; - - // Inject the pose - consumer.accept(smoothed); - - // Log the fused and smoothed poses along with tFusion - Logger.recordOutput("Vision/FusedPose", fused.pose()); - Logger.recordOutput("Vision/SmoothedPose", smoothed.pose()); - Logger.recordOutput("Vision/FusedTimestamp", tFusion); + if (perCamAccepted.isEmpty()) { + // No new vision accepted this loop; we still log cached outputs below (in finally). + return; + } + hasAcceptedThisLoop = true; + + // ---------------------------------------------------------------------- + // 4) Fuse all accepted cams at the newest timestamp among them + // ---------------------------------------------------------------------- + final double tFusion = + perCamAccepted.stream().mapToDouble(e -> e.timestampSeconds()).max().orElse(Double.NaN); + if (!Double.isFinite(tFusion)) return; + + final TimedPose fused = fuseAtTime(perCamAccepted, tFusion); + if (fused == null) return; + hasFusedThisLoop = true; + + // ---------------------------------------------------------------------- + // 5) Smooth by fusing recent fused estimates aligned to tFusion + // ---------------------------------------------------------------------- + pushFused(fused); + final TimedPose smoothed = smoothAtTime(tFusion); + if (smoothed == null) return; + hasSmoothedThisLoop = true; + + // ---------------------------------------------------------------------- + // 6) Update caches (ONLY HERE) + inject to drive + // ---------------------------------------------------------------------- + lastFusedPose = fused.pose(); + lastSmoothedPose = smoothed.pose(); + lastFusedTs = tFusion; + lastFusedValid = true; + lastSmoothedValid = true; + + consumer.accept(smoothed); + + // If you want, you can feed debug values from inside timeAlignPose(...) 
via fields, + // but leaving the plumbing as-is since you’re already logging inside helpers. + + } finally { + + // ---------------------------------------------------------------------- + // 7) “Ultra-clean” logging: one place, every loop, replay-safe + // ---------------------------------------------------------------------- + + // Always-present “outputs” + Logger.recordOutput("Vision/FusedPose", lastFusedPose); + Logger.recordOutput("Vision/SmoothedPose", lastSmoothedPose); + Logger.recordOutput("Vision/FusedTimestamp", lastFusedTs); + Logger.recordOutput("Vision/HasFused", lastFusedValid); + Logger.recordOutput("Vision/HasSmoothed", lastSmoothedValid); + + // Per-loop flags (never stale) + Logger.recordOutput("Vision/HasAcceptedThisLoop", hasAcceptedThisLoop); + Logger.recordOutput("Vision/HasFusedThisLoop", hasFusedThisLoop); + Logger.recordOutput("Vision/HasSmoothedThisLoop", hasSmoothedThisLoop); + + // Debug keys (exist even if not touched) + Logger.recordOutput("Vision/Debug/alignDt", lastAlignDbg.alignDt); + Logger.recordOutput("Vision/Debug/deltaTranslation", lastAlignDbg.deltaTranslation); + Logger.recordOutput("Vision/Debug/deltaRotation", lastAlignDbg.deltaRotation); + Logger.recordOutput("Vision/Debug/alignFinite", lastAlignDbg.alignFinite); + } } /************************************************************************* */ @@ -387,9 +461,11 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { // Trusted tag blending final Set kTrusted = trustedTags.get(); + final int[] usedIds = (obs.usedTagIds() != null) ? obs.usedTagIds() : new int[0]; + int trustedCount = 0; - for (int id : obs.usedTagIds()) { - if (kTrusted.contains(id)) trustedCount++; + for (int i = 0; i < usedIds.length; i++) { + if (kTrusted.contains(usedIds[i])) trustedCount++; } // If no trusted tags, return null @@ -398,7 +474,7 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { } // Build the trust scale - final int usedCount = obs.usedTagIds().size(); + final int usedCount = usedIds.length; final double fracTrusted = (usedCount == 0) ? 0.0 : ((double) trustedCount / usedCount); final double trustScale = untrustedTagStdDevScale + fracTrusted * (trustedTagStdDevScale - untrustedTagStdDevScale); @@ -406,9 +482,17 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { linearStdDev *= trustScale; angularStdDev *= trustScale; + linearStdDev = Math.max(linearStdDev, linearStdDevBaseline); + angularStdDev = Math.max(angularStdDev, angularStdDevBaseline); + // Output logs for tuning Logger.recordOutput("Vision/Camera" + cam + "/InjectedFracTrusted", fracTrusted); + Logger.recordOutput("Vision/Camera" + cam + "/Dbg_linearStdDev", linearStdDev); + Logger.recordOutput("Vision/Camera" + cam + "/Dbg_angularStdDev", angularStdDev); + Logger.recordOutput("Vision/Camera" + cam + "/Dbg_avgDist", avgDist); + Logger.recordOutput("Vision/Camera" + cam + "/Dbg_tagCount", obs.tagCount()); + return new BuiltEstimate( new TimedPose( obs.pose().toPose2d(), @@ -431,6 +515,34 @@ private boolean isBetter(TimedPose a, TimedPose b) { return ta < tb; } + /************************************************************************* */ + /** Debug snapshot for the most-recent successful alignment this loop. 
*/ + private static final class AlignDebug { + double alignDt = Double.NaN; + double deltaTranslation = Double.NaN; + double deltaRotation = Double.NaN; + boolean alignFinite = false; + + void reset() { + alignDt = Double.NaN; + deltaTranslation = Double.NaN; + deltaRotation = Double.NaN; + alignFinite = false; + } + + void set(double dt, Transform2d tf) { + alignDt = dt; + deltaTranslation = tf.getTranslation().getNorm(); + deltaRotation = tf.getRotation().getRadians(); + alignFinite = + Double.isFinite(alignDt) + && Double.isFinite(deltaTranslation) + && Double.isFinite(deltaRotation); + } + } + + private final AlignDebug lastAlignDbg = new AlignDebug(); + /************************************************************************* */ /** Time alignment & fusion ********************************************** */ @@ -443,7 +555,7 @@ private boolean isBetter(TimedPose a, TimedPose b) { private TimedPose fuseAtTime(ArrayList estimates, double tFusion) { final ArrayList aligned = new ArrayList<>(estimates.size()); for (var e : estimates) { - Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tFusion); + Pose2d alignedPose = timeAlignPoseFieldDelta(e.pose(), e.timestampSeconds(), tFusion); if (alignedPose == null) return null; aligned.add(new TimedPose(alignedPose, tFusion, e.stdDevs())); } @@ -454,7 +566,9 @@ private TimedPose fuseAtTime(ArrayList estimates, double tFusion) { * Align a pose to where it would have been at the fusion time * *

Gets the odometric poses at ts and tFusion from the drivebase PoseEstimator, computes the - * transform between them, and applies that to the vision pose. + * transform between them, and applies that to the vision pose. The correction is applied by + * finding the field-frame deltas for both translation and rotation, then returning a new Pose2d + * object that consists of the vision pose adjusted by the field-frame deltas. * * @param visionPoseAtTs The pose at ts * @param ts Timestamp of the pose @@ -462,18 +576,98 @@ private TimedPose fuseAtTime(ArrayList estimates, double tFusion) { * @return Transformed Pose2d */ private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tFusion) { + + Logger.recordOutput("Vision/Debug/ts", ts); + Logger.recordOutput("Vision/Debug/tFusion", tFusion); + Logger.recordOutput("Vision/Debug/alignDtMs", (tFusion - ts) * 1000.0); + + double dt = tFusion - ts; + Optional odomAtTsOpt = drive.getPoseAtTime(ts); Optional odomAtTFOpt = drive.getPoseAtTime(tFusion); // If empty, return null if (odomAtTsOpt.isEmpty() || odomAtTFOpt.isEmpty()) return null; - // Transform that takes odomAtTs -> odomAtTF + // Transform that takes odomAtTs -> odomAtTF (in odomAtTs frame) Transform2d ts_T_tf = odomAtTFOpt.get().minus(odomAtTsOpt.get()); - // Apply same motion to vision pose to bring it forward + double dtrans = ts_T_tf.getTranslation().getNorm(); + double drot = ts_T_tf.getRotation().getRadians(); + + boolean finite = + Double.isFinite(dt) + && Double.isFinite(dtrans) + && Double.isFinite(drot) + && Double.isFinite(odomAtTsOpt.get().getX()) + && Double.isFinite(odomAtTFOpt.get().getX()); + + // Even more debugging logging + Logger.recordOutput("Vision/Debug/alignDt", dt); + Logger.recordOutput("Vision/Debug/deltaTranslation", dtrans); + Logger.recordOutput("Vision/Debug/deltaRotation", drot); + Logger.recordOutput("Vision/Debug/alignFinite", finite); + Logger.recordOutput("Vision/Debug/odomAtTs", odomAtTsOpt.get()); + Logger.recordOutput("Vision/Debug/odomAtTF", odomAtTFOpt.get()); + + if (!finite) { + Logger.recordOutput("Vision/Debug/odomAtTs", odomAtTsOpt.get()); + Logger.recordOutput("Vision/Debug/odomAtTF", odomAtTFOpt.get()); + return null; + } + + // Debugging Logging + Logger.recordOutput("Vision/Debug/deltaTranslation", ts_T_tf.getTranslation().getNorm()); + Logger.recordOutput("Vision/Debug/deltaRotation", ts_T_tf.getRotation().getRadians()); + + // Apply the same SE(2) transform to the vision pose return visionPoseAtTs.transformBy(ts_T_tf); } + /** + * Align a pose to where it would have been at the fusion time + * + *

* + * + *

We compute: - dTrans = odomTF.translation - odomTs.translation (field frame) - dTheta = + * odomTF.rotation - odomTs.rotation (field frame / global heading delta) + * + *

Then apply those deltas directly to the vision pose at ts to estimate vision at tFusion. + * + *

Gets the odometric poses at ts and tFusion from the drivebase PoseEstimator, computes the + * transform between them, and applies that to the vision pose. The correction is applied by + * finding the field-frame deltas for both translation and rotation, then returning a new Pose2d + * object that consists of the vision pose adjusted by the field-frame deltas. + * + * @param visionPoseAtTs The pose at ts + * @param ts Timestamp of the pose + * @param tFusion Fusion timestamp + * @return Transformed Pose2d + */ + private Pose2d timeAlignPoseFieldDelta(Pose2d visionPoseAtTs, double ts, double tFusion) { + Optional odomAtTsOpt = drive.getPoseAtTime(ts); + Optional odomAtTFOpt = drive.getPoseAtTime(tFusion); + if (odomAtTsOpt.isEmpty() || odomAtTFOpt.isEmpty()) return null; + + final Pose2d odomAtTs = odomAtTsOpt.get(); + final Pose2d odomAtTF = odomAtTFOpt.get(); + + // FIELD-FRAME translation delta + final Translation2d dTrans = odomAtTF.getTranslation().minus(odomAtTs.getTranslation()); + + // Heading delta (Rotation2d handles wrapping) + final Rotation2d dTheta = odomAtTF.getRotation().minus(odomAtTs.getRotation()); + + // Update debug ONCE per loop (first successful alignment wins) + if (!lastAlignDbg.alignFinite) { + // For debug parity with the other version, package deltas as a Transform2d + lastAlignDbg.set(tFusion - ts, new Transform2d(dTrans, dTheta)); + } + + // Apply field-frame deltas to the vision pose + return new Pose2d( + visionPoseAtTs.getTranslation().plus(dTrans), visionPoseAtTs.getRotation().plus(dTheta)); + } + /** * Fuse a list of poses using inverse variable weighting * @@ -577,12 +771,19 @@ private TimedPose smoothAtTime(double tFusion) { final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); for (var e : fusedBuffer) { - Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tFusion); + Pose2d alignedPose = timeAlignPoseFieldDelta(e.pose(), e.timestampSeconds(), tFusion); if (alignedPose == null) continue; aligned.add(new TimedPose(alignedPose, tFusion, e.stdDevs())); + // Debugging Logging + Logger.recordOutput("Vision/Debug/deltaTime", tFusion - e.timestampSeconds()); } if (aligned.isEmpty()) return fusedBuffer.peekLast(); return inverseVarianceFuse(aligned, tFusion); } + + /** UTILITY FUNCTIONS **************************************************** */ + private static double[] stdDevsToArray(Matrix s) { + return new double[] {s.get(0, 0), s.get(1, 0), s.get(2, 0)}; + } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java index 7bc61ed2..dc249ec9 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ -11,7 +11,6 @@ import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; -import java.util.Set; import org.littletonrobotics.junction.AutoLog; public interface VisionIO { @@ -41,8 +40,12 @@ record PoseObservation( int tagCount, double averageTagDistance, PoseObservationType type, - Set usedTagIds // immutable per observation - ) {} + int[] usedTagIds) { + // Compact constructor to coalesce null to empty + public PoseObservation { + usedTagIds = (usedTagIds == null) ? 
new int[0] : usedTagIds; + } + } enum PoseObservationType { MEGATAG_1, diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java index 07c83c7b..a4c5d13b 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java @@ -18,6 +18,7 @@ import edu.wpi.first.networktables.DoubleSubscriber; import edu.wpi.first.networktables.NetworkTableInstance; import edu.wpi.first.wpilibj.RobotController; +import java.util.Arrays; import java.util.HashSet; import java.util.LinkedList; import java.util.List; @@ -55,57 +56,80 @@ public VisionIOLimelight(String name, Supplier rotationSupplier) { @Override public void updateInputs(VisionIOInputs inputs) { - // Update connection status based on whether an update has been seen in the last 250ms + // Update connection status inputs.connected = ((RobotController.getFPGATime() - latencySubscriber.getLastChange()) / 1000) < 250; - // Update target observation + // Target observation (simple tx/ty) inputs.latestTargetObservation = new TargetObservation( Rotation2d.fromDegrees(txSubscriber.get()), Rotation2d.fromDegrees(tySubscriber.get())); - // Update orientation for MegaTag 2 + // Push robot orientation for MegaTag 2 orientationPublisher.accept( new double[] {rotationSupplier.get().getDegrees(), 0.0, 0.0, 0.0, 0.0, 0.0}); - NetworkTableInstance.getDefault() - .flush(); // Increases network traffic but recommended by Limelight + NetworkTableInstance.getDefault().flush(); + + // Union of tag IDs seen this loop (for logging/UI only) + Set unionTagIds = new HashSet<>(); - // Read new pose observations from NetworkTables - Set tagIds = new HashSet<>(); List poseObservations = new LinkedList<>(); + + /* -------------------- MegaTag 1 -------------------- */ for (var rawSample : megatag1Subscriber.readQueue()) { if (rawSample.value.length == 0) continue; - for (int i = 11; i < rawSample.value.length; i += 7) { - tagIds.add((int) rawSample.value[i]); + + int tagCount = (int) rawSample.value[7]; + + // Build used tag array for THIS observation only + int[] used = new int[tagCount]; + int u = 0; + + for (int i = 11; i < rawSample.value.length && u < tagCount; i += 7) { + int id = (int) rawSample.value[i]; + used[u++] = id; + unionTagIds.add(id); } + poseObservations.add( new PoseObservation( - // Timestamp, based on server timestamp of publish and latency + // Timestamp (LL server time minus latency) rawSample.timestamp * 1.0e-6 - rawSample.value[6] * 1.0e-3, - // 3D pose estimate + // Pose parsePose(rawSample.value), - // Ambiguity, using only the first tag because ambiguity isn't applicable for multitag + // Ambiguity (first tag only) rawSample.value.length >= 18 ? 
rawSample.value[17] : 0.0, // Tag count - (int) rawSample.value[7], + tagCount, - // Average tag distance + // Avg distance rawSample.value[9], - // Observation type + // Type PoseObservationType.MEGATAG_1, - // Visible Tag IDs - tagIds)); + // Used tag IDs + used)); } + + /* -------------------- MegaTag 2 -------------------- */ for (var rawSample : megatag2Subscriber.readQueue()) { if (rawSample.value.length == 0) continue; - for (int i = 11; i < rawSample.value.length; i += 7) { - tagIds.add((int) rawSample.value[i]); + + int tagCount = (int) rawSample.value[7]; + + int[] used = new int[tagCount]; + int u = 0; + + for (int i = 11; i < rawSample.value.length && u < tagCount; i += 7) { + int id = (int) rawSample.value[i]; + used[u++] = id; + unionTagIds.add(id); } + poseObservations.add( new PoseObservation( // Timestamp, based on server timestamp of publish and latency @@ -125,23 +149,20 @@ public void updateInputs(VisionIOInputs inputs) { // Observation type PoseObservationType.MEGATAG_2, - - // Visible Tag IDs - tagIds)); + used)); } - // Save pose observations to inputs object - inputs.poseObservations = new PoseObservation[poseObservations.size()]; - for (int i = 0; i < poseObservations.size(); i++) { - inputs.poseObservations[i] = poseObservations.get(i); - } + /* -------------------- Save to inputs -------------------- */ - // Save tag IDs to inputs objects - inputs.tagIds = new int[tagIds.size()]; + inputs.poseObservations = poseObservations.toArray(new PoseObservation[0]); + + inputs.tagIds = new int[unionTagIds.size()]; int i = 0; - for (int id : tagIds) { + for (int id : unionTagIds) { inputs.tagIds[i++] = id; } + + Arrays.sort(inputs.tagIds); // optional but recommended } /** Parses the 3D pose from a Limelight botpose array. */ diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java index 14a2867b..b328385a 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java @@ -15,6 +15,7 @@ import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Transform3d; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashSet; import java.util.Set; import org.photonvision.PhotonCamera; @@ -77,10 +78,12 @@ public void updateInputs(VisionIOInputs inputs) { double avgTagDistance = result.targets.isEmpty() ? 
0.0 : (totalTagDistance / result.targets.size()); - Set used = new HashSet<>(); + // Build used tag list (loggable + replayable) + int[] used = new int[multitag.fiducialIDsUsed.size()]; + int u = 0; for (int id : multitag.fiducialIDsUsed) { - used.add(id); - unionTagIds.add(id); + used[u++] = id; + unionTagIds.add(id); // keep your union set for tagIds UI/log } poseObservations.add( @@ -91,7 +94,7 @@ public void updateInputs(VisionIOInputs inputs) { multitag.fiducialIDsUsed.size(), avgTagDistance, PoseObservationType.PHOTONVISION, - Set.copyOf(used))); + used)); } else if (!result.targets.isEmpty()) { var target = result.targets.get(0); @@ -117,7 +120,7 @@ public void updateInputs(VisionIOInputs inputs) { 1, cameraToTarget.getTranslation().getNorm(), PoseObservationType.PHOTONVISION, - Set.of(target.fiducialId))); + new int[] {target.fiducialId})); } } @@ -131,5 +134,8 @@ public void updateInputs(VisionIOInputs inputs) { inputs.tagIds = new int[unionTagIds.size()]; int i = 0; for (int id : unionTagIds) inputs.tagIds[i++] = id; + + // Sort the AprilTag IDs for ease of use by dashboards, etc. + Arrays.sort(inputs.tagIds); } } diff --git a/src/main/java/frc/robot/util/Alert.java b/src/main/java/frc/robot/util/Alert.java index 5f20d34a..202012d8 100644 --- a/src/main/java/frc/robot/util/Alert.java +++ b/src/main/java/frc/robot/util/Alert.java @@ -12,7 +12,6 @@ import edu.wpi.first.util.sendable.Sendable; import edu.wpi.first.util.sendable.SendableBuilder; import edu.wpi.first.wpilibj.DriverStation; -import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import java.util.ArrayList; import java.util.Comparator; @@ -66,7 +65,7 @@ public Alert(String group, String text, AlertType type) { */ public void set(boolean active) { if (active && !this.active) { - activeStartTime = Timer.getFPGATimestamp(); + activeStartTime = TimeUtil.now(); switch (type) { case ERROR: DriverStation.reportError(text, false); diff --git a/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java index df46efd2..3db0b253 100644 --- a/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java +++ b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java @@ -24,8 +24,10 @@ import edu.wpi.first.math.interpolation.Interpolator; import java.util.Map.Entry; import java.util.Optional; +import java.util.OptionalDouble; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListMap; +import org.littletonrobotics.junction.Logger; /** * A concurrent version of WPIlib's TimeInterpolatableBuffer class to avoid the need for explicit @@ -88,6 +90,9 @@ public static ConcurrentTimeInterpolatableBuffer createDoubleBuffer( * @param sample The sample object. 
*/ public void addSample(double timeSeconds, T sample) { + Logger.recordOutput("Odometry/Debug/lastPoseBufferAddTs", timeSeconds); + Logger.recordOutput("Odometry/Debug/nowTs", TimeUtil.now()); + m_pastSnapshots.put(timeSeconds, sample); cleanUp(timeSeconds); } @@ -126,26 +131,32 @@ public Optional getSample(double timeSeconds) { var bottomBound = m_pastSnapshots.floorEntry(timeSeconds); var topBound = m_pastSnapshots.ceilingEntry(timeSeconds); - // Return null if neither sample exists, and the opposite bound if the other is - // null - if (topBound == null && bottomBound == null) { - return Optional.empty(); - } else if (topBound == null) { + if (topBound == null && bottomBound == null) return Optional.empty(); + if (topBound == null) return Optional.of(bottomBound.getValue()); + if (bottomBound == null) return Optional.of(topBound.getValue()); + + // NEW: if they are the same sample, no interpolation possible/needed + if (topBound.getKey().doubleValue() == bottomBound.getKey().doubleValue()) { return Optional.of(bottomBound.getValue()); - } else if (bottomBound == null) { - return Optional.of(topBound.getValue()); - } else { - // Otherwise, interpolate. Because T is between [0, 1], we want the ratio of - // (the difference - // between the current time and bottom bound) and (the difference between top - // and bottom - // bounds). - return Optional.of( - m_interpolatingFunc.interpolate( - bottomBound.getValue(), - topBound.getValue(), - (timeSeconds - bottomBound.getKey()) / (topBound.getKey() - bottomBound.getKey()))); } + + double t0 = bottomBound.getKey(); + double t1 = topBound.getKey(); + double denom = t1 - t0; + + // (optional but good) + if (Math.abs(denom) < 1e-9) return Optional.of(bottomBound.getValue()); + + double ratio = (timeSeconds - t0) / denom; + ratio = MathUtil.clamp(ratio, 0.0, 1.0); + + Logger.recordOutput("Odometry/Debug/bottomKey", t0); + Logger.recordOutput("Odometry/Debug/topKey", t1); + Logger.recordOutput("Odometry/Debug/denom", denom); + Logger.recordOutput("Odometry/Debug/snaphotSize", m_pastSnapshots.size()); + + return Optional.of( + m_interpolatingFunc.interpolate(bottomBound.getValue(), topBound.getValue(), ratio)); } public Entry getLatest() { @@ -161,4 +172,16 @@ public Entry getLatest() { public ConcurrentNavigableMap getInternalBuffer() { return m_pastSnapshots; } + + /** Return the oldest timestamp in the buffer */ + public OptionalDouble getOldestTimestamp() { + if (m_pastSnapshots.isEmpty()) return OptionalDouble.empty(); + return OptionalDouble.of(m_pastSnapshots.firstKey()); + } + + /** Return the newest timestamp in the buffer */ + public OptionalDouble getNewestTimestamp() { + if (m_pastSnapshots.isEmpty()) return OptionalDouble.empty(); + return OptionalDouble.of(m_pastSnapshots.lastKey()); + } } diff --git a/src/main/java/frc/robot/util/TimeUtil.java b/src/main/java/frc/robot/util/TimeUtil.java new file mode 100644 index 00000000..3b8e7aad --- /dev/null +++ b/src/main/java/frc/robot/util/TimeUtil.java @@ -0,0 +1,32 @@ +// Copyright (c) 2024-2026 Az-FIRST +// http://github.com/AZ-First +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +// GNU General Public License for more details. + +package frc.robot.util; + +import org.littletonrobotics.junction.Logger; + +public final class TimeUtil { + private TimeUtil() {} + + /** Always seconds, regardless of real/sim/replay timebase quirks. */ + public static double now() { + double t = Logger.getTimestamp(); + + // Empirical: in some environments, Logger timestamp is microseconds. + // If it looks like µs, convert to seconds. + if (t > 1.0e6) { + t *= 1.0e-6; + } + return t; + } +} From 03cbadfd15b693ef151204a19fcfb2b88e9372b6 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 16 Feb 2026 15:02:22 -0700 Subject: [PATCH 15/19] =?UTF-8?q?=F0=9F=98=AE=E2=80=8D=F0=9F=92=A8=20Got?= =?UTF-8?q?=20it?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Got the pose buffering cleaned up and working. Next step is to clean the code so that it's easy to read and clean up any extraneous additions. --- src/main/java/frc/robot/Constants.java | 10 + .../frc/robot/subsystems/drive/Drive.java | 229 +++++++++++++++--- .../robot/subsystems/drive/DriveOdometry.java | 106 ++++++-- .../frc/robot/subsystems/vision/Vision.java | 6 + 4 files changed, 301 insertions(+), 50 deletions(-) diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 59647ab7..bf2a175c 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -333,6 +333,16 @@ public static final class DrivebaseConstants { // Optional: ignore obviously insane measurements while disabled. public static final double kDisabledVisionMaxJumpM = 2.0; // meters public static final double kDisabledVisionMaxJumpRad = Units.degreesToRadians(20.0); + public static final double kDisabledVisionStale = 0.75; // seconds + + // Coast window config + public static final double kDisabledCoastSeconds = 5.0; + + // "Stationary" detection config (tune) + public static final double kStationaryMaxWheelDeltaM = 0.002; // 2mm per loop + public static final double kStationaryMaxYawRateRadPerSec = 0.05; // ~3 deg/s + public static final int kStationaryLoopsToEndCoast = 10; // ~0.20s @ 20ms + public static final double kDisabledVisionIgnoreAfterDisableSec = 0.25; // 250ms } /************************************************************************* */ diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 30981328..1eb5411d 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -104,6 +104,15 @@ public class Drive extends RBSISubsystem { private volatile long poseResetEpoch = 0; // monotonic counter private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; + // Pose Regimes (ENABLED, DISABLED_COAST, DISABLE_STATIONARY) + private boolean lastEnabled = false; + private double disabledCoastUntilTs = Double.NEGATIVE_INFINITY; + private double disabledCoastStartTs = Double.NEGATIVE_INFINITY; + + private final double[] lastWheelDistM = new double[4]; + private boolean haveLastWheelDist = false; + private int stationaryLoops = 0; + /** Constructor */ public Drive(Imu imu) { this.imu = imu; @@ -385,6 +394,83 @@ public ProfiledPIDController getAngleController() { return angleController; } + // Drive.java + public void updateDisabledCoastState( + boolean enabledNow, + boolean disabledNow, + double now, + double yawRateRadPerSec, + SwerveModulePosition[] odomPositions) { + + // 
Detect ENABLED -> DISABLED edge + if (lastEnabled && !enabledNow) { + disabledCoastStartTs = now; + disabledCoastUntilTs = now + DrivebaseConstants.kDisabledCoastSeconds; + + stationaryLoops = 0; + haveLastWheelDist = false; // reset delta baseline on transition + } + lastEnabled = enabledNow; + + // If not disabled, no coast. + if (!disabledNow) { + stationaryLoops = 0; + haveLastWheelDist = false; + return; + } + + // If coast already expired, nothing to do. + if (!(now < disabledCoastUntilTs)) { + return; + } + + // Need odometry positions to detect motion + if (odomPositions == null || odomPositions.length < 4) { + return; + } + + // Compute max wheel delta this loop + double maxDelta = 0.0; + if (haveLastWheelDist) { + for (int i = 0; i < 4; i++) { + double dist = odomPositions[i].distanceMeters; + double d = Math.abs(dist - lastWheelDistM[i]); + if (d > maxDelta) maxDelta = d; + } + } + + // Update baseline for next loop + for (int i = 0; i < 4; i++) { + lastWheelDistM[i] = odomPositions[i].distanceMeters; + } + haveLastWheelDist = true; + + // Stationary test (must have baseline) + if (haveLastWheelDist + && maxDelta <= DrivebaseConstants.kStationaryMaxWheelDeltaM + && Math.abs(yawRateRadPerSec) <= DrivebaseConstants.kStationaryMaxYawRateRadPerSec) { + stationaryLoops++; + } else { + stationaryLoops = 0; + } + + // Optional: don’t end coast “instantly” right after disable edge + final double minCoastTime = 0.25; // seconds + final boolean pastMin = (now - disabledCoastStartTs) >= minCoastTime; + + // End coast early if stationary long enough + if (pastMin && stationaryLoops >= DrivebaseConstants.kStationaryLoopsToEndCoast) { + disabledCoastUntilTs = now; // expires immediately + } + + // Debug logs (optional) + Logger.recordOutput("Odometry/Coast/active", isDisabledCoast(now)); + Logger.recordOutput("Odometry/Coast/untilTs", disabledCoastUntilTs); + Logger.recordOutput("Odometry/Coast/stationaryLoops", stationaryLoops); + Logger.recordOutput("Odometry/Coast/maxWheelDeltaM", maxDelta); + Logger.recordOutput("Odometry/Coast/yawRateRadPerSec", yawRateRadPerSec); + } + /************************************************************************* */ /** SysId Characterization Routines ************************************** */ @@ -420,7 +506,7 @@ private SwerveModuleState[] getModuleStates() { /** Returns the module positions (turn angles and drive positions) for all of the modules. */ @AutoLogOutput(key = "SwerveStates/Positions") - private SwerveModulePosition[] getModulePositions() { + SwerveModulePosition[] getModulePositions() { SwerveModulePosition[] states = new SwerveModulePosition[4]; for (int i = 0; i < 4; i++) { states[i] = modules[i].getPosition(); @@ -435,9 +521,9 @@ public ChassisSpeeds getChassisSpeeds() { } /** Returns the current odometry pose. */ - @AutoLogOutput(key = "Odometry/Robot") public Pose2d getPose() { - if (Constants.getMode() == Mode.SIM) { + boolean isReplay = Logger.hasReplaySource(); + if (Constants.getMode() == Mode.SIM && !isReplay) { return simPhysics.getPose(); } return m_PoseEstimator.getEstimatedPosition(); @@ -555,6 +641,19 @@ public static Translation2d[] getModuleTranslations() { }; } + // Drive.java + public boolean isDisabledCoast() { + return isDisabledCoast(TimeUtil.now()); + } + + public boolean isDisabledCoast(double now) { + return DriverStation.isDisabled() && (now < disabledCoastUntilTs); + } + + public double getDisabledCoastStartTs() { + return disabledCoastStartTs; + } + /** Returns the position of each module in radians. 
*/ public double[] getWheelRadiusCharacterizationPositions() { double[] values = new double[4]; @@ -609,51 +708,125 @@ public void zeroHeading() { * @param measurement The pose @ timestamp to add to the pose estimator */ // Called by Vision via consumer.accept(TimedPose) + // Drive.java fields + private boolean disabledVisionInitialized = false; + + private Pose2d lastDisabledVisionPose = new Pose2d(); + private double lastDisabledVisionTs = Double.NaN; + public void addVisionMeasurement(TimedPose meas) { Drive.odometryLock.lock(); try { // Always use measurement timestamp when fusing (enabled path) final double t = meas.timestampSeconds(); + final Pose2d vision = meas.pose(); + // ENABLED: normal fusion if (!DriverStation.isDisabled()) { - // ENABLED: normal soft fusion - m_PoseEstimator.addVisionMeasurement(meas.pose(), t, meas.stdDevs()); + disabledVisionInitialized = false; + lastDisabledVisionTs = Double.NaN; + m_PoseEstimator.addVisionMeasurement(vision, t, meas.stdDevs()); return; } - // DISABLED: blend toward vision, then reset estimator to blended pose + // DISABLED: + final boolean coast = isDisabledCoast(t); // your overload (timebase-consistent) + + // Optional: ignore vision briefly right after ENABLE->DISABLE (prevents “phase mismatch” at + // disable edge) + if (coast) { + final double coastAge = t - getDisabledCoastStartTs(); + Logger.recordOutput("Vision/Dbg/disabledCoastAge", coastAge); + + if (coastAge >= 0.0 && coastAge < DrivebaseConstants.kDisabledVisionIgnoreAfterDisableSec) { + Logger.recordOutput("Vision/Dbg/disabledIgnoreEarlyCoast", true); + return; + } + } + Logger.recordOutput("Vision/Dbg/disabledIgnoreEarlyCoast", false); + + // If we're coasting, we *avoid* init snap and we lean gentler than stationary. + // (still use the same gating so one bad frame doesn't wreck you) + final double alpha = + coast + ? Math.min(DrivebaseConstants.kDisabledVisionBlendAlpha, 0.05) // gentle during coast + : DrivebaseConstants.kDisabledVisionBlendAlpha; + + // "Current" for blending target (estimator pose) final Pose2d current = m_PoseEstimator.getEstimatedPosition(); - final Pose2d vision = meas.pose(); - // Optional sanity gate while disabled (prevents one bad frame from wrecking you) - final double dTrans = current.getTranslation().getDistance(vision.getTranslation()); - final double dRot = Math.abs(current.getRotation().minus(vision.getRotation()).getRadians()); - if (dTrans > DrivebaseConstants.kDisabledVisionMaxJumpM - || dRot > DrivebaseConstants.kDisabledVisionMaxJumpRad) { + // Debug + Logger.recordOutput("Vision/Dbg/disabledCoast", coast); + Logger.recordOutput("Vision/Dbg/disabledVisionInitialized", disabledVisionInitialized); + Logger.recordOutput("Vision/Dbg/disabledVisionTs", t); + Logger.recordOutput( + "Vision/Dbg/disabledVisionAge", + Double.isFinite(lastDisabledVisionTs) ? (t - lastDisabledVisionTs) : Double.NaN); + + // Stale logic (looser = LONGER timeout) + final boolean stale = + Double.isFinite(lastDisabledVisionTs) + && (t - lastDisabledVisionTs) > DrivebaseConstants.kDisabledVisionStale; + Logger.recordOutput("Vision/Dbg/visionStale", stale); + + // If we're in coast, we intentionally *don't* init-snap. + // We also reset initialization so that once coast ends, the first good stationary frame + // snaps. 
+ if (coast) { + disabledVisionInitialized = false; + } + + // If not initialized AND not coasting: snap hard to vision once + if (!disabledVisionInitialized && !coast) { + disabledVisionInitialized = true; + lastDisabledVisionPose = vision; + lastDisabledVisionTs = t; + + m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), vision); + markPoseReset(t); + poseBufferAddSample(t, vision); + + Logger.recordOutput("Vision/DisabledInitSnap", true); + Logger.recordOutput("Vision/DisabledReject", false); + Logger.recordOutput("Vision/DisabledBlendAlphaUsed", alpha); + return; + } + Logger.recordOutput("Vision/DisabledInitSnap", false); + + // Gate vs last accepted disabled vision pose (not estimator) + final Pose2d gateRef = + Double.isFinite(lastDisabledVisionTs) ? lastDisabledVisionPose : vision; + + final double dTrans = gateRef.getTranslation().getDistance(vision.getTranslation()); + final double dRot = Math.abs(gateRef.getRotation().minus(vision.getRotation()).getRadians()); + + Logger.recordOutput("Vision/Dbg/dTransFromLastVision", dTrans); + Logger.recordOutput("Vision/Dbg/dRotFromLastVision", dRot); + + // Reject only if NOT stale + if (!stale + && (dTrans > DrivebaseConstants.kDisabledVisionMaxJumpM + || dRot > DrivebaseConstants.kDisabledVisionMaxJumpRad)) { Logger.recordOutput("Vision/DisabledReject", true); - Logger.recordOutput("Vision/DisabledReject_dTransM", dTrans); - Logger.recordOutput("Vision/DisabledReject_dRotRad", dRot); + Logger.recordOutput("Vision/DisabledBlendAlphaUsed", alpha); return; } Logger.recordOutput("Vision/DisabledReject", false); - // Controlled “soft snap” - final Pose2d blended = - current.interpolate(vision, DrivebaseConstants.kDisabledVisionBlendAlpha); - - // Reset estimator to blended pose (heading/modules are "now", but robot can't move while - // disabled) - m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), blended); + // Accept this vision frame as the new reference + lastDisabledVisionPose = vision; + lastDisabledVisionTs = t; - // Mark reset so Vision gates old samples, etc. 
- markPoseReset(TimeUtil.now()); + // Blend toward vision + final Pose2d blended = current.interpolate(vision, alpha); - // Keep pose buffer in sync immediately (helps timeAlignPose calls) - poseBufferAddSample(TimeUtil.now(), blended); + m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), blended); + markPoseReset(t); + poseBufferAddSample(t, blended); - Logger.recordOutput( - "Vision/DisabledBlendAlpha", DrivebaseConstants.kDisabledVisionBlendAlpha); Logger.recordOutput("Vision/DisabledBlendedPose", blended); + Logger.recordOutput("Vision/DisabledBlendAlphaUsed", alpha); + } finally { Drive.odometryLock.unlock(); } @@ -681,7 +854,7 @@ private void markPoseReset(double fpgaNow) { */ /** Get the pose estimator current pose */ - Pose2d poseEstimatorGetPose() { + public Pose2d poseEstimatorGetPose() { return m_PoseEstimator.getEstimatedPosition(); } diff --git a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java index 900bd15b..1d50d9c0 100644 --- a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java +++ b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java @@ -17,6 +17,7 @@ package frc.robot.subsystems.drive; +import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.kinematics.SwerveModulePosition; import edu.wpi.first.wpilibj.DriverStation; @@ -34,6 +35,8 @@ public final class DriveOdometry extends VirtualSubsystem { private final Imu imu; private final Module[] modules; + private long writeNumber = 0L; + // Per-cycle cached objects (to avoid repeated allocations) private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; @@ -58,6 +61,8 @@ protected int getPeriodPriority() { /** Periodic function to read inputs */ @Override public void rbsiPeriodic() { + Logger.recordOutput("Odometry/Debug/alive", true); + Drive.odometryLock.lock(); try { final var imuInputs = imu.getInputs(); @@ -68,21 +73,34 @@ public void rbsiPeriodic() { } final boolean isReplayActive = Logger.hasReplaySource(); + Logger.recordOutput("Odometry/Debug/isDisabled", DriverStation.isDisabled()); + Logger.recordOutput("Odometry/Debug/isReplayActive", isReplayActive); + // ---------------------------------------------------------------------- // Pure SIM (not replaying a log): use sim pose/yaw + // ---------------------------------------------------------------------- if (Constants.getMode() == Mode.SIM && !isReplayActive) { final double now = TimeUtil.now(); + + // Keep buffers alive drive.poseBufferAddSample(now, drive.getSimPose()); drive.yawBuffersAddSample(now, drive.getSimYawRad(), drive.getSimYawRateRadPerSec()); - Logger.recordOutput("Drive/Pose", drive.getSimPose()); + + // Coast state uses "now" + current module positions + drive.updateDisabledCoastState( + DriverStation.isEnabled(), + DriverStation.isDisabled(), + now, + drive.getSimYawRateRadPerSec(), + drive.getModulePositions()); + return; } - // DISABLED (REAL or REPLAY): minimal ticking — keep buffers alive, do NOT integrate module - // deltas. - // Exception: if you *want* replay odometry integration while disabled, remove the - // DriverStation - // guard and keep the original replay loop. + // ---------------------------------------------------------------------- + // DISABLED (REAL only): minimal ticking — keep buffers alive, do NOT integrate module deltas. + // (If you want replay integration while disabled, this branch is already !isReplayActive.) 
+ // ---------------------------------------------------------------------- if (DriverStation.isDisabled() && !isReplayActive) { final double now = TimeUtil.now(); @@ -91,15 +109,24 @@ public void rbsiPeriodic() { drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); } - // keep pose buffer alive with the *current estimator pose* (which can be blended by - // disabled vision) + // Coast state from "now" + current module positions + drive.updateDisabledCoastState( + DriverStation.isEnabled(), + DriverStation.isDisabled(), + now, + imuInputs.yawRateRadPerSec, + drive.getModulePositions()); + + // keep pose buffer alive with the *current estimator pose* drive.poseBufferAddSample(now, drive.poseEstimatorGetPose()); Logger.recordOutput("Drive/Pose", drive.poseEstimatorGetPose()); drive.setGyroDisconnectedAlert(!imuInputs.connected); return; } + // ---------------------------------------------------------------------- // Canonical timestamp queue from module[0] + // ---------------------------------------------------------------------- final double[] ts = modules[0].getOdometryTimestamps(); final int n = (ts == null) ? 0 : ts.length; @@ -108,6 +135,14 @@ public void rbsiPeriodic() { if (Constants.getMode() != Mode.REPLAY) { final double now = TimeUtil.now(); drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); + + // Coast state update (no per-sample positions available; use current) + drive.updateDisabledCoastState( + DriverStation.isEnabled(), + DriverStation.isDisabled(), + now, + imuInputs.yawRateRadPerSec, + drive.getModulePositions()); } drive.setGyroDisconnectedAlert(!imuInputs.connected); return; @@ -119,7 +154,9 @@ public void rbsiPeriodic() { modHist[m] = modules[m].getOdometryPositions(); } + // ---------------------------------------------------------------------- // Determine YAW queue availability (everything exists and lines up) + // ---------------------------------------------------------------------- final boolean hasYawQueue = imuInputs.connected && imuInputs.odometryYawTimestamps != null @@ -130,10 +167,7 @@ public void rbsiPeriodic() { final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; final double[] yawPos = hasYawQueue ? 
imuInputs.odometryYawPositionsRad : null; - // Determine index alignment (cheap + deterministic) - // We only trust index alignment if BOTH: - // - yaw has at least n samples - // - yawTs[i] ~= ts[i] for i in range (tight epsilon) + // Determine index alignment boolean yawIndexAligned = false; if (hasYawQueue && yawTs.length >= n) { yawIndexAligned = true; @@ -150,12 +184,16 @@ public void rbsiPeriodic() { if (hasYawQueue && !yawIndexAligned) { drive.yawBuffersFillFromQueue(yawTs, yawPos); } else if (!hasYawQueue) { - // Single “now” sample once (not per replay) final double now = TimeUtil.now(); drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); } + // ---------------------------------------------------------------------- // Replay each odometry sample + // ---------------------------------------------------------------------- + final double[] lastDist = new double[4]; + boolean haveLastDist = false; + for (int i = 0; i < n; i++) { final double t = ts[i]; @@ -176,41 +214,51 @@ public void rbsiPeriodic() { if (hasYawQueue) { if (yawIndexAligned) { yawRad = yawPos[i]; - // Keep yaw buffers aligned to replay timeline drive.yawBuffersAddSampleIndexAligned(t, yawTs, yawPos, i); } else { yawRad = drive.yawBufferSampleOr(t, imuInputs.yawPositionRad); } } + // Coast state update IN REPLAY TIMEBASE + // Yaw rate: if you have a buffered rate, use it; otherwise imuInputs.yawRateRadPerSec is + // ok. + drive.updateDisabledCoastState( + DriverStation.isEnabled(), + DriverStation.isDisabled(), + t, + imuInputs.yawRateRadPerSec, + odomPositions); + // Debugging Logger.recordOutput("Odometry/Debug/timestamp", t); Logger.recordOutput("Odometry/Debug/now", TimeUtil.now()); if (i > 0) { Logger.recordOutput("Odometry/Debug/timeNowDiff", t - ts[i - 1]); } - Logger.recordOutput("Odometry/Debug/replay_t", t); Logger.recordOutput("Odometry/Debug/replay_yawRad", yawRad); - double[] lastDist = new double[4]; - boolean firstSample = true; + // Module distance deltas (valid within batch) for (int m = 0; m < 4; m++) { - SwerveModulePosition pos = odomPositions[m]; - double dist = pos.distanceMeters; + final SwerveModulePosition pos = odomPositions[m]; + final double dist = pos.distanceMeters; Logger.recordOutput("Odometry/Debug/mod" + m + "_distanceMeters", dist); Logger.recordOutput("Odometry/Debug/mod" + m + "_angleRad", pos.angle.getRadians()); - if (!firstSample) { - double delta = dist - lastDist[m]; + + if (haveLastDist) { + final double delta = dist - lastDist[m]; Logger.recordOutput("Odometry/Debug/mod" + m + "_deltaMeters", delta); } lastDist[m] = dist; } - firstSample = false; + haveLastDist = true; + // Feed estimator at this historical timestamp drive.poseEstimatorUpdateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + // Maintain pose history in SAME timebase as estimator drive.poseBufferAddSample(t, drive.poseEstimatorGetPose()); } @@ -219,6 +267,20 @@ public void rbsiPeriodic() { drive.setGyroDisconnectedAlert(!imuInputs.connected); } finally { + final Pose2d pose = drive.poseEstimatorGetPose(); + final double x = pose.getX(); + final double y = pose.getY(); + final double th = pose.getRotation().getRadians(); + + Logger.recordOutput("OdometryReplay/Debug/wroteRobotPose", ++writeNumber); + Logger.recordOutput("OdometryReplay/Debug/xFinite", Double.isFinite(x)); + Logger.recordOutput("OdometryReplay/Debug/yFinite", Double.isFinite(y)); + Logger.recordOutput("OdometryReplay/Debug/thFinite", Double.isFinite(th)); + + 
Logger.recordOutput("OdometryReplay/RobotX", x); + Logger.recordOutput("OdometryReplay/RobotY", y); + Logger.recordOutput("OdometryReplay/RobotThetaRad", th); + Drive.odometryLock.unlock(); } } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 0753b67a..ea4930f1 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -278,8 +278,14 @@ public void rbsiPeriodic() { lastFusedValid = true; lastSmoothedValid = true; + Logger.recordOutput("OdometryReplay/PreInjectRobotX", drive.poseEstimatorGetPose().getX()); + Logger.recordOutput("OdometryReplay/PreInjectRobotY", drive.poseEstimatorGetPose().getY()); + consumer.accept(smoothed); + Logger.recordOutput("OdometryReplay/PostInjectRobotX", drive.poseEstimatorGetPose().getX()); + Logger.recordOutput("OdometryReplay/PostInjectRobotY", drive.poseEstimatorGetPose().getY()); + // If you want, you can feed debug values from inside timeAlignPose(...) via fields, // but leaving the plumbing as-is since you’re already logging inside helpers. From 1b4006d371f5ab1ee84c9cadf0ead0fda664c5b4 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Tue, 17 Feb 2026 09:45:46 -0700 Subject: [PATCH 16/19] Clean up diff versus `main` --- src/main/java/frc/robot/Constants.java | 49 +++--- src/main/java/frc/robot/Robot.java | 13 -- src/main/java/frc/robot/RobotContainer.java | 5 - .../accelerometer/Accelerometer.java | 11 +- .../frc/robot/subsystems/drive/ModuleIO.java | 1 - .../subsystems/drive/ModuleIOTalonFX.java | 11 +- .../frc/robot/subsystems/vision/VisionIO.java | 12 +- .../subsystems/vision/VisionIOLimelight.java | 30 ++-- .../vision/VisionIOPhotonVision.java | 12 +- src/main/java/frc/robot/util/GeomUtil.java | 165 ++++++++++++++++++ .../frc/robot/util/RBSICANBusRegistry.java | 3 +- 11 files changed, 234 insertions(+), 78 deletions(-) create mode 100644 src/main/java/frc/robot/util/GeomUtil.java diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index bf2a175c..b7218f43 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -75,7 +75,7 @@ public final class Constants { private static SwerveType swerveType = SwerveType.PHOENIX6; // PHOENIX6, YAGSL private static CTREPro phoenixPro = CTREPro.UNLICENSED; // LICENSED, UNLICENSED private static AutoType autoType = AutoType.MANUAL; // MANUAL, PATHPLANNER, CHOREO - private static VisionType visionType = VisionType.NONE; // PHOTON, LIMELIGHT, NONE + private static VisionType visionType = VisionType.NONE; // PHOTON, LIMELIGHT, NONE /** Enumerate the robot types (name your robots here) */ public static enum RobotType { @@ -314,15 +314,19 @@ public static final class DrivebaseConstants { public static final double kNominalFFVolts = 2.0; // Volts // Default TalonFX Gains (Replaces what's in Phoenix X's Tuner Constants) - public static final double kDriveP = 4.0; + // NOTE: Default values from 6328's 2025 Public Code + // + // IMPORTANT:: These values are valid only for CTRE LICENSED operation!! 
+ // Adjust these downward until your modules behave correctly + public static final double kDriveP = 40.0; public static final double kDriveD = 0.03; public static final double kDriveV = 0.83; public static final double kDriveA = 0.0; public static final double kDriveS = 2.00; public static final double kDriveT = SwerveConstants.kDriveGearRatio / DCMotor.getKrakenX60Foc(1).KtNMPerAmp; - public static final double kSteerP = 4.0; - public static final double kSteerD = 0.02; + public static final double kSteerP = 400.0; + public static final double kSteerD = 20.0; public static final double kSteerS = 2.0; // Odometry-related constants ================================== @@ -482,7 +486,7 @@ public record CameraConfig( // Example Cameras are mounted in the back corners, 18" up from the floor, facing sideways public static final CameraConfig[] ALL = { new CameraConfig( - "Photon_BW7", + "camera_0", new Transform3d( Inches.of(-13.0), Inches.of(13.0), @@ -498,24 +502,23 @@ public record CameraConfig( setLatencyStdDevMs(5); } }), - // - // new CameraConfig( - // "camera_1", - // new Transform3d( - // Inches.of(-13.0), - // Inches.of(-13.0), - // Inches.of(12.0), - // new Rotation3d(0.0, 0.0, -Math.PI / 2)), - // 1.0, - // new SimCameraProperties() { - // { - // setCalibration(1280, 800, Rotation2d.fromDegrees(120)); - // setCalibError(0.25, 0.08); - // setFPS(30); - // setAvgLatencyMs(20); - // setLatencyStdDevMs(5); - // } - // }), + new CameraConfig( + "camera_1", + new Transform3d( + Inches.of(-13.0), + Inches.of(-13.0), + Inches.of(12.0), + new Rotation3d(0.0, 0.0, -Math.PI / 2)), + 1.0, + new SimCameraProperties() { + { + setCalibration(1280, 800, Rotation2d.fromDegrees(120)); + setCalibError(0.25, 0.08); + setFPS(30); + setAvgLatencyMs(20); + setLatencyStdDevMs(5); + } + }), // ... And more, if needed }; diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index 92196f64..c3db830e 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -19,7 +19,6 @@ import com.revrobotics.util.StatusLogger; import edu.wpi.first.wpilibj.DriverStation; -import edu.wpi.first.wpilibj.DriverStation.Alliance; import edu.wpi.first.wpilibj.Threads; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj2.command.Command; @@ -49,7 +48,6 @@ public class Robot extends LoggedRobot { private Command m_autoCommandPathPlanner; private RobotContainer m_robotContainer; private Timer m_disabledTimer; - private static boolean isBlueAlliance = false; // Define simulation fields here private VisionSystemSim visionSim; @@ -139,8 +137,6 @@ public Robot() { @Override public void robotPeriodic() { - isBlueAlliance = DriverStation.getAlliance().orElse(Alliance.Blue) == Alliance.Blue; - final long t0 = System.nanoTime(); if (isReal()) { @@ -323,13 +319,4 @@ public void simulationPeriodic() { // Update sim each sim tick visionSim.update(m_robotContainer.getDrivebase().getPose()); } - - // Helper method to simplify checking if the robot is blue or red alliance - public static boolean isBlue() { - return isBlueAlliance; - } - - public static boolean isRed() { - return !isBlue(); - } } diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index e80e242d..28eae154 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -82,9 +82,6 @@ /** This is the location for defining robot hardware, commands, and controller button bindings. 
*/ public class RobotContainer { - private static final boolean USE_MAPLESIM = true; - public static final boolean MAPLESIM = USE_MAPLESIM && Robot.isSimulation(); - /** Define the Driver and, optionally, the Operator/Co-Driver Controllers */ // Replace with ``CommandPS4Controller`` or ``CommandJoystick`` if needed final CommandXboxController driverController = new CommandXboxController(0); // Main Driver @@ -139,8 +136,6 @@ public class RobotContainer { // Alerts private final Alert aprilTagLayoutAlert = new Alert("", AlertType.INFO); - public static RobotContainer instance; - /** * Constructor for the Robot Container. This container holds subsystems, opertator interface * devices, and commands. diff --git a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java index 205edd44..2fb0361e 100644 --- a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java +++ b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java @@ -30,9 +30,6 @@ */ public class Accelerometer extends VirtualSubsystem { - // Gravitational acceleration - private static final double G_TO_MPS2 = 9.80665; - // Define hardware interfaces private final RioAccelIO rio; private final RioAccelIO.Inputs rioInputs = new RioAccelIO.Inputs(); @@ -71,8 +68,12 @@ public void rbsiPeriodic() { rio.updateInputs(rioInputs); // Compute RIO accelerations and jerks - rawRio = new Translation3d(rioInputs.xG, rioInputs.yG, rioInputs.zG); - rioAcc = rawRio.rotateBy(RobotConstants.kRioOrientation).times(G_TO_MPS2); + rawRio = + new Translation3d( + rioInputs.xG * Constants.G_TO_MPS2, + rioInputs.yG * Constants.G_TO_MPS2, + rioInputs.zG * Constants.G_TO_MPS2); + rioAcc = rawRio.rotateBy(RobotConstants.kRioOrientation); // Acceleration from previous loop prevRioAcc = rioAcc; diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIO.java b/src/main/java/frc/robot/subsystems/drive/ModuleIO.java index 8e9c1132..10055776 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIO.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIO.java @@ -13,7 +13,6 @@ import org.littletonrobotics.junction.AutoLog; public interface ModuleIO { - @AutoLog public static class ModuleIOInputs { public boolean driveConnected = false; diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java index c7015bfc..4359b385 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java @@ -265,14 +265,14 @@ public ModuleIOTalonFX(int module) { @Override public void updateInputs(ModuleIOInputs inputs) { - // -------------------- Refresh Phoenix signals -------------------- + // Refresh Phoenix signals var driveStatus = BaseStatusSignal.refreshAll(drivePosition, driveVelocity, driveAppliedVolts, driveCurrent); var turnStatus = BaseStatusSignal.refreshAll(turnPosition, turnVelocity, turnAppliedVolts, turnCurrent); var encStatus = BaseStatusSignal.refreshAll(turnAbsolutePosition); - // Log refresh failures explicitly (debug gold) + // Log *which* groups are failing and what the code is if (!driveStatus.isOK()) { Logger.recordOutput("CAN/Module" + module + "/DriveRefreshStatus", driveStatus.toString()); } @@ -283,24 +283,25 @@ public void updateInputs(ModuleIOInputs inputs) { Logger.recordOutput("CAN/Module" + module + "/EncRefreshStatus", encStatus.toString()); } - // -------------------- Connectivity 
flags -------------------- + // Connectivity flags inputs.driveConnected = driveConnectedDebounce.calculate(driveStatus.isOK()); inputs.turnConnected = turnConnectedDebounce.calculate(turnStatus.isOK()); inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(encStatus.isOK()); - // -------------------- Instantaneous state -------------------- + // Update drive inputs inputs.drivePositionRad = Units.rotationsToRadians(drivePosition.getValueAsDouble()); inputs.driveVelocityRadPerSec = Units.rotationsToRadians(driveVelocity.getValueAsDouble()); inputs.driveAppliedVolts = driveAppliedVolts.getValueAsDouble(); inputs.driveCurrentAmps = driveCurrent.getValueAsDouble(); + // Update turn inputs inputs.turnAbsolutePosition = Rotation2d.fromRotations(turnAbsolutePosition.getValueAsDouble()); inputs.turnPosition = Rotation2d.fromRotations(turnPosition.getValueAsDouble()); inputs.turnVelocityRadPerSec = Units.rotationsToRadians(turnVelocity.getValueAsDouble()); inputs.turnAppliedVolts = turnAppliedVolts.getValueAsDouble(); inputs.turnCurrentAmps = turnCurrent.getValueAsDouble(); - // -------------------- Odometry queue drain -------------------- + // Odometry queue drain final int tsCount = timestampQueue.size(); final int driveCount = drivePositionQueue.size(); final int turnCount = turnPositionQueue.size(); diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java index dc249ec9..4f11bf9f 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ -18,14 +18,14 @@ public interface VisionIO { class VisionIOInputs { public boolean connected = false; - /** Latest "camera to target" observation (optional) */ + // Latest "camera to target" observation (optional) public TargetObservation latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); - /** Pose observations generated by the camera pipeline (each has its own timestamp) */ + // Pose observations generated by the camera pipeline (each has its own timestamp) public PoseObservation[] poseObservations = new PoseObservation[0]; - /** Union of tag IDs seen this loop (for logging/UI only) */ + // Union of tag IDs seen this loop public int[] tagIds = new int[0]; } @@ -34,9 +34,9 @@ record TargetObservation(Rotation2d tx, Rotation2d ty) {} /** Represents a robot pose sample used for pose estimation. 
*/ record PoseObservation( - double timestamp, // camera capture time (seconds, same timebase as FPGA) - Pose3d pose, // field->robot pose - double ambiguity, // single-tag ambiguity if available + double timestamp, + Pose3d pose, + double ambiguity, int tagCount, double averageTagDistance, PoseObservationType type, diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java index a4c5d13b..c9bbc0d3 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java @@ -56,26 +56,25 @@ public VisionIOLimelight(String name, Supplier rotationSupplier) { @Override public void updateInputs(VisionIOInputs inputs) { - // Update connection status + // Update connection status based on whether an update has been seen in the last 250ms inputs.connected = ((RobotController.getFPGATime() - latencySubscriber.getLastChange()) / 1000) < 250; - // Target observation (simple tx/ty) + // Update target observation inputs.latestTargetObservation = new TargetObservation( Rotation2d.fromDegrees(txSubscriber.get()), Rotation2d.fromDegrees(tySubscriber.get())); - // Push robot orientation for MegaTag 2 + // Update orientation for MegaTag 2 orientationPublisher.accept( new double[] {rotationSupplier.get().getDegrees(), 0.0, 0.0, 0.0, 0.0, 0.0}); - NetworkTableInstance.getDefault().flush(); + NetworkTableInstance.getDefault() + .flush(); // Increases network traffic but recommended by Limelight - // Union of tag IDs seen this loop (for logging/UI only) + // Read new pose observations from NetworkTables Set unionTagIds = new HashSet<>(); - List poseObservations = new LinkedList<>(); - /* -------------------- MegaTag 1 -------------------- */ for (var rawSample : megatag1Subscriber.readQueue()) { if (rawSample.value.length == 0) continue; @@ -93,29 +92,28 @@ public void updateInputs(VisionIOInputs inputs) { poseObservations.add( new PoseObservation( - // Timestamp (LL server time minus latency) + // Timestamp, based on server timestamp of publish and latency rawSample.timestamp * 1.0e-6 - rawSample.value[6] * 1.0e-3, - // Pose + // 3D pose estimate parsePose(rawSample.value), - // Ambiguity (first tag only) + // Ambiguity, using only the first tag because ambiguity isn't applicable for multitag rawSample.value.length >= 18 ? rawSample.value[17] : 0.0, // Tag count tagCount, - // Avg distance + // Average tag distance rawSample.value[9], - // Type + // Observation type PoseObservationType.MEGATAG_1, // Used tag IDs used)); } - /* -------------------- MegaTag 2 -------------------- */ for (var rawSample : megatag2Subscriber.readQueue()) { if (rawSample.value.length == 0) continue; @@ -152,8 +150,7 @@ public void updateInputs(VisionIOInputs inputs) { used)); } - /* -------------------- Save to inputs -------------------- */ - + // Save pose observations to inputs object inputs.poseObservations = poseObservations.toArray(new PoseObservation[0]); inputs.tagIds = new int[unionTagIds.size()]; @@ -162,7 +159,8 @@ public void updateInputs(VisionIOInputs inputs) { inputs.tagIds[i++] = id; } - Arrays.sort(inputs.tagIds); // optional but recommended + // Sort list by TagID for clarity + Arrays.sort(inputs.tagIds); } /** Parses the 3D pose from a Limelight botpose array. 
*/ diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java index b328385a..4dac1e05 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java @@ -20,7 +20,7 @@ import java.util.Set; import org.photonvision.PhotonCamera; -/** IO implementation for real PhotonVision hardware (pose already solved by PV). */ +/** IO implementation for real PhotonVision hardware. */ public class VisionIOPhotonVision implements VisionIO { protected final PhotonCamera camera; protected final Transform3d robotToCamera; @@ -63,9 +63,11 @@ public void updateInputs(VisionIOInputs inputs) { bestPitch = Rotation2d.fromDegrees(result.getBestTarget().getPitch()); } - if (result.multitagResult.isPresent()) { + // Add pose observation + if (result.multitagResult.isPresent()) { // Multitag result var multitag = result.multitagResult.get(); + // Calculate robot pose Transform3d fieldToCamera = multitag.estimatedPose.best; Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); @@ -86,6 +88,7 @@ public void updateInputs(VisionIOInputs inputs) { unionTagIds.add(id); // keep your union set for tagIds UI/log } + // Add observation poseObservations.add( new PoseObservation( ts, @@ -96,9 +99,10 @@ public void updateInputs(VisionIOInputs inputs) { PoseObservationType.PHOTONVISION, used)); - } else if (!result.targets.isEmpty()) { + } else if (!result.targets.isEmpty()) { // Single tag result var target = result.targets.get(0); + // Calculate robot pose var tagPose = aprilTagLayout.getTagPose(target.fiducialId); if (tagPose.isEmpty()) continue; @@ -124,6 +128,7 @@ public void updateInputs(VisionIOInputs inputs) { } } + // Save pose observations to inputs object inputs.latestTargetObservation = (newestTargetTs > Double.NEGATIVE_INFINITY) ? new TargetObservation(bestYaw, bestPitch) @@ -131,6 +136,7 @@ public void updateInputs(VisionIOInputs inputs) { inputs.poseObservations = poseObservations.toArray(new PoseObservation[0]); + // Save tag IDs to inputs objects inputs.tagIds = new int[unionTagIds.size()]; int i = 0; for (int id : unionTagIds) inputs.tagIds[i++] = id; diff --git a/src/main/java/frc/robot/util/GeomUtil.java b/src/main/java/frc/robot/util/GeomUtil.java new file mode 100644 index 00000000..f77c8b2f --- /dev/null +++ b/src/main/java/frc/robot/util/GeomUtil.java @@ -0,0 +1,165 @@ +// Copyright (c) 2024-2026 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2021-2026 Littleton Robotics +// http://github.com/Mechanical-Advantage +// +// Use of this source code is governed by a BSD +// license that can be found in the AdvantageKit-License.md file +// at the root directory of this project. + +package frc.robot.util; + +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.geometry.Translation2d; +import edu.wpi.first.math.geometry.Twist2d; +import edu.wpi.first.math.kinematics.ChassisSpeeds; + +/** Geometry utilities for working with translations, rotations, transforms, and poses. 
*/ +public class GeomUtil { + /** + * Creates a pure translating transform + * + * @param translation The translation to create the transform with + * @return The resulting transform + */ + public static Transform2d toTransform2d(Translation2d translation) { + return new Transform2d(translation, Rotation2d.kZero); + } + + /** + * Creates a pure translating transform + * + * @param x The x coordinate of the translation + * @param y The y coordinate of the translation + * @return The resulting transform + */ + public static Transform2d toTransform2d(double x, double y) { + return new Transform2d(x, y, Rotation2d.kZero); + } + + /** + * Creates a pure rotating transform + * + * @param rotation The rotation to create the transform with + * @return The resulting transform + */ + public static Transform2d toTransform2d(Rotation2d rotation) { + return new Transform2d(Translation2d.kZero, rotation); + } + + /** + * Converts a Pose2d to a Transform2d to be used in a kinematic chain + * + * @param pose The pose that will represent the transform + * @return The resulting transform + */ + public static Transform2d toTransform2d(Pose2d pose) { + return new Transform2d(pose.getTranslation(), pose.getRotation()); + } + + public static Pose2d inverse(Pose2d pose) { + Rotation2d rotationInverse = pose.getRotation().unaryMinus(); + return new Pose2d( + pose.getTranslation().unaryMinus().rotateBy(rotationInverse), rotationInverse); + } + + /** + * Converts a Transform2d to a Pose2d to be used as a position or as the start of a kinematic + * chain + * + * @param transform The transform that will represent the pose + * @return The resulting pose + */ + public static Pose2d toPose2d(Transform2d transform) { + return new Pose2d(transform.getTranslation(), transform.getRotation()); + } + + /** + * Creates a pure translated pose + * + * @param translation The translation to create the pose with + * @return The resulting pose + */ + public static Pose2d toPose2d(Translation2d translation) { + return new Pose2d(translation, Rotation2d.kZero); + } + + /** + * Creates a pure rotated pose + * + * @param rotation The rotation to create the pose with + * @return The resulting pose + */ + public static Pose2d toPose2d(Rotation2d rotation) { + return new Pose2d(Translation2d.kZero, rotation); + } + + /** + * Multiplies a twist by a scaling factor + * + * @param twist The twist to multiply + * @param factor The scaling factor for the twist components + * @return The new twist + */ + public static Twist2d multiply(Twist2d twist, double factor) { + return new Twist2d(twist.dx * factor, twist.dy * factor, twist.dtheta * factor); + } + + /** + * Converts a Pose3d to a Transform3d to be used in a kinematic chain + * + * @param pose The pose that will represent the transform + * @return The resulting transform + */ + public static Transform3d toTransform3d(Pose3d pose) { + return new Transform3d(pose.getTranslation(), pose.getRotation()); + } + + /** + * Converts a Transform3d to a Pose3d to be used as a position or as the start of a kinematic + * chain + * + * @param transform The transform that will represent the pose + * @return The resulting pose + */ + public static Pose3d toPose3d(Transform3d transform) { + return new Pose3d(transform.getTranslation(), transform.getRotation()); + } + + /** + * Converts a ChassisSpeeds to a Twist2d by extracting two dimensions (Y and Z). 
chain + * + * @param speeds The original translation + * @return The resulting translation + */ + public static Twist2d toTwist2d(ChassisSpeeds speeds) { + return new Twist2d( + speeds.vxMetersPerSecond, speeds.vyMetersPerSecond, speeds.omegaRadiansPerSecond); + } + + /** + * Creates a new pose from an existing one using a different translation value. + * + * @param pose The original pose + * @param translation The new translation to use + * @return The new pose with the new translation and original rotation + */ + public static Pose2d withTranslation(Pose2d pose, Translation2d translation) { + return new Pose2d(translation, pose.getRotation()); + } + + /** + * Creates a new pose from an existing one using a different rotation value. + * + * @param pose The original pose + * @param rotation The new rotation to use + * @return The new pose with the original translation and new rotation + */ + public static Pose2d withRotation(Pose2d pose, Rotation2d rotation) { + return new Pose2d(pose.getTranslation(), rotation); + } +} diff --git a/src/main/java/frc/robot/util/RBSICANBusRegistry.java b/src/main/java/frc/robot/util/RBSICANBusRegistry.java index b0707fc6..ea3ef08e 100644 --- a/src/main/java/frc/robot/util/RBSICANBusRegistry.java +++ b/src/main/java/frc/robot/util/RBSICANBusRegistry.java @@ -15,6 +15,7 @@ import com.ctre.phoenix6.CANBus; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; /** Centralized CAN bus singleton registry + SIM/REAL indirection. */ @@ -64,7 +65,7 @@ private static void checkInit() { if (!initialized) throw new IllegalStateException("RBSICANBusRegistry not initialized."); } - private static void throwUnknown(String name, java.util.Set known) { + private static void throwUnknown(String name, Set known) { throw new IllegalArgumentException("Unknown CAN bus '" + name + "'. Known: " + known); } From 8b3dafad170292e532ab92b443e045de1b0c0024 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Tue, 17 Feb 2026 14:25:48 -0700 Subject: [PATCH 17/19] Cleaning and making things line up. 
--- src/main/java/frc/robot/Constants.java | 8 + src/main/java/frc/robot/Robot.java | 29 +- src/main/java/frc/robot/RobotContainer.java | 26 +- .../frc/robot/subsystems/drive/Drive.java | 95 +- .../robot/subsystems/drive/DriveOdometry.java | 19 +- .../subsystems/drive/ModuleIOBlended.java | 74 +- .../robot/subsystems/drive/ModuleIOSpark.java | 58 +- .../drive/ModuleIOSparkCANcoder.java | 71 +- .../subsystems/drive/ModuleIOTalonFX.java | 8 +- .../subsystems/drive/ModuleIOTalonFXS.java | 939 ++++++++++-------- .../frc/robot/subsystems/vision/Vision.java | 81 +- .../ConcurrentTimeInterpolatableBuffer.java | 15 +- .../frc/robot/util/RBSICANBusRegistry.java | 39 +- .../java/frc/robot/util/RBSICANHealth.java | 10 + src/main/java/frc/robot/util/TimeUtil.java | 1 + 15 files changed, 806 insertions(+), 667 deletions(-) diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index b7218f43..1b556422 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -47,6 +47,7 @@ import frc.robot.util.RBSIEnum.VisionType; import frc.robot.util.RobotDeviceId; import java.util.Set; +import org.littletonrobotics.junction.Logger; import org.photonvision.simulation.SimCameraProperties; import swervelib.math.Matter; @@ -502,6 +503,7 @@ public record CameraConfig( setLatencyStdDevMs(5); } }), + // new CameraConfig( "camera_1", new Transform3d( @@ -554,6 +556,12 @@ public static Mode getMode() { }; } + /** Return whether this is pure simulation */ + public static boolean isPureSim() { + boolean isReplay = Logger.hasReplaySource(); + return getMode() == Mode.SIM && !isReplay; + } + /** Get the current swerve drive type */ public static SwerveType getSwerveType() { return swerveType; diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index c3db830e..5a1e15a3 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -119,24 +119,8 @@ public Robot() { } // /** This function is called periodically during all modes. */ - // @Override - // public void robotPeriodic() { - - // // Run all virtual subsystems each time through the loop - // VirtualSubsystem.periodicAll(); - - // // Runs the Scheduler. This is responsible for polling buttons, adding - // // newly-scheduled commands, running already-scheduled commands, removing - // // finished or interrupted commands, and running subsystem periodic() methods. - // // This must be called from the robot's periodic block in order for anything in - // // the Command-based framework to work. - // CommandScheduler.getInstance().run(); - // } - - /** TESTING VERSION OF ROBOTPERIODIC FOR OVERRUN SOURCES */ @Override public void robotPeriodic() { - final long t0 = System.nanoTime(); if (isReal()) { @@ -145,20 +129,27 @@ public void robotPeriodic() { } final long t1 = System.nanoTime(); + // Run all virtual subsystems each time through the loop VirtualSubsystem.periodicAll(); final long t2 = System.nanoTime(); + // Runs the Scheduler. This is responsible for polling buttons, adding + // newly-scheduled commands, running already-scheduled commands, removing + // finished or interrupted commands, and running subsystem periodic() methods. + // This must be called from the robot's periodic block in order for anything in + // the Command-based framework to work. 
CommandScheduler.getInstance().run(); final long t3 = System.nanoTime(); - Threads.setCurrentThreadPriority(false, 10); + if (isReal()) { + // Return thread to normal priority + Threads.setCurrentThreadPriority(false, 10); + } final long t4 = System.nanoTime(); Logger.recordOutput("Loop/RobotPeriodic_ms", (t4 - t0) / 1e6); - Logger.recordOutput("Loop/ThreadBoost_ms", (t1 - t0) / 1e6); Logger.recordOutput("Loop/Virtual_ms", (t2 - t1) / 1e6); Logger.recordOutput("Loop/Scheduler_ms", (t3 - t2) / 1e6); - Logger.recordOutput("Loop/ThreadRestore_ms", (t4 - t3) / 1e6); } /** This function is called once when the robot is disabled. */ diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 28eae154..dfd7d65e 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -155,8 +155,7 @@ public RobotContainer() { m_imu = new Imu(SwerveConstants.kImu.factory.get()); m_drivebase = new Drive(m_imu); - m_driveOdometry = - new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); // see note + m_driveOdometry = new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); m_vision = new Vision( m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); @@ -170,30 +169,22 @@ public RobotContainer() { m_imu = new Imu(new ImuIOSim()); m_drivebase = new Drive(m_imu); - m_driveOdometry = - new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); // see note - - m_flywheel = new Flywheel(new FlywheelIOSim()); - - // ---------------- Vision IOs (robot code) ---------------- - var cams = Cameras.ALL; + m_driveOdometry = new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); m_vision = new Vision( m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsSim(m_drivebase)); + m_flywheel = new Flywheel(new FlywheelIOSim()); m_accel = new Accelerometer(m_imu); - // ---------------- CameraSweepEvaluator (sim-only analysis) ---------------- + // CameraSweepEvaluator (sim-only analysis) VisionSystemSim visionSim = new VisionSystemSim("CameraSweepWorld"); visionSim.addAprilTags(FieldConstants.aprilTagLayout); - + var cams = Cameras.ALL; PhotonCameraSim[] simCams = new PhotonCameraSim[cams.length]; - for (int i = 0; i < cams.length; i++) { var cfg = cams[i]; - PhotonCamera photonCam = new PhotonCamera(cfg.name()); PhotonCameraSim camSim = new PhotonCameraSim(photonCam, cfg.simProps()); - visionSim.addCamera(camSim, cfg.robotToCamera()); simCams[i] = camSim; } @@ -212,21 +203,18 @@ public RobotContainer() { RBSICANBusRegistry.initSim(CANBuses.RIO, CANBuses.DRIVE); m_imu = new Imu(new ImuIOSim() {}); m_drivebase = new Drive(m_imu); - m_driveOdometry = - new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); // see note - - m_flywheel = new Flywheel(new FlywheelIO() {}); + m_driveOdometry = new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); m_vision = new Vision( m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReplay(m_drivebase)); + m_flywheel = new Flywheel(new FlywheelIO() {}); m_accel = new Accelerometer(m_imu); sweep = null; break; } // Init all CAN busses specified in the `Constants.CANBuses` class - RBSICANBusRegistry.initReal(Constants.CANBuses.ALL); canHealth = Arrays.stream(Constants.CANBuses.ALL).map(RBSICANHealth::new).toList(); // In addition to the initial battery capacity from the Dashbaord, ``RBSIPowerMonitor`` takes diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java 
b/src/main/java/frc/robot/subsystems/drive/Drive.java index 1eb5411d..604d876c 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -1,11 +1,19 @@ // Copyright (c) 2024-2026 Az-FIRST // http://github.com/AZ-First -// Copyright (c) 2021-2026 Littleton Robotics -// http://github.com/Mechanical-Advantage // -// Use of this source code is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. package frc.robot.subsystems.drive; @@ -86,13 +94,8 @@ public class Drive extends RBSISubsystem { // Declare odometry and pose-related variables static final Lock odometryLock = new ReentrantLock(); private SwerveDriveKinematics kinematics = new SwerveDriveKinematics(getModuleTranslations()); - private SwerveModulePosition[] lastModulePositions = // For delta tracking - new SwerveModulePosition[] { - new SwerveModulePosition(), - new SwerveModulePosition(), - new SwerveModulePosition(), - new SwerveModulePosition() - }; + private SwerveModulePosition[] lastModulePositions = + new SwerveModulePosition[4]; // For delta tracking private SwerveDrivePoseEstimator m_PoseEstimator = new SwerveDrivePoseEstimator(kinematics, Rotation2d.kZero, lastModulePositions, Pose2d.kZero); @@ -108,11 +111,15 @@ public class Drive extends RBSISubsystem { private boolean lastEnabled = false; private double disabledCoastUntilTs = Double.NEGATIVE_INFINITY; private double disabledCoastStartTs = Double.NEGATIVE_INFINITY; - private final double[] lastWheelDistM = new double[4]; private boolean haveLastWheelDist = false; private int stationaryLoops = 0; + // Related to vision injection of pose + private boolean disabledVisionInitialized = false; + private Pose2d lastDisabledVisionPose = new Pose2d(); + private double lastDisabledVisionTs = Double.NaN; + /** Constructor */ public Drive(Imu imu) { this.imu = imu; @@ -389,12 +396,20 @@ public void resetHeadingController() { angleController.reset(getHeading().getRadians()); } - /** Getter function for the angle controller */ - public ProfiledPIDController getAngleController() { - return angleController; - } - - // Drive.java + /** + * Update the Disabled Coast State + * + *
The purpose of this function is to determine the coasting state of the robot on the ENABLE + * -> DISABLE edge. While the robot coasts to a stop, the wheel odometry will continue to + * integrate with usual vision input. Once the robot stops moving (within tolerance), the vision + * injection to the Pose will take over. + * + * @param enabledNow Are we enabled now? + * @param disabledNow Are we disabled now? + * @param now When is now? + * @param yawRateRadPerSec Current drivebase rotation rate + * @param odomPositions List of module odometry positions + */ public void updateDisabledCoastState( boolean enabledNow, boolean disabledNow, @@ -402,6 +417,10 @@ public void updateDisabledCoastState( double yawRateRadPerSec, SwerveModulePosition[] odomPositions) { + // Don’t end coast “instantly” right after disable edge + final double minCoastTime = 0.25; // seconds -- maybe put into Constants??? + final boolean pastMin = (now - disabledCoastStartTs) >= minCoastTime; + // Detect ENABLED -> DISABLED edge if (lastEnabled && !enabledNow) { disabledCoastStartTs = now; @@ -454,10 +473,6 @@ public void updateDisabledCoastState( stationaryLoops = 0; } - // Optional: don’t end coast “instantly” right after disable edge - final double minCoastTime = 0.25; // seconds - final boolean pastMin = (now - disabledCoastStartTs) >= minCoastTime; - // End coast early if stationary long enough if (pastMin && stationaryLoops >= DrivebaseConstants.kStationaryLoopsToEndCoast) { disabledCoastUntilTs = now; // expires immediately @@ -494,6 +509,11 @@ public Module[] getModules() { return modules; } + /** Return the prodiledPID angle controller */ + public ProfiledPIDController getAngleController() { + return angleController; + } + /** Returns the module states (turn angles and drive velocities) for all of the modules. */ @AutoLogOutput(key = "SwerveStates/Measured") private SwerveModuleState[] getModuleStates() { @@ -520,10 +540,14 @@ public ChassisSpeeds getChassisSpeeds() { return kinematics.toChassisSpeeds(getModuleStates()); } - /** Returns the current odometry pose. */ + /** + * Returns the current odometry pose. + * + *
If the code is running as pure simulation (i.e., not REPLAY of a log), return the simulated + * physics pose. Otherwise, return the pose from the pose estimator. + */ public Pose2d getPose() { - boolean isReplay = Logger.hasReplaySource(); - if (Constants.getMode() == Mode.SIM && !isReplay) { + if (Constants.isPureSim()) { return simPhysics.getPose(); } return m_PoseEstimator.getEstimatedPosition(); @@ -532,7 +556,7 @@ public Pose2d getPose() { /** Returns the current odometry YAW. */ @AutoLogOutput(key = "Odometry/Yaw") public Rotation2d getHeading() { - if (Constants.getMode() == Mode.SIM) { + if (Constants.isPureSim()) { return simPhysics.getYaw(); } return imu.getYaw(); @@ -567,10 +591,12 @@ public Optional getPoseAtTime(double timestampSeconds) { return poseBuffer.getSample(timestampSeconds); } + /** Returns the oldest timetamp in the current pose buffer */ public double getPoseBufferOldestTime() { return poseBuffer.getOldestTimestamp().getAsDouble(); } + /** Returns the newest timetamp in the current pose buffer */ public double getPoseBufferNewestTime() { return poseBuffer.getNewestTimestamp().getAsDouble(); } @@ -641,15 +667,17 @@ public static Translation2d[] getModuleTranslations() { }; } - // Drive.java + /** Returns whether the robot is currently in the DISABLED_COAST state */ public boolean isDisabledCoast() { return isDisabledCoast(TimeUtil.now()); } + /** Returns whether the robot was in the DISABLED_COAST state at time `now` */ public boolean isDisabledCoast(double now) { return DriverStation.isDisabled() && (now < disabledCoastUntilTs); } + /** Returns the disabledCoastStartTs variable */ public double getDisabledCoastStartTs() { return disabledCoastStartTs; } @@ -708,12 +736,6 @@ public void zeroHeading() { * @param measurement The pose @ timestamp to add to the pose estimator */ // Called by Vision via consumer.accept(TimedPose) - // Drive.java fields - private boolean disabledVisionInitialized = false; - - private Pose2d lastDisabledVisionPose = new Pose2d(); - private double lastDisabledVisionTs = Double.NaN; - public void addVisionMeasurement(TimedPose meas) { Drive.odometryLock.lock(); try { @@ -853,11 +875,6 @@ private void markPoseReset(double fpgaNow) { * pass-throughs to allow this functionality. 
*/ - /** Get the pose estimator current pose */ - public Pose2d poseEstimatorGetPose() { - return m_PoseEstimator.getEstimatedPosition(); - } - /** Update the pose estimator at a timestamp */ void poseEstimatorUpdateWithTime(double t, Rotation2d yaw, SwerveModulePosition[] positions) { m_PoseEstimator.updateWithTime(t, yaw, positions); diff --git a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java index 1d50d9c0..aff93c27 100644 --- a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java +++ b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java @@ -40,6 +40,9 @@ public final class DriveOdometry extends VirtualSubsystem { // Per-cycle cached objects (to avoid repeated allocations) private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; + // Checking whether this is a REPLAY + private boolean isReplayActive = Logger.hasReplaySource(); + /** Constructor */ public DriveOdometry(Drive drive, Imu imu, Module[] modules) { this.drive = drive; @@ -72,14 +75,10 @@ public void rbsiPeriodic() { module.periodic(); } - final boolean isReplayActive = Logger.hasReplaySource(); - Logger.recordOutput("Odometry/Debug/isDisabled", DriverStation.isDisabled()); - Logger.recordOutput("Odometry/Debug/isReplayActive", isReplayActive); - // ---------------------------------------------------------------------- // Pure SIM (not replaying a log): use sim pose/yaw // ---------------------------------------------------------------------- - if (Constants.getMode() == Mode.SIM && !isReplayActive) { + if (Constants.isPureSim()) { final double now = TimeUtil.now(); // Keep buffers alive @@ -118,8 +117,8 @@ public void rbsiPeriodic() { drive.getModulePositions()); // keep pose buffer alive with the *current estimator pose* - drive.poseBufferAddSample(now, drive.poseEstimatorGetPose()); - Logger.recordOutput("Drive/Pose", drive.poseEstimatorGetPose()); + drive.poseBufferAddSample(now, drive.getPose()); + Logger.recordOutput("Drive/Pose", drive.getPose()); drive.setGyroDisconnectedAlert(!imuInputs.connected); return; } @@ -260,14 +259,14 @@ public void rbsiPeriodic() { drive.poseEstimatorUpdateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); // Maintain pose history in SAME timebase as estimator - drive.poseBufferAddSample(t, drive.poseEstimatorGetPose()); + drive.poseBufferAddSample(t, drive.getPose()); } - Logger.recordOutput("Drive/Pose", drive.poseEstimatorGetPose()); + Logger.recordOutput("Drive/Pose", drive.getPose()); drive.setGyroDisconnectedAlert(!imuInputs.connected); } finally { - final Pose2d pose = drive.poseEstimatorGetPose(); + final Pose2d pose = drive.getPose(); final double x = pose.getX(); final double y = pose.getY(); final double th = pose.getRotation().getRadians(); diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java index 01d0ffc2..9ae8105b 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java @@ -56,6 +56,7 @@ import frc.robot.util.PhoenixUtil; import frc.robot.util.RBSICANBusRegistry; import frc.robot.util.SparkUtil; +import java.util.Arrays; import java.util.Queue; import org.littletonrobotics.junction.Logger; @@ -73,6 +74,9 @@ public class ModuleIOBlended implements ModuleIO { TalonFXConfiguration, TalonFXConfiguration, CANcoderConfiguration> constants; + // This module number (for logging) + private final 
int module; + // Hardware Objects private final TalonFX driveTalon; private final SparkBase turnSpark; @@ -134,6 +138,8 @@ public class ModuleIOBlended implements ModuleIO { * added in appropriately. */ public ModuleIOBlended(int module) { + // Record the module number for logging purposes + this.module = module; constants = switch (module) { @@ -312,21 +318,28 @@ public ModuleIOBlended(int module) { @Override public void updateInputs(ModuleIOInputs inputs) { - // Refresh all signals + // Refresh signals var driveStatus = BaseStatusSignal.refreshAll(drivePosition, driveVelocity, driveAppliedVolts, driveCurrent); - var turnEncoderStatus = BaseStatusSignal.refreshAll(turnAbsolutePosition); + var encStatus = BaseStatusSignal.refreshAll(turnAbsolutePosition); + + if (!driveStatus.isOK()) { + Logger.recordOutput("CAN/Module" + module + "/DriveRefreshStatus", driveStatus.toString()); + } + if (!encStatus.isOK()) { + Logger.recordOutput("CAN/Module" + module + "/EncRefreshStatus", encStatus.toString()); + } - // Update drive inputs + // Drive inputs inputs.driveConnected = driveConnectedDebounce.calculate(driveStatus.isOK()); inputs.drivePositionRad = Units.rotationsToRadians(drivePosition.getValueAsDouble()); inputs.driveVelocityRadPerSec = Units.rotationsToRadians(driveVelocity.getValueAsDouble()); inputs.driveAppliedVolts = driveAppliedVolts.getValueAsDouble(); inputs.driveCurrentAmps = driveCurrent.getValueAsDouble(); - // Update turn inputs + // Turn inputs (Spark for turn motor, CANcoder for absolute) SparkUtil.sparkStickyFault = false; - inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(turnEncoderStatus.isOK()); + inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(encStatus.isOK()); inputs.turnAbsolutePosition = Rotation2d.fromRotations(turnAbsolutePosition.getValueAsDouble()); inputs.turnPosition = Rotation2d.fromRotations(turnPosition.getValueAsDouble()); inputs.turnVelocityRadPerSec = Units.rotationsToRadians(turnVelocity.getValueAsDouble()); @@ -334,20 +347,43 @@ public void updateInputs(ModuleIOInputs inputs) { turnSpark, turnSpark::getOutputCurrent, (value) -> inputs.turnCurrentAmps = value); inputs.turnConnected = turnConnectedDebounce.calculate(!SparkUtil.sparkStickyFault); - // Update odometry inputs - inputs.odometryTimestamps = - timestampQueue.stream().mapToDouble((Double value) -> value).toArray(); - inputs.odometryDrivePositionsRad = - drivePositionQueue.stream() - .mapToDouble((Double value) -> Units.rotationsToRadians(value)) - .toArray(); - inputs.odometryTurnPositions = - turnPositionQueue.stream() - .map((Double value) -> Rotation2d.fromRotations(value)) - .toArray(Rotation2d[]::new); - timestampQueue.clear(); - drivePositionQueue.clear(); - turnPositionQueue.clear(); + // Odometry queue drain (common prefix only) + final int tsCount = timestampQueue.size(); + final int driveCount = drivePositionQueue.size(); + final int turnCount = turnPositionQueue.size(); + final int sampleCount = Math.min(tsCount, Math.min(driveCount, turnCount)); + + if (sampleCount <= 0) { + inputs.odometryTimestamps = new double[0]; + inputs.odometryDrivePositionsRad = new double[0]; + inputs.odometryTurnPositions = new Rotation2d[0]; + return; + } + + final double[] outTs = new double[sampleCount]; + final double[] outDriveRad = new double[sampleCount]; + final Rotation2d[] outTurn = new Rotation2d[sampleCount]; + + for (int i = 0; i < sampleCount; i++) { + final Double t = timestampQueue.poll(); + final Double driveRot = drivePositionQueue.poll(); + final 
Double turnRot = turnPositionQueue.poll(); + + if (t == null || driveRot == null || turnRot == null) { + inputs.odometryTimestamps = Arrays.copyOf(outTs, i); + inputs.odometryDrivePositionsRad = Arrays.copyOf(outDriveRad, i); + inputs.odometryTurnPositions = Arrays.copyOf(outTurn, i); + return; + } + + outTs[i] = t.doubleValue(); + outDriveRad[i] = Units.rotationsToRadians(driveRot.doubleValue()); + outTurn[i] = Rotation2d.fromRotations(turnRot.doubleValue()); + } + + inputs.odometryTimestamps = outTs; + inputs.odometryDrivePositionsRad = outDriveRad; + inputs.odometryTurnPositions = outTurn; } /** diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java index 1c92c68d..c79d5ee9 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java @@ -32,6 +32,7 @@ import frc.robot.Constants; import frc.robot.Constants.DrivebaseConstants; import frc.robot.util.SparkUtil; +import java.util.Arrays; import java.util.Queue; import java.util.function.DoubleSupplier; import org.littletonrobotics.junction.Logger; @@ -214,7 +215,7 @@ public ModuleIOSpark(int module) { @Override public void updateInputs(ModuleIOInputs inputs) { - // Update drive inputs + // Drive inputs SparkUtil.sparkStickyFault = false; SparkUtil.ifOk( driveSpark, driveEncoder::getPosition, (value) -> inputs.drivePositionRad = value); @@ -228,7 +229,7 @@ public void updateInputs(ModuleIOInputs inputs) { driveSpark, driveSpark::getOutputCurrent, (value) -> inputs.driveCurrentAmps = value); inputs.driveConnected = driveConnectedDebounce.calculate(!SparkUtil.sparkStickyFault); - // Update turn inputs + // Turn inputs SparkUtil.sparkStickyFault = false; SparkUtil.ifOk( turnSpark, @@ -244,18 +245,47 @@ public void updateInputs(ModuleIOInputs inputs) { turnSpark, turnSpark::getOutputCurrent, (value) -> inputs.turnCurrentAmps = value); inputs.turnConnected = turnConnectedDebounce.calculate(!SparkUtil.sparkStickyFault); - // Update odometry inputs - inputs.odometryTimestamps = - timestampQueue.stream().mapToDouble((Double value) -> value).toArray(); - inputs.odometryDrivePositionsRad = - drivePositionQueue.stream().mapToDouble((Double value) -> value).toArray(); - inputs.odometryTurnPositions = - turnPositionQueue.stream() - .map((Double value) -> new Rotation2d(value).minus(zeroRotation)) - .toArray(Rotation2d[]::new); - timestampQueue.clear(); - drivePositionQueue.clear(); - turnPositionQueue.clear(); + // If you don’t have an absolute encoder on this variant, keep these consistent: + inputs.turnEncoderConnected = true; // or a real debounce if you have one + inputs.turnAbsolutePosition = inputs.turnPosition; + + // Odometry queue drain (common prefix only) + final int tsCount = timestampQueue.size(); + final int driveCount = drivePositionQueue.size(); + final int turnCount = turnPositionQueue.size(); + final int sampleCount = Math.min(tsCount, Math.min(driveCount, turnCount)); + + if (sampleCount <= 0) { + inputs.odometryTimestamps = new double[0]; + inputs.odometryDrivePositionsRad = new double[0]; + inputs.odometryTurnPositions = new Rotation2d[0]; + return; + } + + final double[] outTs = new double[sampleCount]; + final double[] outDriveRad = new double[sampleCount]; + final Rotation2d[] outTurn = new Rotation2d[sampleCount]; + + for (int i = 0; i < sampleCount; i++) { + final Double t = timestampQueue.poll(); + final Double drivePosRad = drivePositionQueue.poll(); // already rad in your 
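For context on the queue-drain rewrite above: the odometry thread fills the timestamp, drive-position, and turn-position queues asynchronously, so their sizes can momentarily disagree. Draining only the common prefix (and bailing out if a poll() unexpectedly returns null) keeps the per-sample arrays index-aligned instead of silently pairing a timestamp with the wrong position. The following is a minimal, self-contained sketch of the same pattern; the helper name, the two-queue simplification, and the main() demo are illustrative only and do not appear in the ModuleIO classes.

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Queue;

final class OdometryQueueDrainSketch {
  /** Drain only as many samples as every queue can supply, guarding against a concurrent drain. */
  static double[][] drainCommonPrefix(Queue<Double> timestamps, Queue<Double> positions) {
    final int sampleCount = Math.min(timestamps.size(), positions.size());
    final double[] outTs = new double[sampleCount];
    final double[] outPos = new double[sampleCount];
    for (int i = 0; i < sampleCount; i++) {
      final Double t = timestamps.poll();
      final Double p = positions.poll();
      if (t == null || p == null) {
        // Another thread emptied a queue between size() and poll(); keep what was read so far.
        return new double[][] {Arrays.copyOf(outTs, i), Arrays.copyOf(outPos, i)};
      }
      outTs[i] = t;
      outPos[i] = p;
    }
    return new double[][] {outTs, outPos};
  }

  public static void main(String[] args) {
    Queue<Double> ts = new ArrayDeque<>(Arrays.asList(0.02, 0.04, 0.06));
    Queue<Double> pos = new ArrayDeque<>(Arrays.asList(1.00, 1.05)); // one sample behind
    double[][] drained = drainCommonPrefix(ts, pos);
    System.out.println(drained[0].length); // 2 -- only the common prefix was consumed
  }
}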
existing code + final Double turnPosRad = turnPositionQueue.poll(); // rad in your existing code + + if (t == null || drivePosRad == null || turnPosRad == null) { + inputs.odometryTimestamps = Arrays.copyOf(outTs, i); + inputs.odometryDrivePositionsRad = Arrays.copyOf(outDriveRad, i); + inputs.odometryTurnPositions = Arrays.copyOf(outTurn, i); + return; + } + + outTs[i] = t.doubleValue(); + outDriveRad[i] = drivePosRad.doubleValue(); + outTurn[i] = new Rotation2d(turnPosRad.doubleValue()).minus(zeroRotation); + } + + inputs.odometryTimestamps = outTs; + inputs.odometryDrivePositionsRad = outDriveRad; + inputs.odometryTurnPositions = outTurn; } /** diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java index 587f8ec8..7c106838 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java @@ -38,6 +38,7 @@ import frc.robot.Constants.DrivebaseConstants; import frc.robot.util.RBSICANBusRegistry; import frc.robot.util.SparkUtil; +import java.util.Arrays; import java.util.Queue; import java.util.function.DoubleSupplier; import org.littletonrobotics.junction.Logger; @@ -47,6 +48,10 @@ * and duty cycle absolute encoder. */ public class ModuleIOSparkCANcoder implements ModuleIO { + + // This module number (for logging) + private final int module; + private final Rotation2d zeroRotation; // Hardware objects @@ -87,6 +92,9 @@ public class ModuleIOSparkCANcoder implements ModuleIO { * Spark I/O w/ CANcoders */ public ModuleIOSparkCANcoder(int module) { + // Record the module number for logging purposes + this.module = module; + zeroRotation = switch (module) { case 0 -> new Rotation2d(SwerveConstants.kFLEncoderOffset); @@ -237,11 +245,13 @@ public ModuleIOSparkCANcoder(int module) { @Override public void updateInputs(ModuleIOInputs inputs) { + // Refresh CANcoder absolute + var encStatus = BaseStatusSignal.refreshAll(turnAbsolutePosition); + if (!encStatus.isOK()) { + Logger.recordOutput("CAN/Module" + module + "/EncRefreshStatus", encStatus.toString()); + } - // Refresh all signals - var turnEncoderStatus = BaseStatusSignal.refreshAll(turnAbsolutePosition); - - // Update drive inputs + // Drive inputs (Spark) SparkUtil.sparkStickyFault = false; SparkUtil.ifOk( driveSpark, driveEncoder::getPosition, (value) -> inputs.drivePositionRad = value); @@ -255,9 +265,9 @@ public void updateInputs(ModuleIOInputs inputs) { driveSpark, driveSpark::getOutputCurrent, (value) -> inputs.driveCurrentAmps = value); inputs.driveConnected = driveConnectedDebounce.calculate(!SparkUtil.sparkStickyFault); - // Update turn inputs + // Turn inputs (CANcoder abs + Spark velocity/applied/current) SparkUtil.sparkStickyFault = false; - inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(turnEncoderStatus.isOK()); + inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(encStatus.isOK()); inputs.turnAbsolutePosition = Rotation2d.fromRotations(turnAbsolutePosition.getValueAsDouble()); inputs.turnPosition = Rotation2d.fromRotations(turnPosition.getValueAsDouble()); inputs.turnVelocityRadPerSec = Units.rotationsToRadians(turnVelocity.getValueAsDouble()); @@ -269,18 +279,43 @@ public void updateInputs(ModuleIOInputs inputs) { turnSpark, turnSpark::getOutputCurrent, (value) -> inputs.turnCurrentAmps = value); inputs.turnConnected = turnConnectedDebounce.calculate(!SparkUtil.sparkStickyFault); - // Update odometry inputs 
- inputs.odometryTimestamps = - timestampQueue.stream().mapToDouble((Double value) -> value).toArray(); - inputs.odometryDrivePositionsRad = - drivePositionQueue.stream().mapToDouble((Double value) -> value).toArray(); - inputs.odometryTurnPositions = - turnPositionQueue.stream() - .map((Double value) -> new Rotation2d(value).minus(zeroRotation)) - .toArray(Rotation2d[]::new); - timestampQueue.clear(); - drivePositionQueue.clear(); - turnPositionQueue.clear(); + // Odometry queue drain (common prefix only) + final int tsCount = timestampQueue.size(); + final int driveCount = drivePositionQueue.size(); + final int turnCount = turnPositionQueue.size(); + final int sampleCount = Math.min(tsCount, Math.min(driveCount, turnCount)); + + if (sampleCount <= 0) { + inputs.odometryTimestamps = new double[0]; + inputs.odometryDrivePositionsRad = new double[0]; + inputs.odometryTurnPositions = new Rotation2d[0]; + return; + } + + final double[] outTs = new double[sampleCount]; + final double[] outDriveRad = new double[sampleCount]; + final Rotation2d[] outTurn = new Rotation2d[sampleCount]; + + for (int i = 0; i < sampleCount; i++) { + final Double t = timestampQueue.poll(); + final Double drivePosRad = drivePositionQueue.poll(); // already rad in your existing code + final Double turnPosRad = turnPositionQueue.poll(); // rad, then minus zeroRotation below + + if (t == null || drivePosRad == null || turnPosRad == null) { + inputs.odometryTimestamps = Arrays.copyOf(outTs, i); + inputs.odometryDrivePositionsRad = Arrays.copyOf(outDriveRad, i); + inputs.odometryTurnPositions = Arrays.copyOf(outTurn, i); + return; + } + + outTs[i] = t.doubleValue(); + outDriveRad[i] = drivePosRad.doubleValue(); + outTurn[i] = new Rotation2d(turnPosRad.doubleValue()).minus(zeroRotation); + } + + inputs.odometryTimestamps = outTs; + inputs.odometryDrivePositionsRad = outDriveRad; + inputs.odometryTurnPositions = outTurn; } /** diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java index 4359b385..ccf9c270 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java @@ -363,7 +363,7 @@ public void setDriveOpenLoop(double output) { // Log output and battery Logger.recordOutput("Swerve/Drive/OpenLoopOutput", scaledOutput); - Logger.recordOutput("Robot/BatteryVoltage", busVoltage); + Logger.recordOutput("Swerve/BatteryVoltage", busVoltage); } /** @@ -384,7 +384,7 @@ public void setTurnOpenLoop(double output) { // Log output and battery Logger.recordOutput("Swerve/Turn/OpenLoopOutput", scaledOutput); - Logger.recordOutput("Robot/BatteryVoltage", busVoltage); + Logger.recordOutput("Swerve/BatteryVoltage", busVoltage); } /** @@ -427,7 +427,7 @@ public void setDriveVelocity(double velocityRadPerSec) { Logger.recordOutput("Swerve/Drive/VelocityRotPerSec", velocityRotPerSec); Logger.recordOutput("Swerve/Drive/AccelerationRotPerSec2", accelerationRotPerSec2); Logger.recordOutput("Swerve/Drive/FeedForwardVolts", scaledFFVolts); - Logger.recordOutput("Robot/BatteryVoltage", busVoltage); + Logger.recordOutput("Swerve/BatteryVoltage", busVoltage); Logger.recordOutput("Swerve/Drive/ClosedLoopMode", m_DriveMotorClosedLoopOutput); } @@ -455,7 +455,7 @@ public void setTurnPosition(Rotation2d rotation) { }); Logger.recordOutput("Swerve/Turn/TargetRotations", rotation.getRotations()); - Logger.recordOutput("Robot/BatteryVoltage", busVoltage); + 
Logger.recordOutput("Swerve/BatteryVoltage", busVoltage); } @Override diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java index f3cc029a..6226ed2f 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java @@ -7,438 +7,507 @@ // license that can be found in the LICENSE file // at the root directory of this project. -package frc.robot.subsystems.drive; - -import static edu.wpi.first.units.Units.RotationsPerSecond; - -import com.ctre.phoenix6.BaseStatusSignal; -import com.ctre.phoenix6.CANBus; -import com.ctre.phoenix6.StatusSignal; -import com.ctre.phoenix6.configs.CANdiConfiguration; -import com.ctre.phoenix6.configs.ClosedLoopRampsConfigs; -import com.ctre.phoenix6.configs.OpenLoopRampsConfigs; -import com.ctre.phoenix6.configs.Slot0Configs; -import com.ctre.phoenix6.configs.TalonFXSConfiguration; -import com.ctre.phoenix6.controls.PositionTorqueCurrentFOC; -import com.ctre.phoenix6.controls.PositionVoltage; -import com.ctre.phoenix6.controls.TorqueCurrentFOC; -import com.ctre.phoenix6.controls.VelocityTorqueCurrentFOC; -import com.ctre.phoenix6.controls.VelocityVoltage; -import com.ctre.phoenix6.controls.VoltageOut; -import com.ctre.phoenix6.hardware.CANdi; -import com.ctre.phoenix6.hardware.ParentDevice; -import com.ctre.phoenix6.hardware.TalonFXS; -import com.ctre.phoenix6.signals.BrushedMotorWiringValue; -import com.ctre.phoenix6.signals.ExternalFeedbackSensorSourceValue; -import com.ctre.phoenix6.signals.InvertedValue; -import com.ctre.phoenix6.signals.MotorArrangementValue; -import com.ctre.phoenix6.signals.NeutralModeValue; -import com.ctre.phoenix6.signals.StaticFeedforwardSignValue; -import com.ctre.phoenix6.swerve.SwerveModuleConstants; -import com.ctre.phoenix6.swerve.SwerveModuleConstants.ClosedLoopOutputType; -import edu.wpi.first.math.filter.Debouncer; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.units.measure.Angle; -import edu.wpi.first.units.measure.AngularVelocity; -import edu.wpi.first.units.measure.Current; -import edu.wpi.first.units.measure.Voltage; -import edu.wpi.first.wpilibj.RobotController; -import frc.robot.Constants; -import frc.robot.Constants.DrivebaseConstants; -import frc.robot.util.PhoenixUtil; -import frc.robot.util.RBSICANBusRegistry; -import java.util.Queue; -import org.littletonrobotics.junction.Logger; - -/** - * Module IO implementation for Talon FXS drive motor controller, Talon FXS turn motor controller, - * and CANdi (PWM 1). Configured using a set of module constants from Phoenix. - * - *

Device configuration and other behaviors not exposed by TunerConstants can be customized here. - */ -public class ModuleIOTalonFXS implements ModuleIO { - // Hardware objects - private final TalonFXS driveTalon; - private final TalonFXS turnTalon; - private final CANdi candi; - private final ClosedLoopOutputType m_DriveMotorClosedLoopOutput = - switch (Constants.getPhoenixPro()) { - case LICENSED -> ClosedLoopOutputType.TorqueCurrentFOC; - case UNLICENSED -> ClosedLoopOutputType.Voltage; - }; - private final ClosedLoopOutputType m_SteerMotorClosedLoopOutput = - switch (Constants.getPhoenixPro()) { - case LICENSED -> ClosedLoopOutputType.TorqueCurrentFOC; - case UNLICENSED -> ClosedLoopOutputType.Voltage; - }; - - // Voltage control requests - private final VoltageOut voltageRequest = new VoltageOut(0); - private final PositionVoltage positionVoltageRequest = new PositionVoltage(0.0); - private final VelocityVoltage velocityVoltageRequest = new VelocityVoltage(0.0); - - // Torque-current control requests - private final TorqueCurrentFOC torqueCurrentRequest = new TorqueCurrentFOC(0); - private final PositionTorqueCurrentFOC positionTorqueCurrentRequest = - new PositionTorqueCurrentFOC(0.0); - private final VelocityTorqueCurrentFOC velocityTorqueCurrentRequest = - new VelocityTorqueCurrentFOC(0.0); - - // Timestamp inputs from Phoenix thread - private final Queue timestampQueue; - - // Inputs from drive motor - private final StatusSignal drivePosition; - private final StatusSignal drivePositionOdom; - private final Queue drivePositionQueue; - private final StatusSignal driveVelocity; - private final StatusSignal driveAppliedVolts; - private final StatusSignal driveCurrent; - - // Inputs from turn motor - private final StatusSignal turnAbsolutePosition; - private final StatusSignal turnPosition; - private final StatusSignal turnPositionOdom; - private final Queue turnPositionQueue; - private final StatusSignal turnVelocity; - private final StatusSignal turnAppliedVolts; - private final StatusSignal turnCurrent; - - // Connection debouncers - private final Debouncer driveConnectedDebounce = - new Debouncer(0.5, Debouncer.DebounceType.kFalling); - private final Debouncer turnConnectedDebounce = - new Debouncer(0.5, Debouncer.DebounceType.kFalling); - private final Debouncer turnEncoderConnectedDebounce = - new Debouncer(0.5, Debouncer.DebounceType.kFalling); - - // Config - private final TalonFXSConfiguration driveConfig = new TalonFXSConfiguration(); - private final TalonFXSConfiguration turnConfig = new TalonFXSConfiguration(); - - // Values used for calculating feedforward from kS, kV, and kA - private double lastVelocityRotPerSec = 0.0; - private long lastTimestampNano = System.nanoTime(); - - /* - * TalonFXS I/O - */ - public ModuleIOTalonFXS( - SwerveModuleConstants - constants) { - CANBus canBus = RBSICANBusRegistry.getBus(SwerveConstants.kCANbusName); - driveTalon = new TalonFXS(constants.DriveMotorId, canBus); - turnTalon = new TalonFXS(constants.SteerMotorId, canBus); - candi = new CANdi(constants.EncoderId, canBus); - - // Configure drive motor - driveConfig.MotorOutput.NeutralMode = NeutralModeValue.Brake; - driveConfig.Slot0 = - new Slot0Configs() - .withKP(DrivebaseConstants.kDriveP) - .withKI(0.0) - .withKD(DrivebaseConstants.kDriveD) - .withKS(DrivebaseConstants.kDriveS) - .withKV(DrivebaseConstants.kDriveV) - .withKA(DrivebaseConstants.kDriveA); - driveConfig.Commutation.MotorArrangement = - switch (constants.DriveMotorType) { - case TalonFXS_NEO_JST -> 
MotorArrangementValue.NEO_JST; - case TalonFXS_VORTEX_JST -> MotorArrangementValue.VORTEX_JST; - default -> MotorArrangementValue.Disabled; - }; - driveConfig.MotorOutput.NeutralMode = NeutralModeValue.Brake; - driveConfig.Slot0 = constants.DriveMotorGains; - driveConfig.ExternalFeedback.SensorToMechanismRatio = constants.DriveMotorGearRatio; - driveConfig.CurrentLimits.StatorCurrentLimit = DrivebaseConstants.kSlipCurrent; - driveConfig.CurrentLimits.StatorCurrentLimitEnable = true; - // Build the OpenLoopRampsConfigs and ClosedLoopRampsConfigs for current smoothing - OpenLoopRampsConfigs openRamps = new OpenLoopRampsConfigs(); - openRamps.DutyCycleOpenLoopRampPeriod = DrivebaseConstants.kDriveOpenLoopRampPeriod; - openRamps.VoltageOpenLoopRampPeriod = DrivebaseConstants.kDriveOpenLoopRampPeriod; - openRamps.TorqueOpenLoopRampPeriod = DrivebaseConstants.kDriveOpenLoopRampPeriod; - ClosedLoopRampsConfigs closedRamps = new ClosedLoopRampsConfigs(); - closedRamps.DutyCycleClosedLoopRampPeriod = DrivebaseConstants.kDriveClosedLoopRampPeriod; - closedRamps.VoltageClosedLoopRampPeriod = DrivebaseConstants.kDriveClosedLoopRampPeriod; - closedRamps.TorqueClosedLoopRampPeriod = DrivebaseConstants.kDriveClosedLoopRampPeriod; - // Apply the open- and closed-loop ramp configuration for current smoothing - driveConfig.withClosedLoopRamps(closedRamps).withOpenLoopRamps(openRamps); - // Set motor inversions - driveConfig.MotorOutput.Inverted = - constants.DriveMotorInverted - ? InvertedValue.Clockwise_Positive - : InvertedValue.CounterClockwise_Positive; - // Apply everything to the motor controllers - PhoenixUtil.tryUntilOk(5, () -> driveTalon.getConfigurator().apply(driveConfig, 0.25)); - PhoenixUtil.tryUntilOk(5, () -> driveTalon.setPosition(0.0, 0.25)); - - // Configure turn motor - turnConfig.MotorOutput.NeutralMode = NeutralModeValue.Brake; - turnConfig.Slot0 = - new Slot0Configs() - .withKP(DrivebaseConstants.kSteerP) - .withKI(0.0) - .withKD(DrivebaseConstants.kSteerD) - .withKS(DrivebaseConstants.kSteerS) - .withKV(0.0) - .withKA(0.0) - .withStaticFeedforwardSign(StaticFeedforwardSignValue.UseClosedLoopSign); - turnConfig.Commutation.MotorArrangement = - switch (constants.SteerMotorType) { - case TalonFXS_Minion_JST -> MotorArrangementValue.Minion_JST; - case TalonFXS_NEO_JST -> MotorArrangementValue.NEO_JST; - case TalonFXS_VORTEX_JST -> MotorArrangementValue.VORTEX_JST; - case TalonFXS_NEO550_JST -> MotorArrangementValue.NEO550_JST; - case TalonFXS_Brushed_AB, TalonFXS_Brushed_AC, TalonFXS_Brushed_BC -> - MotorArrangementValue.Brushed_DC; - default -> MotorArrangementValue.Disabled; - }; - turnConfig.Commutation.BrushedMotorWiring = - switch (constants.SteerMotorType) { - case TalonFXS_Brushed_AC -> BrushedMotorWiringValue.Leads_A_and_C; - case TalonFXS_Brushed_BC -> BrushedMotorWiringValue.Leads_B_and_C; - default -> BrushedMotorWiringValue.Leads_A_and_B; - }; - turnConfig.MotorOutput.NeutralMode = NeutralModeValue.Brake; - turnConfig.Slot0 = constants.SteerMotorGains; - turnConfig.ExternalFeedback.FeedbackRemoteSensorID = constants.EncoderId; - turnConfig.ExternalFeedback.ExternalFeedbackSensorSource = - switch (constants.FeedbackSource) { - case RemoteCANdiPWM1 -> ExternalFeedbackSensorSourceValue.RemoteCANdiPWM1; - case FusedCANdiPWM1 -> ExternalFeedbackSensorSourceValue.FusedCANdiPWM1; - case SyncCANdiPWM1 -> ExternalFeedbackSensorSourceValue.SyncCANdiPWM1; - default -> - throw new RuntimeException( - "You have selected a turn feedback source that is not supported by the default 
implementation of ModuleIOTalonFXS (CANdi PWM 1). Please check the AdvantageKit documentation for more information on alternative configurations: https://docs.advantagekit.org/getting-started/template-projects/talonfx-swerve-template#custom-module-implementations"); - }; - turnConfig.ExternalFeedback.RotorToSensorRatio = constants.SteerMotorGearRatio; - turnConfig.MotionMagic.MotionMagicCruiseVelocity = 100.0 / constants.SteerMotorGearRatio; - turnConfig.MotionMagic.MotionMagicAcceleration = - turnConfig.MotionMagic.MotionMagicCruiseVelocity / 0.100; - turnConfig.MotionMagic.MotionMagicExpo_kV = 0.12 * constants.SteerMotorGearRatio; - turnConfig.MotionMagic.MotionMagicExpo_kA = 0.1; - turnConfig.ClosedLoopGeneral.ContinuousWrap = true; - turnConfig.MotorOutput.Inverted = - constants.SteerMotorInverted - ? InvertedValue.Clockwise_Positive - : InvertedValue.CounterClockwise_Positive; - PhoenixUtil.tryUntilOk(5, () -> turnTalon.getConfigurator().apply(turnConfig, 0.25)); - - // Configure CANdi - CANdiConfiguration candiConfig = constants.EncoderInitialConfigs; - candiConfig.PWM1.AbsoluteSensorOffset = constants.EncoderOffset; - candiConfig.PWM1.SensorDirection = constants.EncoderInverted; - candi.getConfigurator().apply(candiConfig); - - // Create timestamp queue - timestampQueue = PhoenixOdometryThread.getInstance().makeTimestampQueue(); - - // Create drive status signals - drivePosition = driveTalon.getPosition(); - drivePositionOdom = drivePosition.clone(); // NEW - drivePositionQueue = PhoenixOdometryThread.getInstance().registerSignal(drivePositionOdom); - - driveVelocity = driveTalon.getVelocity(); - driveAppliedVolts = driveTalon.getMotorVoltage(); - driveCurrent = driveTalon.getStatorCurrent(); - - // Create turn status signals - turnPosition = turnTalon.getPosition(); - turnPositionOdom = turnPosition.clone(); // NEW - turnPositionQueue = PhoenixOdometryThread.getInstance().registerSignal(turnPositionOdom); - - turnAbsolutePosition = candi.getPWM1Position(); - turnVelocity = turnTalon.getVelocity(); - turnAppliedVolts = turnTalon.getMotorVoltage(); - turnCurrent = turnTalon.getStatorCurrent(); - - // Configure periodic frames (IMPORTANT: apply odometry rate to the *odom clones*) - BaseStatusSignal.setUpdateFrequencyForAll( - SwerveConstants.kOdometryFrequency, drivePositionOdom, turnPositionOdom); - BaseStatusSignal.setUpdateFrequencyForAll( - 50.0, - drivePosition, - turnPosition, - driveVelocity, - driveAppliedVolts, - driveCurrent, - turnAbsolutePosition, - turnVelocity, - turnAppliedVolts, - turnCurrent); - - ParentDevice.optimizeBusUtilizationForAll(driveTalon, turnTalon); - } - - @Override - public void updateInputs(ModuleIOInputs inputs) { - // Refresh all signals - var driveStatus = - BaseStatusSignal.refreshAll(drivePosition, driveVelocity, driveAppliedVolts, driveCurrent); - var turnStatus = - BaseStatusSignal.refreshAll(turnPosition, turnVelocity, turnAppliedVolts, turnCurrent); - var turnEncoderStatus = BaseStatusSignal.refreshAll(turnAbsolutePosition); - - // Update drive inputs - inputs.driveConnected = driveConnectedDebounce.calculate(driveStatus.isOK()); - inputs.drivePositionRad = Units.rotationsToRadians(drivePosition.getValueAsDouble()); - inputs.driveVelocityRadPerSec = Units.rotationsToRadians(driveVelocity.getValueAsDouble()); - inputs.driveAppliedVolts = driveAppliedVolts.getValueAsDouble(); - inputs.driveCurrentAmps = driveCurrent.getValueAsDouble(); - - // Update turn inputs - inputs.turnConnected = turnConnectedDebounce.calculate(turnStatus.isOK()); - 
inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(turnEncoderStatus.isOK()); - inputs.turnAbsolutePosition = Rotation2d.fromRotations(turnAbsolutePosition.getValueAsDouble()); - inputs.turnPosition = Rotation2d.fromRotations(turnPosition.getValueAsDouble()); - inputs.turnVelocityRadPerSec = Units.rotationsToRadians(turnVelocity.getValueAsDouble()); - inputs.turnAppliedVolts = turnAppliedVolts.getValueAsDouble(); - inputs.turnCurrentAmps = turnCurrent.getValueAsDouble(); - - // Update odometry inputs - inputs.odometryTimestamps = - timestampQueue.stream().mapToDouble((Double value) -> value).toArray(); - inputs.odometryDrivePositionsRad = - drivePositionQueue.stream() - .mapToDouble((Double value) -> Units.rotationsToRadians(value)) - .toArray(); - inputs.odometryTurnPositions = - turnPositionQueue.stream() - .map((Double value) -> Rotation2d.fromRotations(value)) - .toArray(Rotation2d[]::new); - timestampQueue.clear(); - drivePositionQueue.clear(); - turnPositionQueue.clear(); - } - - /** - * Set the drive motor to an open-loop voltage, scaled to battery voltage - * - * @param output Specified open-loop voltage requested - */ - @Override - public void setDriveOpenLoop(double output) { - // Scale by actual battery voltage to keep full output consistent - double busVoltage = RobotController.getBatteryVoltage(); - double scaledOutput = output * DrivebaseConstants.kOptimalVoltage / busVoltage; - - driveTalon.setControl( - switch (m_DriveMotorClosedLoopOutput) { - case Voltage -> voltageRequest.withOutput(scaledOutput); - case TorqueCurrentFOC -> torqueCurrentRequest.withOutput(scaledOutput); - }); - - // Log output and battery - Logger.recordOutput("Swerve/Drive/OpenLoopOutput", scaledOutput); - Logger.recordOutput("Robot/BatteryVoltage", busVoltage); - } - - /** - * Set the turn motor to an open-loop voltage, scaled to battery voltage - * - * @param output Specified open-loop voltage requested - */ - @Override - public void setTurnOpenLoop(double output) { - double busVoltage = RobotController.getBatteryVoltage(); - double scaledOutput = output * DrivebaseConstants.kOptimalVoltage / busVoltage; - - turnTalon.setControl( - switch (m_SteerMotorClosedLoopOutput) { - case Voltage -> voltageRequest.withOutput(scaledOutput); - case TorqueCurrentFOC -> torqueCurrentRequest.withOutput(scaledOutput); - }); - - Logger.recordOutput("Swerve/Turn/OpenLoopOutput", scaledOutput); - Logger.recordOutput("Robot/BatteryVoltage", busVoltage); - } - - /** - * Set the velocity of the module - * - * @param velocityRadPerSec Requested module drive velocity in radians per second - */ - @Override - public void setDriveVelocity(double velocityRadPerSec) { - double velocityRotPerSec = Units.radiansToRotations(velocityRadPerSec); - double busVoltage = RobotController.getBatteryVoltage(); - - // Compute the Feedforward voltage for CTRE UNLICENSED operation ***** - // Compute acceleration - long currentTimeNano = System.nanoTime(); - double deltaTimeSec = (currentTimeNano - lastTimestampNano) * 1e-9; - double accelerationRotPerSec2 = - deltaTimeSec > 0 ? 
(velocityRotPerSec - lastVelocityRotPerSec) / deltaTimeSec : 0.0; - // Update last values for next loop - lastVelocityRotPerSec = velocityRotPerSec; - lastTimestampNano = currentTimeNano; - // Compute feedforward voltage: kS + kV*v + kA*a - double nominalFFVolts = - Math.signum(velocityRotPerSec) * DrivebaseConstants.kDriveS - + DrivebaseConstants.kDriveV * velocityRotPerSec - + DrivebaseConstants.kDriveA * accelerationRotPerSec2; - double scaledFFVolts = nominalFFVolts * DrivebaseConstants.kOptimalVoltage / busVoltage; - - // Set the drive motor control based on CTRE LICENSED status - driveTalon.setControl( - switch (m_DriveMotorClosedLoopOutput) { - case Voltage -> - velocityVoltageRequest.withVelocity(velocityRotPerSec).withFeedForward(scaledFFVolts); - case TorqueCurrentFOC -> - velocityTorqueCurrentRequest.withVelocity(RotationsPerSecond.of(velocityRotPerSec)); - }); - - // AdvantageKit logging - Logger.recordOutput("Swerve/Drive/VelocityRadPerSec", velocityRadPerSec); - Logger.recordOutput("Swerve/Drive/VelocityRotPerSec", velocityRotPerSec); - Logger.recordOutput("Swerve/Drive/AccelerationRotPerSec2", accelerationRotPerSec2); - Logger.recordOutput("Swerve/Drive/FeedForwardVolts", scaledFFVolts); - Logger.recordOutput("Robot/BatteryVoltage", busVoltage); - Logger.recordOutput("Swerve/Drive/ClosedLoopMode", m_DriveMotorClosedLoopOutput); - } - - /** - * Set the turn position of the module - * - * @param rotation Requested module Rotation2d position - */ - @Override - public void setTurnPosition(Rotation2d rotation) { - double busVoltage = RobotController.getBatteryVoltage(); - - // Scale feedforward voltage by battery voltage - double nominalFFVolts = - DrivebaseConstants.kNominalFFVolts; // replace with your feedforward calculation if needed - double scaledFFVolts = nominalFFVolts * DrivebaseConstants.kOptimalVoltage / busVoltage; - - turnTalon.setControl( - switch (m_SteerMotorClosedLoopOutput) { - case Voltage -> - positionVoltageRequest - .withPosition(rotation.getRotations()) - .withFeedForward(scaledFFVolts); - case TorqueCurrentFOC -> - positionTorqueCurrentRequest.withPosition(rotation.getRotations()); - }); - - Logger.recordOutput("Swerve/Turn/TargetRotations", rotation.getRotations()); - Logger.recordOutput("Robot/BatteryVoltage", busVoltage); - } - - @Override - public void setDrivePID(double kP, double kI, double kD) { - driveConfig.Slot0.kP = kP; - driveConfig.Slot0.kI = kI; - driveConfig.Slot0.kD = kD; - PhoenixUtil.tryUntilOk(5, () -> driveTalon.getConfigurator().apply(driveConfig, 0.25)); - } - - @Override - public void setTurnPID(double kP, double kI, double kD) { - turnConfig.Slot0.kP = kP; - turnConfig.Slot0.kI = kI; - turnConfig.Slot0.kD = kD; - PhoenixUtil.tryUntilOk(5, () -> turnTalon.getConfigurator().apply(turnConfig, 0.25)); - } -} +// IMPORTANT: UNCOMMENT THIS MODULE IF YOU ARE USING FXS instead of FX +// COMMENT OUT the FX module + +// package frc.robot.subsystems.drive; + +// import static edu.wpi.first.units.Units.RotationsPerSecond; + +// import com.ctre.phoenix6.BaseStatusSignal; +// import com.ctre.phoenix6.CANBus; +// import com.ctre.phoenix6.StatusSignal; +// import com.ctre.phoenix6.configs.CANdiConfiguration; +// import com.ctre.phoenix6.configs.ClosedLoopRampsConfigs; +// import com.ctre.phoenix6.configs.OpenLoopRampsConfigs; +// import com.ctre.phoenix6.configs.Slot0Configs; +// import com.ctre.phoenix6.configs.TalonFXConfiguration; +// import com.ctre.phoenix6.configs.TalonFXSConfiguration; +// import 
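The setDriveVelocity(...) path removed above (and re-added in commented-out form below) computes a kS/kV/kA feedforward in volts and then rescales it by kOptimalVoltage / busVoltage, so the mechanism sees roughly the same effective voltage whether the battery is fresh or sagging. A standalone sketch of just that calculation follows; the gain values are placeholders rather than the real DrivebaseConstants, and the class name is invented for illustration.

final class DriveFeedforwardSketch {
  // Placeholder gains; the real values live in DrivebaseConstants.
  static final double kS = 0.15; // volts to overcome static friction
  static final double kV = 0.12; // volts per (rot/s)
  static final double kA = 0.01; // volts per (rot/s^2)
  static final double kOptimalVoltage = 12.0;

  private double lastVelocityRotPerSec = 0.0;
  private long lastTimestampNano = System.nanoTime();

  /** Feedforward volts for a velocity setpoint, scaled to the measured battery voltage. */
  double calculate(double velocityRotPerSec, double busVoltage) {
    long now = System.nanoTime();
    double dtSec = (now - lastTimestampNano) * 1e-9;
    double accelRotPerSec2 = dtSec > 0 ? (velocityRotPerSec - lastVelocityRotPerSec) / dtSec : 0.0;
    lastVelocityRotPerSec = velocityRotPerSec;
    lastTimestampNano = now;

    // kS * sign(v) + kV * v + kA * a, then scale so output tracks the nominal voltage.
    double nominalVolts =
        Math.signum(velocityRotPerSec) * kS + kV * velocityRotPerSec + kA * accelRotPerSec2;
    return nominalVolts * kOptimalVoltage / busVoltage;
  }

  public static void main(String[] args) {
    DriveFeedforwardSketch ff = new DriveFeedforwardSketch();
    System.out.println(ff.calculate(50.0, 11.3)); // commanded volts with an 11.3 V battery
  }
}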
com.ctre.phoenix6.controls.PositionTorqueCurrentFOC; +// import com.ctre.phoenix6.controls.PositionVoltage; +// import com.ctre.phoenix6.controls.TorqueCurrentFOC; +// import com.ctre.phoenix6.controls.VelocityTorqueCurrentFOC; +// import com.ctre.phoenix6.controls.VelocityVoltage; +// import com.ctre.phoenix6.controls.VoltageOut; +// import com.ctre.phoenix6.hardware.CANdi; +// import com.ctre.phoenix6.hardware.ParentDevice; +// import com.ctre.phoenix6.hardware.TalonFXS; +// import com.ctre.phoenix6.signals.BrushedMotorWiringValue; +// import com.ctre.phoenix6.signals.ExternalFeedbackSensorSourceValue; +// import com.ctre.phoenix6.signals.InvertedValue; +// import com.ctre.phoenix6.signals.MotorArrangementValue; +// import com.ctre.phoenix6.signals.NeutralModeValue; +// import com.ctre.phoenix6.signals.StaticFeedforwardSignValue; +// import com.ctre.phoenix6.swerve.SwerveModuleConstants; +// import com.ctre.phoenix6.swerve.SwerveModuleConstants.ClosedLoopOutputType; +// import edu.wpi.first.math.filter.Debouncer; +// import edu.wpi.first.math.geometry.Rotation2d; +// import edu.wpi.first.math.util.Units; +// import edu.wpi.first.units.measure.Angle; +// import edu.wpi.first.units.measure.AngularVelocity; +// import edu.wpi.first.units.measure.Current; +// import edu.wpi.first.units.measure.Voltage; +// import edu.wpi.first.wpilibj.RobotController; +// import frc.robot.Constants; +// import frc.robot.Constants.DrivebaseConstants; +// import frc.robot.generated.TunerConstants; +// import frc.robot.util.PhoenixUtil; +// import frc.robot.util.RBSICANBusRegistry; +// import java.util.Arrays; +// import java.util.Queue; +// import org.littletonrobotics.junction.Logger; + +// /** +// * Module IO implementation for Talon FXS drive motor controller, Talon FXS turn motor +// controller, +// * and CANdi (PWM 1). Configured using a set of module constants from Phoenix. +// * +// *

Device configuration and other behaviors not exposed by TunerConstants can be customized +// here. +// */ +// public class ModuleIOTalonFXS implements ModuleIO { +// private final SwerveModuleConstants< +// TalonFXConfiguration, TalonFXConfiguration, CANdiConfiguration> +// constants; + +// // This module number (for logging) +// private final int module; + +// // Hardware objects +// private final TalonFXS driveTalon; +// private final TalonFXS turnTalon; +// private final CANdi candi; +// private final ClosedLoopOutputType m_DriveMotorClosedLoopOutput = +// switch (Constants.getPhoenixPro()) { +// case LICENSED -> ClosedLoopOutputType.TorqueCurrentFOC; +// case UNLICENSED -> ClosedLoopOutputType.Voltage; +// }; +// private final ClosedLoopOutputType m_SteerMotorClosedLoopOutput = +// switch (Constants.getPhoenixPro()) { +// case LICENSED -> ClosedLoopOutputType.TorqueCurrentFOC; +// case UNLICENSED -> ClosedLoopOutputType.Voltage; +// }; + +// // Voltage control requests +// private final VoltageOut voltageRequest = new VoltageOut(0); +// private final PositionVoltage positionVoltageRequest = new PositionVoltage(0.0); +// private final VelocityVoltage velocityVoltageRequest = new VelocityVoltage(0.0); + +// // Torque-current control requests +// private final TorqueCurrentFOC torqueCurrentRequest = new TorqueCurrentFOC(0); +// private final PositionTorqueCurrentFOC positionTorqueCurrentRequest = +// new PositionTorqueCurrentFOC(0.0); +// private final VelocityTorqueCurrentFOC velocityTorqueCurrentRequest = +// new VelocityTorqueCurrentFOC(0.0); + +// // Timestamp inputs from Phoenix thread +// private final Queue timestampQueue; + +// // Inputs from drive motor +// private final StatusSignal drivePosition; +// private final StatusSignal drivePositionOdom; +// private final Queue drivePositionQueue; +// private final StatusSignal driveVelocity; +// private final StatusSignal driveAppliedVolts; +// private final StatusSignal driveCurrent; + +// // Inputs from turn motor +// private final StatusSignal turnAbsolutePosition; +// private final StatusSignal turnPosition; +// private final StatusSignal turnPositionOdom; +// private final Queue turnPositionQueue; +// private final StatusSignal turnVelocity; +// private final StatusSignal turnAppliedVolts; +// private final StatusSignal turnCurrent; + +// // Connection debouncers +// private final Debouncer driveConnectedDebounce = +// new Debouncer(0.5, Debouncer.DebounceType.kFalling); +// private final Debouncer turnConnectedDebounce = +// new Debouncer(0.5, Debouncer.DebounceType.kFalling); +// private final Debouncer turnEncoderConnectedDebounce = +// new Debouncer(0.5, Debouncer.DebounceType.kFalling); + +// // Config +// private final TalonFXSConfiguration driveConfig = new TalonFXSConfiguration(); +// private final TalonFXSConfiguration turnConfig = new TalonFXSConfiguration(); + +// // Values used for calculating feedforward from kS, kV, and kA +// private double lastVelocityRotPerSec = 0.0; +// private long lastTimestampNano = System.nanoTime(); + +// /* +// * TalonFXS I/O +// */ +// public ModuleIOTalonFXS(int module) { +// // Record the module number for logging purposes +// this.module = module; + +// constants = +// switch (module) { +// case 0 -> TunerConstants.FrontLeft; +// case 1 -> TunerConstants.FrontRight; +// case 2 -> TunerConstants.BackLeft; +// case 3 -> TunerConstants.BackRight; +// default -> throw new IllegalArgumentException("Invalid module index"); +// }; + +// CANBus canBus = 
RBSICANBusRegistry.getBus(SwerveConstants.kCANbusName); +// driveTalon = new TalonFXS(constants.DriveMotorId, canBus); +// turnTalon = new TalonFXS(constants.SteerMotorId, canBus); +// candi = new CANdi(constants.EncoderId, canBus); + +// // Configure drive motor +// driveConfig.MotorOutput.NeutralMode = NeutralModeValue.Brake; +// driveConfig.Slot0 = +// new Slot0Configs() +// .withKP(DrivebaseConstants.kDriveP) +// .withKI(0.0) +// .withKD(DrivebaseConstants.kDriveD) +// .withKS(DrivebaseConstants.kDriveS) +// .withKV(DrivebaseConstants.kDriveV) +// .withKA(DrivebaseConstants.kDriveA); +// driveConfig.Commutation.MotorArrangement = +// switch (constants.DriveMotorType) { +// case TalonFXS_NEO_JST -> MotorArrangementValue.NEO_JST; +// case TalonFXS_VORTEX_JST -> MotorArrangementValue.VORTEX_JST; +// default -> MotorArrangementValue.Disabled; +// }; +// driveConfig.MotorOutput.NeutralMode = NeutralModeValue.Brake; +// driveConfig.Slot0 = constants.DriveMotorGains; +// driveConfig.ExternalFeedback.SensorToMechanismRatio = constants.DriveMotorGearRatio; +// driveConfig.CurrentLimits.StatorCurrentLimit = DrivebaseConstants.kSlipCurrent; +// driveConfig.CurrentLimits.StatorCurrentLimitEnable = true; +// // Build the OpenLoopRampsConfigs and ClosedLoopRampsConfigs for current smoothing +// OpenLoopRampsConfigs openRamps = new OpenLoopRampsConfigs(); +// openRamps.DutyCycleOpenLoopRampPeriod = DrivebaseConstants.kDriveOpenLoopRampPeriod; +// openRamps.VoltageOpenLoopRampPeriod = DrivebaseConstants.kDriveOpenLoopRampPeriod; +// openRamps.TorqueOpenLoopRampPeriod = DrivebaseConstants.kDriveOpenLoopRampPeriod; +// ClosedLoopRampsConfigs closedRamps = new ClosedLoopRampsConfigs(); +// closedRamps.DutyCycleClosedLoopRampPeriod = DrivebaseConstants.kDriveClosedLoopRampPeriod; +// closedRamps.VoltageClosedLoopRampPeriod = DrivebaseConstants.kDriveClosedLoopRampPeriod; +// closedRamps.TorqueClosedLoopRampPeriod = DrivebaseConstants.kDriveClosedLoopRampPeriod; +// // Apply the open- and closed-loop ramp configuration for current smoothing +// driveConfig.withClosedLoopRamps(closedRamps).withOpenLoopRamps(openRamps); +// // Set motor inversions +// driveConfig.MotorOutput.Inverted = +// constants.DriveMotorInverted +// ? 
InvertedValue.Clockwise_Positive +// : InvertedValue.CounterClockwise_Positive; +// // Apply everything to the motor controllers +// PhoenixUtil.tryUntilOk(5, () -> driveTalon.getConfigurator().apply(driveConfig, 0.25)); +// PhoenixUtil.tryUntilOk(5, () -> driveTalon.setPosition(0.0, 0.25)); + +// // Configure turn motor +// turnConfig.MotorOutput.NeutralMode = NeutralModeValue.Brake; +// turnConfig.Slot0 = +// new Slot0Configs() +// .withKP(DrivebaseConstants.kSteerP) +// .withKI(0.0) +// .withKD(DrivebaseConstants.kSteerD) +// .withKS(DrivebaseConstants.kSteerS) +// .withKV(0.0) +// .withKA(0.0) +// .withStaticFeedforwardSign(StaticFeedforwardSignValue.UseClosedLoopSign); +// turnConfig.Commutation.MotorArrangement = +// switch (constants.SteerMotorType) { +// case TalonFXS_Minion_JST -> MotorArrangementValue.Minion_JST; +// case TalonFXS_NEO_JST -> MotorArrangementValue.NEO_JST; +// case TalonFXS_VORTEX_JST -> MotorArrangementValue.VORTEX_JST; +// case TalonFXS_NEO550_JST -> MotorArrangementValue.NEO550_JST; +// case TalonFXS_Brushed_AB, TalonFXS_Brushed_AC, TalonFXS_Brushed_BC -> +// MotorArrangementValue.Brushed_DC; +// default -> MotorArrangementValue.Disabled; +// }; +// turnConfig.Commutation.BrushedMotorWiring = +// switch (constants.SteerMotorType) { +// case TalonFXS_Brushed_AC -> BrushedMotorWiringValue.Leads_A_and_C; +// case TalonFXS_Brushed_BC -> BrushedMotorWiringValue.Leads_B_and_C; +// default -> BrushedMotorWiringValue.Leads_A_and_B; +// }; +// turnConfig.MotorOutput.NeutralMode = NeutralModeValue.Brake; +// turnConfig.Slot0 = constants.SteerMotorGains; +// turnConfig.ExternalFeedback.FeedbackRemoteSensorID = constants.EncoderId; +// turnConfig.ExternalFeedback.ExternalFeedbackSensorSource = +// switch (constants.FeedbackSource) { +// case RemoteCANdiPWM1 -> ExternalFeedbackSensorSourceValue.RemoteCANdiPWM1; +// case FusedCANdiPWM1 -> ExternalFeedbackSensorSourceValue.FusedCANdiPWM1; +// case SyncCANdiPWM1 -> ExternalFeedbackSensorSourceValue.SyncCANdiPWM1; +// default -> +// throw new RuntimeException( +// "You have selected a turn feedback source that is not supported by the default +// implementation of ModuleIOTalonFXS (CANdi PWM 1). Please check the AdvantageKit documentation for +// more information on alternative configurations: +// https://docs.advantagekit.org/getting-started/template-projects/talonfx-swerve-template#custom-module-implementations"); +// }; +// turnConfig.ExternalFeedback.RotorToSensorRatio = constants.SteerMotorGearRatio; +// turnConfig.MotionMagic.MotionMagicCruiseVelocity = 100.0 / constants.SteerMotorGearRatio; +// turnConfig.MotionMagic.MotionMagicAcceleration = +// turnConfig.MotionMagic.MotionMagicCruiseVelocity / 0.100; +// turnConfig.MotionMagic.MotionMagicExpo_kV = 0.12 * constants.SteerMotorGearRatio; +// turnConfig.MotionMagic.MotionMagicExpo_kA = 0.1; +// turnConfig.ClosedLoopGeneral.ContinuousWrap = true; +// turnConfig.MotorOutput.Inverted = +// constants.SteerMotorInverted +// ? 
InvertedValue.Clockwise_Positive +// : InvertedValue.CounterClockwise_Positive; +// PhoenixUtil.tryUntilOk(5, () -> turnTalon.getConfigurator().apply(turnConfig, 0.25)); + +// // Configure CANdi +// CANdiConfiguration candiConfig = constants.EncoderInitialConfigs; +// candiConfig.PWM1.AbsoluteSensorOffset = constants.EncoderOffset; +// candiConfig.PWM1.SensorDirection = constants.EncoderInverted; +// candi.getConfigurator().apply(candiConfig); + +// // Create timestamp queue +// timestampQueue = PhoenixOdometryThread.getInstance().makeTimestampQueue(); + +// // Create drive status signals +// drivePosition = driveTalon.getPosition(); +// drivePositionOdom = drivePosition.clone(); // NEW +// drivePositionQueue = PhoenixOdometryThread.getInstance().registerSignal(drivePositionOdom); + +// driveVelocity = driveTalon.getVelocity(); +// driveAppliedVolts = driveTalon.getMotorVoltage(); +// driveCurrent = driveTalon.getStatorCurrent(); + +// // Create turn status signals +// turnPosition = turnTalon.getPosition(); +// turnPositionOdom = turnPosition.clone(); // NEW +// turnPositionQueue = PhoenixOdometryThread.getInstance().registerSignal(turnPositionOdom); + +// turnAbsolutePosition = candi.getPWM1Position(); +// turnVelocity = turnTalon.getVelocity(); +// turnAppliedVolts = turnTalon.getMotorVoltage(); +// turnCurrent = turnTalon.getStatorCurrent(); + +// // Configure periodic frames (IMPORTANT: apply odometry rate to the *odom clones*) +// BaseStatusSignal.setUpdateFrequencyForAll( +// SwerveConstants.kOdometryFrequency, drivePositionOdom, turnPositionOdom); +// BaseStatusSignal.setUpdateFrequencyForAll( +// 50.0, +// drivePosition, +// turnPosition, +// driveVelocity, +// driveAppliedVolts, +// driveCurrent, +// turnAbsolutePosition, +// turnVelocity, +// turnAppliedVolts, +// turnCurrent); + +// ParentDevice.optimizeBusUtilizationForAll(driveTalon, turnTalon); +// } + +// @Override +// public void updateInputs(ModuleIOInputs inputs) { +// // Refresh Phoenix signals +// var driveStatus = +// BaseStatusSignal.refreshAll(drivePosition, driveVelocity, driveAppliedVolts, +// driveCurrent); +// var turnStatus = +// BaseStatusSignal.refreshAll(turnPosition, turnVelocity, turnAppliedVolts, turnCurrent); +// var encStatus = BaseStatusSignal.refreshAll(turnAbsolutePosition); + +// // Optional: status logging +// if (!driveStatus.isOK()) { +// Logger.recordOutput("CAN/Module" + module + "/DriveRefreshStatus", driveStatus.toString()); +// } +// if (!turnStatus.isOK()) { +// Logger.recordOutput("CAN/Module" + module + "/TurnRefreshStatus", turnStatus.toString()); +// } +// if (!encStatus.isOK()) { +// Logger.recordOutput("CAN/Module" + module + "/EncRefreshStatus", encStatus.toString()); +// } + +// // Connectivity flags +// inputs.driveConnected = driveConnectedDebounce.calculate(driveStatus.isOK()); +// inputs.turnConnected = turnConnectedDebounce.calculate(turnStatus.isOK()); +// inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(encStatus.isOK()); + +// // Drive inputs +// inputs.drivePositionRad = Units.rotationsToRadians(drivePosition.getValueAsDouble()); +// inputs.driveVelocityRadPerSec = Units.rotationsToRadians(driveVelocity.getValueAsDouble()); +// inputs.driveAppliedVolts = driveAppliedVolts.getValueAsDouble(); +// inputs.driveCurrentAmps = driveCurrent.getValueAsDouble(); + +// // Turn inputs +// inputs.turnAbsolutePosition = +// Rotation2d.fromRotations(turnAbsolutePosition.getValueAsDouble()); +// inputs.turnPosition = 
Rotation2d.fromRotations(turnPosition.getValueAsDouble()); +// inputs.turnVelocityRadPerSec = Units.rotationsToRadians(turnVelocity.getValueAsDouble()); +// inputs.turnAppliedVolts = turnAppliedVolts.getValueAsDouble(); +// inputs.turnCurrentAmps = turnCurrent.getValueAsDouble(); + +// // Odometry queue drain (common prefix only) +// final int tsCount = timestampQueue.size(); +// final int driveCount = drivePositionQueue.size(); +// final int turnCount = turnPositionQueue.size(); +// final int sampleCount = Math.min(tsCount, Math.min(driveCount, turnCount)); + +// if (sampleCount <= 0) { +// inputs.odometryTimestamps = new double[0]; +// inputs.odometryDrivePositionsRad = new double[0]; +// inputs.odometryTurnPositions = new Rotation2d[0]; +// return; +// } + +// final double[] outTs = new double[sampleCount]; +// final double[] outDriveRad = new double[sampleCount]; +// final Rotation2d[] outTurn = new Rotation2d[sampleCount]; + +// for (int i = 0; i < sampleCount; i++) { +// final Double t = timestampQueue.poll(); +// final Double driveRot = drivePositionQueue.poll(); +// final Double turnRot = turnPositionQueue.poll(); + +// if (t == null || driveRot == null || turnRot == null) { +// inputs.odometryTimestamps = Arrays.copyOf(outTs, i); +// inputs.odometryDrivePositionsRad = Arrays.copyOf(outDriveRad, i); +// inputs.odometryTurnPositions = Arrays.copyOf(outTurn, i); +// return; +// } + +// outTs[i] = t.doubleValue(); +// outDriveRad[i] = Units.rotationsToRadians(driveRot.doubleValue()); +// outTurn[i] = Rotation2d.fromRotations(turnRot.doubleValue()); +// } + +// inputs.odometryTimestamps = outTs; +// inputs.odometryDrivePositionsRad = outDriveRad; +// inputs.odometryTurnPositions = outTurn; +// } + +// /** Module Action Functions ********************************************** */ +// /** +// * Set the drive motor to an open-loop voltage, scaled to battery voltage +// * +// * @param output Specified open-loop voltage requested +// */ +// @Override +// public void setDriveOpenLoop(double output) { +// // Scale by actual battery voltage to keep full output consistent +// double busVoltage = RobotController.getBatteryVoltage(); +// double scaledOutput = output * DrivebaseConstants.kOptimalVoltage / busVoltage; + +// driveTalon.setControl( +// switch (m_DriveMotorClosedLoopOutput) { +// case Voltage -> voltageRequest.withOutput(scaledOutput); +// case TorqueCurrentFOC -> torqueCurrentRequest.withOutput(scaledOutput); +// }); + +// // Log output and battery +// Logger.recordOutput("Swerve/Drive/OpenLoopOutput", scaledOutput); +// Logger.recordOutput("Swerve/BatteryVoltage", busVoltage); +// } + +// /** +// * Set the turn motor to an open-loop voltage, scaled to battery voltage +// * +// * @param output Specified open-loop voltage requested +// */ +// @Override +// public void setTurnOpenLoop(double output) { +// double busVoltage = RobotController.getBatteryVoltage(); +// double scaledOutput = output * DrivebaseConstants.kOptimalVoltage / busVoltage; + +// turnTalon.setControl( +// switch (m_SteerMotorClosedLoopOutput) { +// case Voltage -> voltageRequest.withOutput(scaledOutput); +// case TorqueCurrentFOC -> torqueCurrentRequest.withOutput(scaledOutput); +// }); + +// // Log output and battery +// Logger.recordOutput("Swerve/Turn/OpenLoopOutput", scaledOutput); +// Logger.recordOutput("Swerve/BatteryVoltage", busVoltage); +// } + +// /** +// * Set the velocity of the module +// * +// * @param velocityRadPerSec Requested module drive velocity in radians per second +// */ +// @Override +// 
public void setDriveVelocity(double velocityRadPerSec) { +// double velocityRotPerSec = Units.radiansToRotations(velocityRadPerSec); +// double busVoltage = RobotController.getBatteryVoltage(); + +// // Compute the Feedforward voltage for CTRE UNLICENSED operation ***** +// // Compute acceleration +// long currentTimeNano = System.nanoTime(); +// double deltaTimeSec = (currentTimeNano - lastTimestampNano) * 1e-9; +// double accelerationRotPerSec2 = +// deltaTimeSec > 0 ? (velocityRotPerSec - lastVelocityRotPerSec) / deltaTimeSec : 0.0; +// // Update last values for next loop +// lastVelocityRotPerSec = velocityRotPerSec; +// lastTimestampNano = currentTimeNano; +// // Compute feedforward voltage: kS + kV*v + kA*a +// double nominalFFVolts = +// Math.signum(velocityRotPerSec) * DrivebaseConstants.kDriveS +// + DrivebaseConstants.kDriveV * velocityRotPerSec +// + DrivebaseConstants.kDriveA * accelerationRotPerSec2; +// double scaledFFVolts = nominalFFVolts * DrivebaseConstants.kOptimalVoltage / busVoltage; + +// // Set the drive motor control based on CTRE LICENSED status +// driveTalon.setControl( +// switch (m_DriveMotorClosedLoopOutput) { +// case Voltage -> +// +// velocityVoltageRequest.withVelocity(velocityRotPerSec).withFeedForward(scaledFFVolts); +// case TorqueCurrentFOC -> +// +// velocityTorqueCurrentRequest.withVelocity(RotationsPerSecond.of(velocityRotPerSec)); +// }); + +// // AdvantageKit logging +// Logger.recordOutput("Swerve/Drive/VelocityRadPerSec", velocityRadPerSec); +// Logger.recordOutput("Swerve/Drive/VelocityRotPerSec", velocityRotPerSec); +// Logger.recordOutput("Swerve/Drive/AccelerationRotPerSec2", accelerationRotPerSec2); +// Logger.recordOutput("Swerve/Drive/FeedForwardVolts", scaledFFVolts); +// Logger.recordOutput("Swerve/BatteryVoltage", busVoltage); +// Logger.recordOutput("Swerve/Drive/ClosedLoopMode", m_DriveMotorClosedLoopOutput); +// } + +// /** +// * Set the turn position of the module +// * +// * @param rotation Requested module Rotation2d position +// */ +// @Override +// public void setTurnPosition(Rotation2d rotation) { +// double busVoltage = RobotController.getBatteryVoltage(); + +// // Scale feedforward voltage by battery voltage +// double nominalFFVolts = DrivebaseConstants.kNominalFFVolts; +// double scaledFFVolts = nominalFFVolts * DrivebaseConstants.kOptimalVoltage / busVoltage; + +// turnTalon.setControl( +// switch (m_SteerMotorClosedLoopOutput) { +// case Voltage -> +// positionVoltageRequest +// .withPosition(rotation.getRotations()) +// .withFeedForward(scaledFFVolts); +// case TorqueCurrentFOC -> +// positionTorqueCurrentRequest.withPosition(rotation.getRotations()); +// }); + +// Logger.recordOutput("Swerve/Turn/TargetRotations", rotation.getRotations()); +// Logger.recordOutput("Swerve/BatteryVoltage", busVoltage); +// } + +// @Override +// public void setDrivePID(double kP, double kI, double kD) { +// driveConfig.Slot0.kP = kP; +// driveConfig.Slot0.kI = kI; +// driveConfig.Slot0.kD = kD; +// PhoenixUtil.tryUntilOk(5, () -> driveTalon.getConfigurator().apply(driveConfig, 0.25)); +// } + +// @Override +// public void setTurnPID(double kP, double kI, double kD) { +// turnConfig.Slot0.kP = kP; +// turnConfig.Slot0.kI = kI; +// turnConfig.Slot0.kD = kD; +// PhoenixUtil.tryUntilOk(5, () -> turnTalon.getConfigurator().apply(turnConfig, 0.25)); +// } +// } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index ea4930f1..48e3bba7 100644 --- 
a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -135,12 +135,6 @@ public void rbsiPeriodic() { boolean hasFusedThisLoop = false; boolean hasSmoothedThisLoop = false; - // Default debug outputs (so keys exist even if we return early) - double dbgAlignDt = Double.NaN; - double dbgDeltaTranslation = Double.NaN; - double dbgDeltaRotation = Double.NaN; - boolean dbgAlignFinite = false; - try { lastAlignDbg.reset(); @@ -278,13 +272,13 @@ public void rbsiPeriodic() { lastFusedValid = true; lastSmoothedValid = true; - Logger.recordOutput("OdometryReplay/PreInjectRobotX", drive.poseEstimatorGetPose().getX()); - Logger.recordOutput("OdometryReplay/PreInjectRobotY", drive.poseEstimatorGetPose().getY()); + Logger.recordOutput("OdometryReplay/PreInjectRobotX", drive.getPose().getX()); + Logger.recordOutput("OdometryReplay/PreInjectRobotY", drive.getPose().getY()); consumer.accept(smoothed); - Logger.recordOutput("OdometryReplay/PostInjectRobotX", drive.poseEstimatorGetPose().getX()); - Logger.recordOutput("OdometryReplay/PostInjectRobotY", drive.poseEstimatorGetPose().getY()); + Logger.recordOutput("OdometryReplay/PostInjectRobotX", drive.getPose().getX()); + Logger.recordOutput("OdometryReplay/PostInjectRobotY", drive.getPose().getY()); // If you want, you can feed debug values from inside timeAlignPose(...) via fields, // but leaving the plumbing as-is since you’re already logging inside helpers. @@ -561,74 +555,13 @@ void set(double dt, Transform2d tf) { private TimedPose fuseAtTime(ArrayList estimates, double tFusion) { final ArrayList aligned = new ArrayList<>(estimates.size()); for (var e : estimates) { - Pose2d alignedPose = timeAlignPoseFieldDelta(e.pose(), e.timestampSeconds(), tFusion); + Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tFusion); if (alignedPose == null) return null; aligned.add(new TimedPose(alignedPose, tFusion, e.stdDevs())); } return inverseVarianceFuse(aligned, tFusion); } - /** - * Align a pose to where it would have been at the fusion time - * - *
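In fuseAtTime(...) above, every camera estimate is first time-aligned to tFusion and then handed to inverseVarianceFuse(...), whose body is not part of this diff. For orientation, the standard inverse-variance weighting that name suggests looks like the sketch below. The one-dimensional Estimate record and the scalar simplification are assumptions for illustration; the real method presumably fuses x, y, and heading with per-axis standard deviations, and heading needs angle-aware averaging rather than a plain weighted mean.

final class InverseVarianceFuseSketch {
  record Estimate(double value, double stdDev) {}

  /** Fuse scalar estimates by weighting each with 1 / sigma^2 (tighter estimates count more). */
  static Estimate fuse(Estimate... estimates) {
    double weightSum = 0.0;
    double weightedValueSum = 0.0;
    for (Estimate e : estimates) {
      double w = 1.0 / (e.stdDev() * e.stdDev());
      weightSum += w;
      weightedValueSum += w * e.value();
    }
    return new Estimate(weightedValueSum / weightSum, Math.sqrt(1.0 / weightSum));
  }

  public static void main(String[] args) {
    // A tight measurement (sigma = 0.05 m) dominates a loose one (sigma = 0.20 m).
    Estimate fused = fuse(new Estimate(3.00, 0.05), new Estimate(3.40, 0.20));
    System.out.printf("fused x = %.3f m, sigma = %.3f m%n", fused.value(), fused.stdDev());
  }
}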

Gets the odometric poses at ts and tFusion from the drivebase PoseEstimator, computes the - * transform between them, and applies that to the vision pose. The correction is applied by - * finding the field-frame deltas for both translation and rotation, then returning a new Pose2d - * object that consists of the vision pose adjusted by the field-frame deltas. - * - * @param visionPoseAtTs The pose at ts - * @param ts Timestamp of the pose - * @param tFusion Fusion timestamp - * @return Transformed Pose2d - */ - private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tFusion) { - - Logger.recordOutput("Vision/Debug/ts", ts); - Logger.recordOutput("Vision/Debug/tFusion", tFusion); - Logger.recordOutput("Vision/Debug/alignDtMs", (tFusion - ts) * 1000.0); - - double dt = tFusion - ts; - - Optional odomAtTsOpt = drive.getPoseAtTime(ts); - Optional odomAtTFOpt = drive.getPoseAtTime(tFusion); - // If empty, return null - if (odomAtTsOpt.isEmpty() || odomAtTFOpt.isEmpty()) return null; - - // Transform that takes odomAtTs -> odomAtTF (in odomAtTs frame) - Transform2d ts_T_tf = odomAtTFOpt.get().minus(odomAtTsOpt.get()); - - double dtrans = ts_T_tf.getTranslation().getNorm(); - double drot = ts_T_tf.getRotation().getRadians(); - - boolean finite = - Double.isFinite(dt) - && Double.isFinite(dtrans) - && Double.isFinite(drot) - && Double.isFinite(odomAtTsOpt.get().getX()) - && Double.isFinite(odomAtTFOpt.get().getX()); - - // Even more debugging logging - Logger.recordOutput("Vision/Debug/alignDt", dt); - Logger.recordOutput("Vision/Debug/deltaTranslation", dtrans); - Logger.recordOutput("Vision/Debug/deltaRotation", drot); - Logger.recordOutput("Vision/Debug/alignFinite", finite); - Logger.recordOutput("Vision/Debug/odomAtTs", odomAtTsOpt.get()); - Logger.recordOutput("Vision/Debug/odomAtTF", odomAtTFOpt.get()); - - if (!finite) { - Logger.recordOutput("Vision/Debug/odomAtTs", odomAtTsOpt.get()); - Logger.recordOutput("Vision/Debug/odomAtTF", odomAtTFOpt.get()); - return null; - } - - // Debugging Logging - Logger.recordOutput("Vision/Debug/deltaTranslation", ts_T_tf.getTranslation().getNorm()); - Logger.recordOutput("Vision/Debug/deltaRotation", ts_T_tf.getRotation().getRadians()); - - // Apply the same SE(2) transform to the vision pose - return visionPoseAtTs.transformBy(ts_T_tf); - } - /** * Align a pose to where it would have been at the fusion time * @@ -649,7 +582,7 @@ private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tFusion) { * @param tFusion Fusion timestamp * @return Transformed Pose2d */ - private Pose2d timeAlignPoseFieldDelta(Pose2d visionPoseAtTs, double ts, double tFusion) { + private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tFusion) { Optional odomAtTsOpt = drive.getPoseAtTime(ts); Optional odomAtTFOpt = drive.getPoseAtTime(tFusion); if (odomAtTsOpt.isEmpty() || odomAtTFOpt.isEmpty()) return null; @@ -777,7 +710,7 @@ private TimedPose smoothAtTime(double tFusion) { final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); for (var e : fusedBuffer) { - Pose2d alignedPose = timeAlignPoseFieldDelta(e.pose(), e.timestampSeconds(), tFusion); + Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tFusion); if (alignedPose == null) continue; aligned.add(new TimedPose(alignedPose, tFusion, e.stdDevs())); // Debugging Logging diff --git a/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java index 3db0b253..a2e55e61 100644 --- 
a/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java +++ b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java @@ -1,4 +1,4 @@ -// Copyright (c) 2025 Az-FIRST +// Copyright (c) 2026 Az-FIRST // http://github.com/AZ-First // Copyright (c) 2024 FRC 254 // https://github.com/team254 @@ -27,7 +27,6 @@ import java.util.OptionalDouble; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListMap; -import org.littletonrobotics.junction.Logger; /** * A concurrent version of WPIlib's TimeInterpolatableBuffer class to avoid the need for explicit @@ -90,9 +89,6 @@ public static ConcurrentTimeInterpolatableBuffer createDoubleBuffer( * @param sample The sample object. */ public void addSample(double timeSeconds, T sample) { - Logger.recordOutput("Odometry/Debug/lastPoseBufferAddTs", timeSeconds); - Logger.recordOutput("Odometry/Debug/nowTs", TimeUtil.now()); - m_pastSnapshots.put(timeSeconds, sample); cleanUp(timeSeconds); } @@ -135,7 +131,7 @@ public Optional getSample(double timeSeconds) { if (topBound == null) return Optional.of(bottomBound.getValue()); if (bottomBound == null) return Optional.of(topBound.getValue()); - // NEW: if they are the same sample, no interpolation possible/needed + // If they are the same sample, no interpolation possible/needed if (topBound.getKey().doubleValue() == bottomBound.getKey().doubleValue()) { return Optional.of(bottomBound.getValue()); } @@ -144,17 +140,12 @@ public Optional getSample(double timeSeconds) { double t1 = topBound.getKey(); double denom = t1 - t0; - // (optional but good) + // If the samples are so close together as to be indistinguishable, they are the same if (Math.abs(denom) < 1e-9) return Optional.of(bottomBound.getValue()); double ratio = (timeSeconds - t0) / denom; ratio = MathUtil.clamp(ratio, 0.0, 1.0); - Logger.recordOutput("Odometry/Debug/bottomKey", t0); - Logger.recordOutput("Odometry/Debug/topKey", t1); - Logger.recordOutput("Odometry/Debug/denom", denom); - Logger.recordOutput("Odometry/Debug/snaphotSize", m_pastSnapshots.size()); - return Optional.of( m_interpolatingFunc.interpolate(bottomBound.getValue(), topBound.getValue(), ratio)); } diff --git a/src/main/java/frc/robot/util/RBSICANBusRegistry.java b/src/main/java/frc/robot/util/RBSICANBusRegistry.java index ea3ef08e..4619ec7f 100644 --- a/src/main/java/frc/robot/util/RBSICANBusRegistry.java +++ b/src/main/java/frc/robot/util/RBSICANBusRegistry.java @@ -18,13 +18,18 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -/** Centralized CAN bus singleton registry + SIM/REAL indirection. */ +/** Centralized CAN bus singleton registry */ public final class RBSICANBusRegistry { private static final Map realBuses = new ConcurrentHashMap<>(); private static final Map likeBuses = new ConcurrentHashMap<>(); private static volatile boolean initialized = false; private static volatile boolean sim = false; + /** + * Initialize the REAL CANBusses + * + * @param busNames The list of bus names to initialize + */ public static void initReal(String... busNames) { sim = false; for (String name : busNames) { @@ -34,6 +39,11 @@ public static void initReal(String... busNames) { initialized = true; } + /** + * Initialize the SIM CANBusses + * + * @param busNames The list of bus names to initialize + */ public static void initSim(String... busNames) { sim = true; for (String name : busNames) { @@ -42,7 +52,12 @@ public static void initSim(String... 
busNames) { initialized = true; } - /** For Phoenix device constructors (REAL/REPLAY only). */ + /** + * Get the CANBus for Phoenix device constructors + * + * @param name Name of the CAN bus to get + * @return CANBus object corresponding to the name + */ public static CANBus getBus(String name) { checkInit(); if (sim) { @@ -53,7 +68,12 @@ public static CANBus getBus(String name) { return bus; } - /** For health logging (REAL or SIM). */ + /** + * Get a CANBus-like object for health logging + * + * @param name Name of the bus to health log + * @return CANBusLike object + */ public static CANBusLike getLike(String name) { checkInit(); CANBusLike bus = likeBuses.get(name); @@ -61,52 +81,63 @@ public static CANBusLike getLike(String name) { return bus; } + /** Check that the Registry is initialized */ private static void checkInit() { if (!initialized) throw new IllegalStateException("RBSICANBusRegistry not initialized."); } + /** Throw exception if the CAN bus name is not in the Registry */ private static void throwUnknown(String name, Set known) { throw new IllegalArgumentException("Unknown CAN bus '" + name + "'. Known: " + known); } - // ---- nested types ---- + /** Nested types for Registry Function *********************************** */ + /** CANBusLike interface */ public interface CANBusLike { String getName(); CANBus.CANBusStatus getStatus(); } + /** Real CAN Bus Adapter */ static final class RealCANBusAdapter implements CANBusLike { private final CANBus bus; + /** Constructor */ RealCANBusAdapter(CANBus bus) { this.bus = bus; } + /** Get the name of this CANBus instance */ @Override public String getName() { return bus.getName(); } + /** Get the status of this CANBus instance */ @Override public CANBus.CANBusStatus getStatus() { return bus.getStatus(); } } + /** Simulated CAN Bus Stub */ static final class SimCANBusStub implements CANBusLike { private final String name; + /** Constructor */ SimCANBusStub(String name) { this.name = name; } + /** Get the name of this simulated CANBus */ @Override public String getName() { return name; } + /** Get the status of this simulated CANBus */ @Override public CANBus.CANBusStatus getStatus() { return new CANBus.CANBusStatus(); diff --git a/src/main/java/frc/robot/util/RBSICANHealth.java b/src/main/java/frc/robot/util/RBSICANHealth.java index df7a185a..8959020f 100644 --- a/src/main/java/frc/robot/util/RBSICANHealth.java +++ b/src/main/java/frc/robot/util/RBSICANHealth.java @@ -15,17 +15,27 @@ import org.littletonrobotics.junction.Logger; +/** + * Virtual subsystem to monitor CAN health + * + *

Instantiate one of these subsystems for each CAN bus on the robot in the RobotContainer. + */ public class RBSICANHealth extends VirtualSubsystem { private long loops = 0; private final RBSICANBusRegistry.CANBusLike bus; + /** Constructor */ public RBSICANHealth(String busName) { bus = RBSICANBusRegistry.getLike(busName); } + /** Periodic function */ @Override protected void rbsiPeriodic() { + // Only log CAN health every 5 loops if ((loops++ % 5) != 0) return; + + // Get status and log var status = bus.getStatus(); Logger.recordOutput("CAN/" + bus.getName() + "/Utilization", status.BusUtilization); Logger.recordOutput("CAN/" + bus.getName() + "/TxFullCount", status.TxFullCount); diff --git a/src/main/java/frc/robot/util/TimeUtil.java b/src/main/java/frc/robot/util/TimeUtil.java index 3b8e7aad..62f3cb39 100644 --- a/src/main/java/frc/robot/util/TimeUtil.java +++ b/src/main/java/frc/robot/util/TimeUtil.java @@ -15,6 +15,7 @@ import org.littletonrobotics.junction.Logger; +/** Time utility that works for REAL, SIM, and REPLAY */ public final class TimeUtil { private TimeUtil() {} From 621ae9b73320e0d7ba90456f39957212c41a8b88 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Tue, 17 Feb 2026 18:03:59 -0700 Subject: [PATCH 18/19] Cleaning up the pose buffering more Next step: clean up the logging to remove extraneous debug statements. --- .../frc/robot/subsystems/drive/Drive.java | 81 ++++++++++--------- .../frc/robot/subsystems/vision/Vision.java | 65 +++++---------- 2 files changed, 67 insertions(+), 79 deletions(-) diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 604d876c..c6e36a5a 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -92,10 +92,17 @@ public class Drive extends RBSISubsystem { new Alert("Disconnected gyro, using kinematics as fallback.", AlertType.kError); // Declare odometry and pose-related variables + // This one is package-private; used in DriveOdometry, PhoenixOdometryThread, and + // SparkOdometryThread static final Lock odometryLock = new ReentrantLock(); private SwerveDriveKinematics kinematics = new SwerveDriveKinematics(getModuleTranslations()); - private SwerveModulePosition[] lastModulePositions = - new SwerveModulePosition[4]; // For delta tracking + private SwerveModulePosition[] lastModulePositions = // For delta tracking + new SwerveModulePosition[] { + new SwerveModulePosition(), + new SwerveModulePosition(), + new SwerveModulePosition(), + new SwerveModulePosition() + }; private SwerveDrivePoseEstimator m_PoseEstimator = new SwerveDrivePoseEstimator(kinematics, Rotation2d.kZero, lastModulePositions, Pose2d.kZero); @@ -421,7 +428,7 @@ public void updateDisabledCoastState( final double minCoastTime = 0.25; // seconds -- maybe put into Constants??? 
final boolean pastMin = (now - disabledCoastStartTs) >= minCoastTime; - // Detect ENABLED -> DISABLED edge + // Detect ENABLED -> DISABLED edge -- set `disabledCoastUntilTs` when COAST-phase ends if (lastEnabled && !enabledNow) { disabledCoastStartTs = now; disabledCoastUntilTs = now + DrivebaseConstants.kDisabledCoastSeconds; @@ -672,9 +679,9 @@ public boolean isDisabledCoast() { return isDisabledCoast(TimeUtil.now()); } - /** Returns whether the robot was in the DISABLED_COAST state at time `now` */ - public boolean isDisabledCoast(double now) { - return DriverStation.isDisabled() && (now < disabledCoastUntilTs); + /** Returns whether the robot was in the DISABLED_COAST state at time `timestamp` */ + public boolean isDisabledCoast(double timestamp) { + return DriverStation.isDisabled() && (timestamp < disabledCoastUntilTs); } /** Returns the disabledCoastStartTs variable */ @@ -737,7 +744,7 @@ public void zeroHeading() { */ // Called by Vision via consumer.accept(TimedPose) public void addVisionMeasurement(TimedPose meas) { - Drive.odometryLock.lock(); + odometryLock.lock(); try { // Always use measurement timestamp when fusing (enabled path) final double t = meas.timestampSeconds(); @@ -751,49 +758,48 @@ public void addVisionMeasurement(TimedPose meas) { return; } - // DISABLED: - final boolean coast = isDisabledCoast(t); // your overload (timebase-consistent) + // DISABLED -- check if within "coast phase" + final boolean coast = isDisabledCoast(t); - // Optional: ignore vision briefly right after ENABLE->DISABLE (prevents “phase mismatch” at - // disable edge) + // If coasting, check how long we have been in the coast phase if (coast) { final double coastAge = t - getDisabledCoastStartTs(); - Logger.recordOutput("Vision/Dbg/disabledCoastAge", coastAge); + Logger.recordOutput("Vision/Debug/disabledCoastAge", coastAge); + // Ignore vision briefly right after ENABLE->DISABLE (prevents “phase mismatch” at disable + // edge) if (coastAge >= 0.0 && coastAge < DrivebaseConstants.kDisabledVisionIgnoreAfterDisableSec) { - Logger.recordOutput("Vision/Dbg/disabledIgnoreEarlyCoast", true); + Logger.recordOutput("Vision/Debug/disabledIgnoreEarlyCoast", true); return; } } - Logger.recordOutput("Vision/Dbg/disabledIgnoreEarlyCoast", false); + Logger.recordOutput("Vision/Debug/disabledIgnoreEarlyCoast", false); - // If we're coasting, we *avoid* init snap and we lean gentler than stationary. - // (still use the same gating so one bad frame doesn't wreck you) + // If we're coasting, avoid snapping the pose to vision; blend more gently than when stationary. final double alpha = coast - ? Math.min(DrivebaseConstants.kDisabledVisionBlendAlpha, 0.05) // gentle during coast + ? Math.min(DrivebaseConstants.kDisabledVisionBlendAlpha, 0.05) : DrivebaseConstants.kDisabledVisionBlendAlpha; // "Current" for blending target (estimator pose) final Pose2d current = m_PoseEstimator.getEstimatedPosition(); // Debug - Logger.recordOutput("Vision/Dbg/disabledCoast", coast); - Logger.recordOutput("Vision/Dbg/disabledVisionInitialized", disabledVisionInitialized); - Logger.recordOutput("Vision/Dbg/disabledVisionTs", t); + Logger.recordOutput("Vision/Debug/disabledCoast", coast); + Logger.recordOutput("Vision/Debug/disabledVisionInitialized", disabledVisionInitialized); + Logger.recordOutput("Vision/Debug/disabledVisionTs", t); Logger.recordOutput( - "Vision/Dbg/disabledVisionAge", + "Vision/Debug/disabledVisionAge", Double.isFinite(lastDisabledVisionTs) ? 
(t - lastDisabledVisionTs) : Double.NaN); - // Stale logic (looser = LONGER timeout) + // Check if the last while-disabled vision timestamp is stale (too old) final boolean stale = Double.isFinite(lastDisabledVisionTs) && (t - lastDisabledVisionTs) > DrivebaseConstants.kDisabledVisionStale; - Logger.recordOutput("Vision/Dbg/visionStale", stale); + Logger.recordOutput("Vision/Debug/visionStale", stale); - // If we're in coast, we intentionally *don't* init-snap. - // We also reset initialization so that once coast ends, the first good stationary frame - // snaps. + // If coasting, intentionally DO NOT snap; reset initialization so that once coast ends, the + // first good stationary frame snaps. if (coast) { disabledVisionInitialized = false; } @@ -815,20 +821,22 @@ public void addVisionMeasurement(TimedPose meas) { } Logger.recordOutput("Vision/DisabledInitSnap", false); - // Gate vs last accepted disabled vision pose (not estimator) + // Check that there is not a huge jump from the last accepted disabled vision pose final Pose2d gateRef = Double.isFinite(lastDisabledVisionTs) ? lastDisabledVisionPose : vision; - final double dTrans = gateRef.getTranslation().getDistance(vision.getTranslation()); - final double dRot = Math.abs(gateRef.getRotation().minus(vision.getRotation()).getRadians()); + final double deltaTranslation = gateRef.getTranslation().getDistance(vision.getTranslation()); + final double deltaRotation = + Math.abs(gateRef.getRotation().minus(vision.getRotation()).getRadians()); - Logger.recordOutput("Vision/Dbg/dTransFromLastVision", dTrans); - Logger.recordOutput("Vision/Dbg/dRotFromLastVision", dRot); + Logger.recordOutput("Vision/Debug/dTransFromLastVision", deltaTranslation); + Logger.recordOutput("Vision/Debug/dRotFromLastVision", deltaRotation); - // Reject only if NOT stale + // Reject large jumps only if vision measurement is not stale (large delta-T can mean large + // change in position) if (!stale - && (dTrans > DrivebaseConstants.kDisabledVisionMaxJumpM - || dRot > DrivebaseConstants.kDisabledVisionMaxJumpRad)) { + && (deltaTranslation > DrivebaseConstants.kDisabledVisionMaxJumpM + || deltaRotation > DrivebaseConstants.kDisabledVisionMaxJumpRad)) { Logger.recordOutput("Vision/DisabledReject", true); Logger.recordOutput("Vision/DisabledBlendAlphaUsed", alpha); return; @@ -839,9 +847,10 @@ public void addVisionMeasurement(TimedPose meas) { lastDisabledVisionPose = vision; lastDisabledVisionTs = t; - // Blend toward vision + // Blend toward vision -- gentle correction final Pose2d blended = current.interpolate(vision, alpha); + // Push values to pose estimator and pose buffer m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), blended); markPoseReset(t); poseBufferAddSample(t, blended); @@ -850,7 +859,7 @@ public void addVisionMeasurement(TimedPose meas) { Logger.recordOutput("Vision/DisabledBlendAlphaUsed", alpha); } finally { - Drive.odometryLock.unlock(); + odometryLock.unlock(); } } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 48e3bba7..fda7b4b5 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -88,7 +88,7 @@ public interface PoseMeasurementConsumer { // Variance minimum for fusing poses to prevent divide-by-zero explosions private static final double kMinVariance = 1e-12; - // Fields + // Last smoothed and fused poses -- used for debugging private Pose2d lastFusedPose = new Pose2d(); 
private Pose2d lastSmoothedPose = new Pose2d(); private double lastFusedTs = Double.NaN; @@ -109,7 +109,7 @@ public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... io) { this.lastAcceptedTsPerCam = new double[io.length]; Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); - // Log robot->camera transforms if available + // Log robot->camera transforms int n = Math.min(camConfigs.length, io.length); for (int i = 0; i < n; i++) { Logger.recordOutput("Vision/RobotToCamera" + i, camConfigs[i].robotToCamera()); @@ -131,6 +131,7 @@ protected int getPeriodPriority() { @Override public void rbsiPeriodic() { + // Debugging values boolean hasAcceptedThisLoop = false; boolean hasFusedThisLoop = false; boolean hasSmoothedThisLoop = false; @@ -138,9 +139,7 @@ public void rbsiPeriodic() { try { lastAlignDbg.reset(); - // ---------------------------------------------------------------------- - // 1) Pose reset gate (clears smoothing state, resets per-cam monotonic gates) - // ---------------------------------------------------------------------- + // Pose reset gate (clears smoothing state, resets per-cam monotonic gates) long epoch = drive.getPoseResetEpoch(); if (epoch != lastSeenPoseResetEpoch) { lastSeenPoseResetEpoch = epoch; @@ -150,15 +149,13 @@ public void rbsiPeriodic() { Logger.recordOutput("Vision/PoseGateResetFromDrive", false); } - // ---------------------------------------------------------------------- - // 2) Read camera inputs (REAL/SIM/REPLAY all go through IO inputs) - // ---------------------------------------------------------------------- + // Read camera inputs for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); } - // Optional always-on “health” debug + // Always-on “health” debug -- may consider removing this Logger.recordOutput("Vision/Debug/ioLength", io.length); int totalObs = 0; for (int i = 0; i < io.length; i++) { @@ -166,13 +163,12 @@ public void rbsiPeriodic() { } Logger.recordOutput("Vision/Debug/totalObsThisLoop", totalObs); - // ---------------------------------------------------------------------- - // 3) Choose best observation per camera for THIS loop - // ---------------------------------------------------------------------- + // Choose best observation per camera for THIS loop final ArrayList perCamAccepted = new ArrayList<>(io.length); for (int cam = 0; cam < io.length; cam++) { + // Count the number of seen, accepted, and rejected poses estimates int seen = 0; int accepted = 0; int rejected = 0; @@ -184,16 +180,18 @@ public void rbsiPeriodic() { final var obsArr = inputs[cam].poseObservations; if (obsArr == null) { + // Log zeros and move along if we ain't seen nuthin' Logger.recordOutput("Vision/Camera" + cam + "/ObsSeen", 0); Logger.recordOutput("Vision/Camera" + cam + "/ObsAccepted", 0); Logger.recordOutput("Vision/Camera" + cam + "/ObsRejected", 0); continue; } + // Loop over pose observations; move along if gating or pose-construction fail for (var obs : obsArr) { seen++; - GateResult gate = passesHardGatesAndYawGate(cam, obs); + GateResult gate = passesScrutiny(cam, obs); Logger.recordOutput("Vision/Camera" + cam + "/GateFail", gate.reason); if (!gate.accepted) { rejected++; @@ -206,7 +204,7 @@ public void rbsiPeriodic() { continue; } - // Prefer “best” by your scoring function + // Compare this estimate to current "best" -- score current estimate using `isBetter()` if (best == null || isBetter(built.estimate, best)) { best = built.estimate; bestTrustScale = 
built.trustScale; @@ -215,11 +213,13 @@ public void rbsiPeriodic() { } } + // Accept the "best" pose, if extant if (best != null) { accepted++; lastAcceptedTsPerCam[cam] = best.timestampSeconds(); perCamAccepted.add(best); + // Log everything about the accepted pose Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose()); Logger.recordOutput( "Vision/Camera" + cam + "/InjectedTimestamp", best.timestampSeconds()); @@ -244,9 +244,8 @@ public void rbsiPeriodic() { } hasAcceptedThisLoop = true; - // ---------------------------------------------------------------------- - // 4) Fuse all accepted cams at the newest timestamp among them - // ---------------------------------------------------------------------- + // ===== + // Fuse all accepted cams at the newest timestamp among them final double tFusion = perCamAccepted.stream().mapToDouble(e -> e.timestampSeconds()).max().orElse(Double.NaN); if (!Double.isFinite(tFusion)) return; @@ -255,39 +254,25 @@ public void rbsiPeriodic() { if (fused == null) return; hasFusedThisLoop = true; - // ---------------------------------------------------------------------- - // 5) Smooth by fusing recent fused estimates aligned to tFusion - // ---------------------------------------------------------------------- + // ===== + // Smooth by fusing recent fused estimates aligned to tFusion pushFused(fused); final TimedPose smoothed = smoothAtTime(tFusion); if (smoothed == null) return; hasSmoothedThisLoop = true; - // ---------------------------------------------------------------------- - // 6) Update caches (ONLY HERE) + inject to drive - // ---------------------------------------------------------------------- + // Update caches & inject to drive lastFusedPose = fused.pose(); lastSmoothedPose = smoothed.pose(); lastFusedTs = tFusion; lastFusedValid = true; lastSmoothedValid = true; - Logger.recordOutput("OdometryReplay/PreInjectRobotX", drive.getPose().getX()); - Logger.recordOutput("OdometryReplay/PreInjectRobotY", drive.getPose().getY()); - consumer.accept(smoothed); - Logger.recordOutput("OdometryReplay/PostInjectRobotX", drive.getPose().getX()); - Logger.recordOutput("OdometryReplay/PostInjectRobotY", drive.getPose().getY()); - - // If you want, you can feed debug values from inside timeAlignPose(...) via fields, - // but leaving the plumbing as-is since you’re already logging inside helpers. 
- } finally { - // ---------------------------------------------------------------------- - // 7) “Ultra-clean” logging: one place, every loop, replay-safe - // ---------------------------------------------------------------------- + // Log everything on our way out of this function // Always-present “outputs” Logger.recordOutput("Vision/FusedPose", lastFusedPose); @@ -300,12 +285,6 @@ public void rbsiPeriodic() { Logger.recordOutput("Vision/HasAcceptedThisLoop", hasAcceptedThisLoop); Logger.recordOutput("Vision/HasFusedThisLoop", hasFusedThisLoop); Logger.recordOutput("Vision/HasSmoothedThisLoop", hasSmoothedThisLoop); - - // Debug keys (exist even if not touched) - Logger.recordOutput("Vision/Debug/alignDt", lastAlignDbg.alignDt); - Logger.recordOutput("Vision/Debug/deltaTranslation", lastAlignDbg.deltaTranslation); - Logger.recordOutput("Vision/Debug/deltaRotation", lastAlignDbg.deltaRotation); - Logger.recordOutput("Vision/Debug/alignFinite", lastAlignDbg.alignFinite); } } @@ -385,7 +364,7 @@ private static final class GateResult { * @param cam Camera index * @param obs PoseObservation */ - private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation obs) { + private GateResult passesScrutiny(int cam, VisionIO.PoseObservation obs) { final double ts = obs.timestamp(); // Monotonic per-camera time @@ -411,7 +390,7 @@ private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation o if (p.getY() < 0.0 || p.getY() > FieldConstants.aprilTagLayout.getFieldWidth()) return new GateResult(false, "out of bounds field Y"); - // Optional yaw gate: only meaningful for single-tag + // Yaw gate; only meaningful for single-tag if (enableSingleTagYawGate && obs.tagCount() == 1) { OptionalDouble maxYaw = drive.getMaxAbsYawRateRadPerSec(ts - yawGateLookbackSec, ts); if (maxYaw.isPresent() && maxYaw.getAsDouble() > yawGateLimitRadPerSec) { From fac0fba49cd8a35b658c7cada1f428c62d688090 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Thu, 19 Feb 2026 18:24:42 -0700 Subject: [PATCH 19/19] Clean logging --- src/main/java/frc/robot/Constants.java | 42 +++++++++---------- src/main/java/frc/robot/Robot.java | 6 +-- .../accelerometer/Accelerometer.java | 2 +- .../robot/subsystems/drive/DriveOdometry.java | 21 +--------- .../frc/robot/subsystems/imu/ImuIOSim.java | 2 +- .../java/frc/robot/util/RBSISubsystem.java | 2 +- .../java/frc/robot/util/VirtualSubsystem.java | 3 +- 7 files changed, 31 insertions(+), 47 deletions(-) diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 1b556422..1f552a66 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -74,9 +74,9 @@ public final class Constants { // under strict caveat emptor -- and submit any error and bugfixes // via GitHub issues. 
private static SwerveType swerveType = SwerveType.PHOENIX6; // PHOENIX6, YAGSL - private static CTREPro phoenixPro = CTREPro.UNLICENSED; // LICENSED, UNLICENSED + private static CTREPro phoenixPro = CTREPro.LICENSED; // LICENSED, UNLICENSED private static AutoType autoType = AutoType.MANUAL; // MANUAL, PATHPLANNER, CHOREO - private static VisionType visionType = VisionType.NONE; // PHOTON, LIMELIGHT, NONE + private static VisionType visionType = VisionType.PHOTON; // PHOTON, LIMELIGHT, NONE /** Enumerate the robot types (name your robots here) */ public static enum RobotType { @@ -168,7 +168,7 @@ public static final class PowerConstants { /************************************************************************* */ /** List of Robot CAN Busses ********************************************* */ public static final class CANBuses { - public static final String RIO = ""; + public static final String RIO = "rio"; public static final String DRIVE = "DriveTrain"; public static final String[] ALL = {RIO, DRIVE}; @@ -487,7 +487,7 @@ public record CameraConfig( // Example Cameras are mounted in the back corners, 18" up from the floor, facing sideways public static final CameraConfig[] ALL = { new CameraConfig( - "camera_0", + "Photon_BW7", new Transform3d( Inches.of(-13.0), Inches.of(13.0), @@ -504,23 +504,23 @@ public record CameraConfig( } }), // - new CameraConfig( - "camera_1", - new Transform3d( - Inches.of(-13.0), - Inches.of(-13.0), - Inches.of(12.0), - new Rotation3d(0.0, 0.0, -Math.PI / 2)), - 1.0, - new SimCameraProperties() { - { - setCalibration(1280, 800, Rotation2d.fromDegrees(120)); - setCalibError(0.25, 0.08); - setFPS(30); - setAvgLatencyMs(20); - setLatencyStdDevMs(5); - } - }), + // new CameraConfig( + // "camera_1", + // new Transform3d( + // Inches.of(-13.0), + // Inches.of(-13.0), + // Inches.of(12.0), + // new Rotation3d(0.0, 0.0, -Math.PI / 2)), + // 1.0, + // new SimCameraProperties() { + // { + // setCalibration(1280, 800, Rotation2d.fromDegrees(120)); + // setCalibError(0.25, 0.08); + // setFPS(30); + // setAvgLatencyMs(20); + // setLatencyStdDevMs(5); + // } + // }), // ... And more, if needed }; diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index 5a1e15a3..0dbae5e0 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -147,9 +147,9 @@ public void robotPeriodic() { } final long t4 = System.nanoTime(); - Logger.recordOutput("Loop/RobotPeriodic_ms", (t4 - t0) / 1e6); - Logger.recordOutput("Loop/Virtual_ms", (t2 - t1) / 1e6); - Logger.recordOutput("Loop/Scheduler_ms", (t3 - t2) / 1e6); + Logger.recordOutput("LogPeriodic/CodeLoop/RobotPeriodicMS", (t4 - t0) / 1e6); + Logger.recordOutput("LogPeriodic/CodeLoop/VirtualMS", (t2 - t1) / 1e6); + Logger.recordOutput("LogPeriodic/CodeLoop/SchedulerMS", (t3 - t2) / 1e6); } /** This function is called once when the robot is disabled. 
*/ diff --git a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java index 2fb0361e..e6ebe08d 100644 --- a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java +++ b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java @@ -96,7 +96,7 @@ public void rbsiPeriodic() { final double[] ts = imuInputs.odometryYawTimestamps; if (ts.length > 0) { - Logger.recordOutput("IMU/OdometryLatencySec", TimeUtil.now() - ts[ts.length - 1]); + Logger.recordOutput("Odometry/IMULatencySec", TimeUtil.now() - ts[ts.length - 1]); } } } diff --git a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java index aff93c27..7dc4925b 100644 --- a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java +++ b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java @@ -17,7 +17,6 @@ package frc.robot.subsystems.drive; -import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.kinematics.SwerveModulePosition; import edu.wpi.first.wpilibj.DriverStation; @@ -35,8 +34,6 @@ public final class DriveOdometry extends VirtualSubsystem { private final Imu imu; private final Module[] modules; - private long writeNumber = 0L; - // Per-cycle cached objects (to avoid repeated allocations) private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; @@ -64,7 +61,6 @@ protected int getPeriodPriority() { /** Periodic function to read inputs */ @Override public void rbsiPeriodic() { - Logger.recordOutput("Odometry/Debug/alive", true); Drive.odometryLock.lock(); try { @@ -118,7 +114,6 @@ public void rbsiPeriodic() { // keep pose buffer alive with the *current estimator pose* drive.poseBufferAddSample(now, drive.getPose()); - Logger.recordOutput("Drive/Pose", drive.getPose()); drive.setGyroDisconnectedAlert(!imuInputs.connected); return; } @@ -262,23 +257,11 @@ public void rbsiPeriodic() { drive.poseBufferAddSample(t, drive.getPose()); } - Logger.recordOutput("Drive/Pose", drive.getPose()); drive.setGyroDisconnectedAlert(!imuInputs.connected); } finally { - final Pose2d pose = drive.getPose(); - final double x = pose.getX(); - final double y = pose.getY(); - final double th = pose.getRotation().getRadians(); - - Logger.recordOutput("OdometryReplay/Debug/wroteRobotPose", ++writeNumber); - Logger.recordOutput("OdometryReplay/Debug/xFinite", Double.isFinite(x)); - Logger.recordOutput("OdometryReplay/Debug/yFinite", Double.isFinite(y)); - Logger.recordOutput("OdometryReplay/Debug/thFinite", Double.isFinite(th)); - - Logger.recordOutput("OdometryReplay/RobotX", x); - Logger.recordOutput("OdometryReplay/RobotY", y); - Logger.recordOutput("OdometryReplay/RobotThetaRad", th); + + Logger.recordOutput("Odometry/Robot", drive.getPose()); Drive.odometryLock.unlock(); } diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java b/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java index 102b60e7..1bf04a7d 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java @@ -94,7 +94,7 @@ public void updateInputs(ImuIOInputs inputs) { inputs.odometryYawTimestamps = tsOut; inputs.odometryYawPositionsRad = yawOut; - // Optional: SIM logging (primitive-friendly) + // SIM logging Logger.recordOutput("IMU/YawRad", yawRad); Logger.recordOutput("IMU/YawDeg", Units.radiansToDegrees(yawRad)); Logger.recordOutput("IMU/YawRateDps", 
Units.radiansToDegrees(yawRateRadPerSec)); diff --git a/src/main/java/frc/robot/util/RBSISubsystem.java b/src/main/java/frc/robot/util/RBSISubsystem.java index cb135207..b90752fb 100644 --- a/src/main/java/frc/robot/util/RBSISubsystem.java +++ b/src/main/java/frc/robot/util/RBSISubsystem.java @@ -39,7 +39,7 @@ public final void periodic() { long start = System.nanoTime(); rbsiPeriodic(); // Log the timing for this subsystem - Logger.recordOutput("Loop/Mech/" + name + "_ms", (System.nanoTime() - start) / 1e6); + Logger.recordOutput("LogPeriodic/Subsystem/" + name + "MS", (System.nanoTime() - start) / 1e6); } /** Subclasses must implement this instead of periodic(). */ diff --git a/src/main/java/frc/robot/util/VirtualSubsystem.java b/src/main/java/frc/robot/util/VirtualSubsystem.java index 36ece7bd..bb99b7ea 100644 --- a/src/main/java/frc/robot/util/VirtualSubsystem.java +++ b/src/main/java/frc/robot/util/VirtualSubsystem.java @@ -73,7 +73,8 @@ public final void periodic() { long start = System.nanoTime(); rbsiPeriodic(); // Log the timing for this subsystem - Logger.recordOutput("Loop/Virtual/" + name + "_ms", (System.nanoTime() - start) / 1e6); + Logger.recordOutput( + "LogPeriodic/VirtualSubsystem/" + name + "MS", (System.nanoTime() - start) / 1e6); } /** Subclasses must implement this instead of periodic(). */
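
For reference, below is a minimal standalone sketch (not part of the patch series) of the time-alignment idea that the patched Vision.timeAlignPose() applies: a vision pose measured at time ts is shifted to the fusion time tFusion by applying the odometry delta between those two timestamps. The sketch uses WPILib's stock TimeInterpolatableBuffer rather than the ConcurrentTimeInterpolatableBuffer added in this series, and the class and method names (TimeAlignSketch, addOdometrySample, timeAlign) are illustrative assumptions, not code from the repository.

import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Transform2d;
import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer;
import java.util.Optional;

public final class TimeAlignSketch {
  // Buffer of odometry poses keyed by timestamp (2-second history window, chosen arbitrarily here)
  private final TimeInterpolatableBuffer<Pose2d> odomBuffer =
      TimeInterpolatableBuffer.createBuffer(2.0);

  /** Record the current odometry pose; call this on every odometry update. */
  public void addOdometrySample(double timestampSeconds, Pose2d odomPose) {
    odomBuffer.addSample(timestampSeconds, odomPose);
  }

  /**
   * Move a vision pose measured at ts to where it would be at tFusion, using the odometry
   * motion between the two times. Returns empty if either odometry sample is unavailable.
   */
  public Optional<Pose2d> timeAlign(Pose2d visionPoseAtTs, double ts, double tFusion) {
    Optional<Pose2d> odomAtTs = odomBuffer.getSample(ts);
    Optional<Pose2d> odomAtTf = odomBuffer.getSample(tFusion);
    if (odomAtTs.isEmpty() || odomAtTf.isEmpty()) {
      return Optional.empty();
    }
    // Transform that carries the odometry pose at ts onto the odometry pose at tFusion
    Transform2d tsToTf = odomAtTf.get().minus(odomAtTs.get());
    // Apply the same rigid-body motion to the vision measurement
    return Optional.of(visionPoseAtTs.transformBy(tsToTf));
  }
}

Applying the same SE(2) transform to the vision pose keeps the measurement consistent with the robot's motion between capture and fusion, which is what allows multi-camera estimates taken at slightly different times to be fused at a single timestamp before injection into the pose estimator.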