// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package edu.wpi.first.math.estimator;

import edu.wpi.first.math.MathUtil;
import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.Nat;
import edu.wpi.first.math.VecBuilder;
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.geometry.Twist2d;
import edu.wpi.first.math.interpolation.Interpolatable;
import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer;
import edu.wpi.first.math.kinematics.SwerveDriveKinematics;
import edu.wpi.first.math.kinematics.SwerveDriveOdometry;
import edu.wpi.first.math.kinematics.SwerveModulePosition;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.numbers.N3;
import edu.wpi.first.util.WPIUtilJNI;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;

/**
 * This class wraps {@link SwerveDriveOdometry Swerve Drive Odometry} to fuse latency-compensated
 * vision measurements with swerve drive encoder distance measurements. It is intended to be a
 * drop-in replacement for {@link edu.wpi.first.math.kinematics.SwerveDriveOdometry}.
 *
 * <p>{@link SwerveDrivePoseEstimator#update} should be called every robot loop.
 *
 * <p>{@link SwerveDrivePoseEstimator#addVisionMeasurement} can be called as infrequently as you
 * want; if you never call it, then this class will behave as regular encoder odometry.
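 *
 * <p>A minimal usage sketch follows; the {@code kinematics}, {@code gyro}, and
 * {@code getModulePositions()} objects are placeholders for your own drivetrain code, and the
 * vision values are assumed to come from your own vision pipeline:
 *
 * <pre>{@code
 * var estimator =
 *     new SwerveDrivePoseEstimator(
 *         kinematics, gyro.getRotation2d(), getModulePositions(), new Pose2d());
 *
 * // In the periodic loop:
 * estimator.update(gyro.getRotation2d(), getModulePositions());
 *
 * // Whenever a vision pose arrives (timestamp in the FPGA epoch):
 * estimator.addVisionMeasurement(visionPose, visionTimestampSeconds);
 * }</pre>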
 */
public class SwerveDrivePoseEstimator {
  private final SwerveDriveKinematics m_kinematics;
  private final SwerveDriveOdometry m_odometry;
  private final Matrix<N3, N1> m_q = new Matrix<>(Nat.N3(), Nat.N1());
  private final int m_numModules;
  private Matrix<N3, N3> m_visionK = new Matrix<>(Nat.N3(), Nat.N3());

  private final TimeInterpolatableBuffer<InterpolationRecord> m_poseBuffer =
      TimeInterpolatableBuffer.createBuffer(1.5);

  /**
   * Constructs a SwerveDrivePoseEstimator with default standard deviations for the model and vision
   * measurements.
   *
   * <p>The default standard deviations of the model states are 0.1 meters for x, 0.1 meters for y,
   * and 0.1 radians for heading. The default standard deviations of the vision measurements are 0.9
   * meters for x, 0.9 meters for y, and 0.9 radians for heading.
   *
   * @param kinematics A correctly-configured kinematics object for your drivetrain.
   * @param gyroAngle The current gyro angle.
   * @param modulePositions The current distance measurements and rotations of the swerve modules.
   * @param initialPoseMeters The starting pose estimate.
   */
  public SwerveDrivePoseEstimator(
      SwerveDriveKinematics kinematics,
      Rotation2d gyroAngle,
      SwerveModulePosition[] modulePositions,
      Pose2d initialPoseMeters) {
    this(
        kinematics,
        gyroAngle,
        modulePositions,
        initialPoseMeters,
        VecBuilder.fill(0.1, 0.1, 0.1),
        VecBuilder.fill(0.9, 0.9, 0.9));
  }

  /**
   * Constructs a SwerveDrivePoseEstimator.
   *
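   * <p>For example, the estimator could be constructed with custom standard deviations (a sketch;
   * the numbers and the {@code kinematics}, {@code gyroAngle}, and {@code modulePositions} objects
   * are placeholders for your own configuration and tuning):
   *
   * <pre>{@code
   * var estimator =
   *     new SwerveDrivePoseEstimator(
   *         kinematics,
   *         gyroAngle,
   *         modulePositions,
   *         new Pose2d(),
   *         VecBuilder.fill(0.05, 0.05, 0.01),
   *         VecBuilder.fill(0.5, 0.5, 0.5));
   * }</pre>
   *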
   * @param kinematics A correctly-configured kinematics object for your drivetrain.
   * @param gyroAngle The current gyro angle.
   * @param modulePositions The current distance and rotation measurements of the swerve modules.
   * @param initialPoseMeters The starting pose estimate.
   * @param stateStdDevs Standard deviations of the pose estimate (x position in meters, y position
   *     in meters, and heading in radians). Increase these numbers to trust your state estimate
   *     less.
   * @param visionMeasurementStdDevs Standard deviations of the vision pose measurement (x position
   *     in meters, y position in meters, and heading in radians). Increase these numbers to trust
   *     the vision pose measurement less.
   */
  public SwerveDrivePoseEstimator(
      SwerveDriveKinematics kinematics,
      Rotation2d gyroAngle,
      SwerveModulePosition[] modulePositions,
      Pose2d initialPoseMeters,
      Matrix<N3, N1> stateStdDevs,
      Matrix<N3, N1> visionMeasurementStdDevs) {
    m_kinematics = kinematics;
    m_odometry = new SwerveDriveOdometry(kinematics, gyroAngle, modulePositions, initialPoseMeters);

    for (int i = 0; i < 3; ++i) {
      m_q.set(i, 0, stateStdDevs.get(i, 0) * stateStdDevs.get(i, 0));
    }

    m_numModules = modulePositions.length;

    setVisionMeasurementStdDevs(visionMeasurementStdDevs);
  }

  /**
   * Sets the pose estimator's trust of global measurements. This might be used to change trust in
   * vision measurements after the autonomous period, or to change trust as distance to a vision
   * target increases.
   *
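   * <p>For example, vision could be trusted less as the distance to a target grows (a sketch;
   * {@code distanceToTargetMeters} and the scale factor are placeholders for your own tuning):
   *
   * <pre>{@code
   * double d = distanceToTargetMeters;
   * estimator.setVisionMeasurementStdDevs(VecBuilder.fill(0.1 * d, 0.1 * d, 0.1 * d));
   * }</pre>
   *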
   * @param visionMeasurementStdDevs Standard deviations of the vision measurements. Increase these
   *     numbers to trust global measurements from vision less. This matrix is in the form [x, y,
   *     theta]ᵀ, with units in meters and radians.
   */
  public void setVisionMeasurementStdDevs(Matrix<N3, N1> visionMeasurementStdDevs) {
    var r = new double[3];
    for (int i = 0; i < 3; ++i) {
      r[i] = visionMeasurementStdDevs.get(i, 0) * visionMeasurementStdDevs.get(i, 0);
    }

    // Solve for closed form Kalman gain for continuous Kalman filter with A = 0
    // and C = I. See wpimath/algorithms.md.
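    // Intuitively, each diagonal gain below lands in [0, 1]: it approaches 1 as the vision
    // standard deviation approaches zero (trust vision fully) and approaches 0 as it grows
    // large (ignore vision).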
    for (int row = 0; row < 3; ++row) {
      if (m_q.get(row, 0) == 0.0) {
        m_visionK.set(row, row, 0.0);
      } else {
        m_visionK.set(
            row, row, m_q.get(row, 0) / (m_q.get(row, 0) + Math.sqrt(m_q.get(row, 0) * r[row])));
      }
    }
  }

  /**
   * Resets the robot's position on the field.
   *
   * <p>The gyroscope angle does not need to be reset in the user's robot code. The library
   * automatically takes care of offsetting the gyro angle.
   *
   * @param gyroAngle The angle reported by the gyroscope.
   * @param modulePositions The current distance measurements and rotations of the swerve modules.
   * @param poseMeters The position on the field that your robot is at.
   */
  public void resetPosition(
      Rotation2d gyroAngle, SwerveModulePosition[] modulePositions, Pose2d poseMeters) {
    // Reset state estimate and error covariance
    m_odometry.resetPosition(gyroAngle, modulePositions, poseMeters);
    m_poseBuffer.clear();
  }

  /**
   * Gets the estimated robot pose.
   *
   * @return The estimated robot pose in meters.
   */
  public Pose2d getEstimatedPosition() {
    return m_odometry.getPoseMeters();
  }

  /**
   * Adds a vision measurement to the Kalman Filter. This will correct the odometry pose estimate
   * while still accounting for measurement noise.
   *
   * <p>This method can be called as infrequently as you want, as long as you are calling {@link
   * SwerveDrivePoseEstimator#update} every loop.
   *
   * <p>To promote stability of the pose estimate and make it robust to bad vision data, we
   * recommend only adding vision measurements that are already within one meter or so of the
   * current pose estimate.
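   *
   * <p>For example, with a camera whose pipeline reports its own processing latency (a sketch;
   * {@code latencySeconds} is a placeholder for a value reported by your vision code):
   *
   * <pre>{@code
   * estimator.addVisionMeasurement(
   *     visionPose, Timer.getFPGATimestamp() - latencySeconds);
   * }</pre>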
   *
   * @param visionRobotPoseMeters The pose of the robot as measured by the vision camera.
   * @param timestampSeconds The timestamp of the vision measurement in seconds. Note that if you
   *     don't use your own time source by calling {@link
   *     SwerveDrivePoseEstimator#updateWithTime(double,Rotation2d,SwerveModulePosition[])} then you
   *     must use a timestamp with an epoch since FPGA startup (i.e., the epoch of this timestamp is
   *     the same epoch as {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()}.) This means that
   *     you should use {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()} as your time source
   *     or sync the epochs.
   */
  public void addVisionMeasurement(Pose2d visionRobotPoseMeters, double timestampSeconds) {
    // Step 1: Get the pose odometry measured at the moment the vision measurement was made.
    var sample = m_poseBuffer.getSample(timestampSeconds);

    if (sample.isEmpty()) {
      return;
    }

    // Step 2: Measure the twist between the odometry pose and the vision pose.
    var twist = sample.get().poseMeters.log(visionRobotPoseMeters);

    // Step 3: We should not trust the twist entirely, so instead we scale this twist by a Kalman
    // gain matrix representing how much we trust vision measurements compared to our current pose.
    var k_times_twist = m_visionK.times(VecBuilder.fill(twist.dx, twist.dy, twist.dtheta));

    // Step 4: Convert back to Twist2d.
    var scaledTwist =
        new Twist2d(k_times_twist.get(0, 0), k_times_twist.get(1, 0), k_times_twist.get(2, 0));

    // Step 5: Reset Odometry to state at sample with vision adjustment.
    m_odometry.resetPosition(
        sample.get().gyroAngle,
        sample.get().modulePositions,
        sample.get().poseMeters.exp(scaledTwist));

    // Step 6: Replay odometry inputs between sample time and latest recorded sample to update the
    // pose buffer and correct odometry.
    for (Map.Entry<Double, InterpolationRecord> entry :
        m_poseBuffer.getInternalBuffer().tailMap(timestampSeconds).entrySet()) {
      updateWithTime(entry.getKey(), entry.getValue().gyroAngle, entry.getValue().modulePositions);
    }
  }

  /**
   * Adds a vision measurement to the Kalman Filter. This will correct the odometry pose estimate
   * while still accounting for measurement noise.
   *
   * <p>This method can be called as infrequently as you want, as long as you are calling {@link
   * SwerveDrivePoseEstimator#update} every loop.
   *
   * <p>To promote stability of the pose estimate and make it robust to bad vision data, we
   * recommend only adding vision measurements that are already within one meter or so of the
   * current pose estimate.
   *
   * <p>Note that the vision measurement standard deviations passed into this method will continue
   * to apply to future measurements until a subsequent call to {@link
   * SwerveDrivePoseEstimator#setVisionMeasurementStdDevs(Matrix)} or this method.
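   *
   * <p>For example, a less-trusted measurement can be added with larger standard deviations (a
   * sketch; the numbers are arbitrary placeholders):
   *
   * <pre>{@code
   * estimator.addVisionMeasurement(
   *     visionPose, visionTimestampSeconds, VecBuilder.fill(0.5, 0.5, 0.5));
   * }</pre>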
   *
   * @param visionRobotPoseMeters The pose of the robot as measured by the vision camera.
   * @param timestampSeconds The timestamp of the vision measurement in seconds. Note that if you
   *     don't use your own time source by calling {@link
   *     SwerveDrivePoseEstimator#updateWithTime(double,Rotation2d,SwerveModulePosition[])}, then
   *     you must use a timestamp with an epoch since FPGA startup (i.e., the epoch of this
   *     timestamp is the same epoch as {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()}.)
   *     This means that you should use {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()} as
   *     your time source in this case.
   * @param visionMeasurementStdDevs Standard deviations of the vision pose measurement (x position
   *     in meters, y position in meters, and heading in radians). Increase these numbers to trust
   *     the vision pose measurement less.
   */
  public void addVisionMeasurement(
      Pose2d visionRobotPoseMeters,
      double timestampSeconds,
      Matrix<N3, N1> visionMeasurementStdDevs) {
    setVisionMeasurementStdDevs(visionMeasurementStdDevs);
    addVisionMeasurement(visionRobotPoseMeters, timestampSeconds);
  }

  /**
   * Updates the pose estimator with wheel encoder and gyro information. This should be called every
   * loop.
   *
   * @param gyroAngle The current gyro angle.
   * @param modulePositions The current distance measurements and rotations of the swerve modules.
   * @return The estimated pose of the robot in meters.
   */
  public Pose2d update(Rotation2d gyroAngle, SwerveModulePosition[] modulePositions) {
    return updateWithTime(WPIUtilJNI.now() * 1.0e-6, gyroAngle, modulePositions);
  }

  /**
   * Updates the pose estimator with wheel encoder and gyro information. This should be called every
   * loop.
   *
   * @param currentTimeSeconds Time at which this method was called, in seconds.
   * @param gyroAngle The current gyroscope angle.
   * @param modulePositions The current distance measurements and rotations of the swerve modules.
   * @return The estimated pose of the robot in meters.
   */
  public Pose2d updateWithTime(
      double currentTimeSeconds, Rotation2d gyroAngle, SwerveModulePosition[] modulePositions) {
    if (modulePositions.length != m_numModules) {
      throw new IllegalArgumentException(
          "Number of modules is not consistent with number of wheel locations provided in "
              + "constructor");
    }

    var internalModulePositions = new SwerveModulePosition[m_numModules];

    for (int i = 0; i < m_numModules; i++) {
      internalModulePositions[i] =
          new SwerveModulePosition(modulePositions[i].distanceMeters, modulePositions[i].angle);
    }

    m_odometry.update(gyroAngle, internalModulePositions);

    m_poseBuffer.addSample(
        currentTimeSeconds,
        new InterpolationRecord(getEstimatedPosition(), gyroAngle, internalModulePositions));

    return getEstimatedPosition();
  }

  /**
   * Represents an odometry record. The record contains the inputs provided as well as the pose
   * that was observed based on these inputs.
   */
  private class InterpolationRecord implements Interpolatable<InterpolationRecord> {
    // The pose observed given the current sensor inputs and the previous pose.
    private final Pose2d poseMeters;

    // The current gyro angle.
    private final Rotation2d gyroAngle;

    // The distances and rotations measured at each module.
    private final SwerveModulePosition[] modulePositions;

    /**
     * Constructs an Interpolation Record with the specified parameters.
     *
     * @param poseMeters The pose observed given the current sensor inputs and the previous pose.
     * @param gyro The current gyro angle.
     * @param modulePositions The distances and rotations measured at each module.
     */
    private InterpolationRecord(
        Pose2d poseMeters, Rotation2d gyro, SwerveModulePosition[] modulePositions) {
      this.poseMeters = poseMeters;
      this.gyroAngle = gyro;
      this.modulePositions = modulePositions;
    }

    /**
     * Return the interpolated record. This object is assumed to be the starting position, or lower
     * bound.
     *
     * @param endValue The upper bound, or end.
     * @param t How far between the lower and upper bound we are. This should be bounded in [0, 1].
     * @return The interpolated value.
     */
    @Override
    public InterpolationRecord interpolate(InterpolationRecord endValue, double t) {
      if (t < 0) {
        return this;
      } else if (t >= 1) {
        return endValue;
      } else {
        // Find the new wheel distances.
        var modulePositions = new SwerveModulePosition[m_numModules];

        // Find the distance travelled between this measurement and the interpolated measurement.
        var moduleDeltas = new SwerveModulePosition[m_numModules];

        for (int i = 0; i < m_numModules; i++) {
          double ds =
              MathUtil.interpolate(
                  this.modulePositions[i].distanceMeters,
                  endValue.modulePositions[i].distanceMeters,
                  t);
          Rotation2d theta =
              this.modulePositions[i].angle.interpolate(endValue.modulePositions[i].angle, t);
          modulePositions[i] = new SwerveModulePosition(ds, theta);
          moduleDeltas[i] =
              new SwerveModulePosition(ds - this.modulePositions[i].distanceMeters, theta);
        }

        // Find the new gyro angle.
        var gyro_lerp = gyroAngle.interpolate(endValue.gyroAngle, t);

        // Create a twist to represent this change based on the interpolated sensor inputs.
        Twist2d twist = m_kinematics.toTwist2d(moduleDeltas);
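        // Trust the gyro for the heading change rather than the module-derived dtheta, since the
        // gyro is generally the more reliable heading source.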
        twist.dtheta = gyro_lerp.minus(gyroAngle).getRadians();

        return new InterpolationRecord(poseMeters.exp(twist), gyro_lerp, modulePositions);
      }
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (!(obj instanceof InterpolationRecord)) {
        return false;
      }
      InterpolationRecord record = (InterpolationRecord) obj;
      return Objects.equals(gyroAngle, record.gyroAngle)
          && Arrays.equals(modulePositions, record.modulePositions)
          && Objects.equals(poseMeters, record.poseMeters);
    }

    @Override
    public int hashCode() {
      return Objects.hash(gyroAngle, Arrays.hashCode(modulePositions), poseMeters);
    }
  }
}