// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package edu.wpi.first.math.estimator;

import edu.wpi.first.math.MathSharedStore;
import edu.wpi.first.math.MathUtil;
import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.Nat;
import edu.wpi.first.math.VecBuilder;
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.geometry.Twist2d;
import edu.wpi.first.math.interpolation.Interpolatable;
import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer;
import edu.wpi.first.math.kinematics.DifferentialDriveKinematics;
import edu.wpi.first.math.kinematics.DifferentialDriveOdometry;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.numbers.N3;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;

/**
 * This class wraps {@link DifferentialDriveOdometry Differential Drive Odometry} to fuse
 * latency-compensated vision measurements with differential drive encoder measurements. It is
 * intended to be a drop-in replacement for {@link DifferentialDriveOdometry}; in fact, if you never
 * call {@link DifferentialDrivePoseEstimator#addVisionMeasurement} and only call {@link
 * DifferentialDrivePoseEstimator#update} then this will behave exactly the same as
 * DifferentialDriveOdometry.
 *
 * <p>{@link DifferentialDrivePoseEstimator#update} should be called every robot loop.
 *
 * <p>{@link DifferentialDrivePoseEstimator#addVisionMeasurement} can be called as infrequently as
 * you want; if you never call it then this class will behave exactly like regular encoder odometry.
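 *
 * <p>A minimal usage sketch appears below; the gyro, encoder, and vision objects are placeholders
 * for whatever sensors a particular robot uses.
 *
 * <pre>{@code
 * var kinematics = new DifferentialDriveKinematics(0.6);
 * var estimator =
 *     new DifferentialDrivePoseEstimator(
 *         kinematics, gyro.getRotation2d(), 0.0, 0.0, new Pose2d());
 *
 * // Call every robot loop:
 * estimator.update(gyro.getRotation2d(), leftEncoder.getDistance(), rightEncoder.getDistance());
 *
 * // Call whenever a vision measurement is available:
 * estimator.addVisionMeasurement(visionPose, visionTimestampSeconds);
 * }</pre>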
 */
public class DifferentialDrivePoseEstimator {
  private final DifferentialDriveKinematics m_kinematics;
  private final DifferentialDriveOdometry m_odometry;

  // Squared state standard deviations (variances), and the vision correction Kalman gain computed
  // from them in setVisionMeasurementStdDevs().
  private final Matrix<N3, N1> m_q = new Matrix<>(Nat.N3(), Nat.N1());
  private Matrix<N3, N3> m_visionK = new Matrix<>(Nat.N3(), Nat.N3());

  // How long the pose buffer retains odometry samples, in seconds. Vision measurements older than
  // this are discarded.
  private static final double kBufferDuration = 1.5;

  private final TimeInterpolatableBuffer<InterpolationRecord> m_poseBuffer =
      TimeInterpolatableBuffer.createBuffer(kBufferDuration);

  /**
   * Constructs a DifferentialDrivePoseEstimator with default standard deviations for the model and
   * vision measurements.
   *
   * <p>The default standard deviations of the model states are 0.02 meters for x, 0.02 meters for
   * y, and 0.01 radians for heading. The default standard deviations of the vision measurements are
   * 0.1 meters for x, 0.1 meters for y, and 0.1 radians for heading.
   *
   * @param kinematics A correctly-configured kinematics object for your drivetrain.
   * @param gyroAngle The current gyro angle.
   * @param leftDistanceMeters The distance traveled by the left encoder.
   * @param rightDistanceMeters The distance traveled by the right encoder.
   * @param initialPoseMeters The starting pose estimate.
   */
  public DifferentialDrivePoseEstimator(
      DifferentialDriveKinematics kinematics,
      Rotation2d gyroAngle,
      double leftDistanceMeters,
      double rightDistanceMeters,
      Pose2d initialPoseMeters) {
    this(
        kinematics,
        gyroAngle,
        leftDistanceMeters,
        rightDistanceMeters,
        initialPoseMeters,
        VecBuilder.fill(0.02, 0.02, 0.01),
        VecBuilder.fill(0.1, 0.1, 0.1));
  }

  /**
   * Constructs a DifferentialDrivePoseEstimator.
   *
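   * <p>A sketch of constructing an estimator with custom standard deviations is shown below; gyro
   * is a placeholder sensor object, and the values are illustrative, not tuned recommendations.
   *
   * <pre>{@code
   * var estimator =
   *     new DifferentialDrivePoseEstimator(
   *         kinematics,
   *         gyro.getRotation2d(),
   *         0.0,
   *         0.0,
   *         new Pose2d(),
   *         VecBuilder.fill(0.02, 0.02, 0.01), // state std devs: x (m), y (m), heading (rad)
   *         VecBuilder.fill(0.5, 0.5, 0.5)); // vision std devs: x (m), y (m), heading (rad)
   * }</pre>
   *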
   * @param kinematics A correctly-configured kinematics object for your drivetrain.
   * @param gyroAngle The gyro angle of the robot.
   * @param leftDistanceMeters The distance traveled by the left encoder.
   * @param rightDistanceMeters The distance traveled by the right encoder.
   * @param initialPoseMeters The estimated initial pose.
   * @param stateStdDevs Standard deviations of the pose estimate (x position in meters, y position
   *     in meters, and heading in radians). Increase these numbers to trust your state estimate
   *     less.
   * @param visionMeasurementStdDevs Standard deviations of the vision pose measurement (x position
   *     in meters, y position in meters, and heading in radians). Increase these numbers to trust
   *     the vision pose measurement less.
   */
  public DifferentialDrivePoseEstimator(
      DifferentialDriveKinematics kinematics,
      Rotation2d gyroAngle,
      double leftDistanceMeters,
      double rightDistanceMeters,
      Pose2d initialPoseMeters,
      Matrix<N3, N1> stateStdDevs,
      Matrix<N3, N1> visionMeasurementStdDevs) {
    m_kinematics = kinematics;
    m_odometry =
        new DifferentialDriveOdometry(
            gyroAngle, leftDistanceMeters, rightDistanceMeters, initialPoseMeters);

    for (int i = 0; i < 3; ++i) {
      m_q.set(i, 0, stateStdDevs.get(i, 0) * stateStdDevs.get(i, 0));
    }

    // Initialize the vision measurement noise covariance R.
    setVisionMeasurementStdDevs(visionMeasurementStdDevs);
  }

  /**
   * Sets the pose estimator's trust in global measurements. This might be used to change trust in
   * vision measurements after the autonomous period, or to change trust as distance to a vision
   * target increases.
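   *
   * <p>For example, a rough distance-based schedule might look like the sketch below, where
   * distanceToTargetMeters is a placeholder for however range to the vision target is measured.
   *
   * <pre>{@code
   * double d = distanceToTargetMeters;
   * estimator.setVisionMeasurementStdDevs(VecBuilder.fill(0.1 * d, 0.1 * d, 0.1 * d));
   * }</pre>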
   *
   * @param visionMeasurementStdDevs Standard deviations of the vision measurements. Increase these
   *     numbers to trust global measurements from vision less. This matrix is in the form [x, y,
   *     theta]ᵀ, with units in meters and radians.
   */
  public void setVisionMeasurementStdDevs(Matrix<N3, N1> visionMeasurementStdDevs) {
    var r = new double[3];
    for (int i = 0; i < 3; ++i) {
      r[i] = visionMeasurementStdDevs.get(i, 0) * visionMeasurementStdDevs.get(i, 0);
    }

    // Solve for closed form Kalman gain for continuous Kalman filter with A = 0
    // and C = I. See wpimath/algorithms.md.
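    // Each diagonal entry is k = q / (q + sqrt(q * r)): as vision noise r approaches zero the
    // gain approaches 1 (snap to vision), and as r grows the gain approaches 0 (ignore vision).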
    for (int row = 0; row < 3; ++row) {
      if (m_q.get(row, 0) == 0.0) {
        m_visionK.set(row, row, 0.0);
      } else {
        m_visionK.set(
            row, row, m_q.get(row, 0) / (m_q.get(row, 0) + Math.sqrt(m_q.get(row, 0) * r[row])));
      }
    }
  }

  /**
   * Resets the robot's position on the field.
   *
   * <p>The gyroscope angle does not need to be reset in the user's robot code. The library
   * automatically takes care of offsetting the gyro angle.
   *
   * @param gyroAngle The angle reported by the gyroscope.
   * @param leftPositionMeters The distance traveled by the left encoder.
   * @param rightPositionMeters The distance traveled by the right encoder.
   * @param poseMeters The position on the field that your robot is at.
   */
  public void resetPosition(
      Rotation2d gyroAngle,
      double leftPositionMeters,
      double rightPositionMeters,
      Pose2d poseMeters) {
    // Reset state estimate and error covariance
    m_odometry.resetPosition(gyroAngle, leftPositionMeters, rightPositionMeters, poseMeters);
    m_poseBuffer.clear();
  }

  /**
   * Gets the estimated robot pose.
   *
   * @return The estimated robot pose in meters.
   */
  public Pose2d getEstimatedPosition() {
    return m_odometry.getPoseMeters();
  }

  /**
   * Adds a vision measurement to the Kalman Filter. This will correct the odometry pose estimate
   * while still accounting for measurement noise.
   *
   * <p>This method can be called as infrequently as you want, as long as you are calling {@link
   * DifferentialDrivePoseEstimator#update} every loop.
   *
   * <p>To promote stability of the pose estimate and make it robust to bad vision data, we
   * recommend only adding vision measurements that are already within one meter or so of the
   * current pose estimate.
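   *
   * <p>A typical call site might look like the sketch below, where camera.getLatencySeconds() is
   * a placeholder for however your vision system reports its capture-to-now latency.
   *
   * <pre>{@code
   * estimator.addVisionMeasurement(
   *     visionPose, Timer.getFPGATimestamp() - camera.getLatencySeconds());
   * }</pre>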
   *
   * @param visionRobotPoseMeters The pose of the robot as measured by the vision camera.
   * @param timestampSeconds The timestamp of the vision measurement in seconds. Note that if you
   *     don't use your own time source by calling {@link
   *     DifferentialDrivePoseEstimator#updateWithTime(double,Rotation2d,double,double)}, then you
   *     must use a timestamp with an epoch since FPGA startup (i.e., the epoch of this timestamp is
   *     the same epoch as {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()}). This means that
   *     you should use {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()} as your time source
   *     or sync the epochs.
   */
  public void addVisionMeasurement(Pose2d visionRobotPoseMeters, double timestampSeconds) {
    // Step 0: If this measurement is old enough to be outside the pose buffer's timespan, skip.
    try {
      if (m_poseBuffer.getInternalBuffer().lastKey() - kBufferDuration > timestampSeconds) {
        return;
      }
    } catch (NoSuchElementException ex) {
      return;
    }

    // Step 1: Get the odometry pose measured at the moment the vision measurement was made.
    var sample = m_poseBuffer.getSample(timestampSeconds);

    if (sample.isEmpty()) {
      return;
    }

    // Step 2: Measure the twist between the odometry pose and the vision pose.
    var twist = sample.get().poseMeters.log(visionRobotPoseMeters);

    // Step 3: We should not trust the twist entirely, so instead we scale this twist by a Kalman
    // gain matrix representing how much we trust vision measurements compared to our current pose.
    var k_times_twist = m_visionK.times(VecBuilder.fill(twist.dx, twist.dy, twist.dtheta));

    // Step 4: Convert back to Twist2d.
    var scaledTwist =
        new Twist2d(k_times_twist.get(0, 0), k_times_twist.get(1, 0), k_times_twist.get(2, 0));

    // Step 5: Reset Odometry to state at sample with vision adjustment.
    m_odometry.resetPosition(
        sample.get().gyroAngle,
        sample.get().leftMeters,
        sample.get().rightMeters,
        sample.get().poseMeters.exp(scaledTwist));

    // Step 6: Record the current pose to allow multiple measurements from the same timestamp.
    m_poseBuffer.addSample(
        timestampSeconds,
        new InterpolationRecord(
            getEstimatedPosition(),
            sample.get().gyroAngle,
            sample.get().leftMeters,
            sample.get().rightMeters));

    // Step 7: Replay odometry inputs between sample time and latest recorded sample to update the
    // pose buffer and correct odometry.
    for (Map.Entry<Double, InterpolationRecord> entry :
        m_poseBuffer.getInternalBuffer().tailMap(timestampSeconds).entrySet()) {
      updateWithTime(
          entry.getKey(),
          entry.getValue().gyroAngle,
          entry.getValue().leftMeters,
          entry.getValue().rightMeters);
    }
  }

  /**
   * Adds a vision measurement to the Kalman Filter. This will correct the odometry pose estimate
   * while still accounting for measurement noise.
   *
   * <p>This method can be called as infrequently as you want, as long as you are calling {@link
   * DifferentialDrivePoseEstimator#update} every loop.
   *
   * <p>To promote stability of the pose estimate and make it robust to bad vision data, we
   * recommend only adding vision measurements that are already within one meter or so of the
   * current pose estimate.
   *
   * <p>Note that the vision measurement standard deviations passed into this method will continue
   * to apply to future measurements until a subsequent call to {@link
   * DifferentialDrivePoseEstimator#setVisionMeasurementStdDevs(Matrix)} or this method.
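   *
   * <p>For example, each measurement could be weighted by a per-frame quality score from the
   * vision pipeline, as sketched below; camera.getLatencySeconds() and ambiguity (a value in
   * [0, 1]) are placeholders.
   *
   * <pre>{@code
   * estimator.addVisionMeasurement(
   *     visionPose,
   *     Timer.getFPGATimestamp() - camera.getLatencySeconds(),
   *     VecBuilder.fill(0.1 + ambiguity, 0.1 + ambiguity, 0.1 + ambiguity));
   * }</pre>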
   *
   * @param visionRobotPoseMeters The pose of the robot as measured by the vision camera.
   * @param timestampSeconds The timestamp of the vision measurement in seconds. Note that if you
   *     don't use your own time source by calling {@link
   *     DifferentialDrivePoseEstimator#updateWithTime(double,Rotation2d,double,double)}, then you
   *     must use a timestamp with an epoch since FPGA startup (i.e., the epoch of this timestamp is
   *     the same epoch as {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()}). This means that
   *     you should use {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()} as your time source
   *     in this case.
   * @param visionMeasurementStdDevs Standard deviations of the vision pose measurement (x position
   *     in meters, y position in meters, and heading in radians). Increase these numbers to trust
   *     the vision pose measurement less.
   */
  public void addVisionMeasurement(
      Pose2d visionRobotPoseMeters,
      double timestampSeconds,
      Matrix<N3, N1> visionMeasurementStdDevs) {
    setVisionMeasurementStdDevs(visionMeasurementStdDevs);
    addVisionMeasurement(visionRobotPoseMeters, timestampSeconds);
  }

  /**
   * Updates the pose estimator with wheel encoder and gyro information. This should be called every
   * loop.
   *
   * @param gyroAngle The current gyro angle.
   * @param distanceLeftMeters The total distance traveled by the left wheel in meters.
   * @param distanceRightMeters The total distance traveled by the right wheel in meters.
   * @return The estimated pose of the robot in meters.
   */
  public Pose2d update(
      Rotation2d gyroAngle, double distanceLeftMeters, double distanceRightMeters) {
    return updateWithTime(
        MathSharedStore.getTimestamp(), gyroAngle, distanceLeftMeters, distanceRightMeters);
  }

  /**
   * Updates the pose estimator with wheel encoder and gyro information. This should be called every
   * loop.
   *
   * @param currentTimeSeconds Time at which this method was called, in seconds.
   * @param gyroAngle The current gyro angle.
   * @param distanceLeftMeters The total distance traveled by the left wheel in meters.
   * @param distanceRightMeters The total distance traveled by the right wheel in meters.
   * @return The estimated pose of the robot in meters.
   */
  public Pose2d updateWithTime(
      double currentTimeSeconds,
      Rotation2d gyroAngle,
      double distanceLeftMeters,
      double distanceRightMeters) {
    m_odometry.update(gyroAngle, distanceLeftMeters, distanceRightMeters);
    m_poseBuffer.addSample(
        currentTimeSeconds,
        new InterpolationRecord(
            getEstimatedPosition(), gyroAngle, distanceLeftMeters, distanceRightMeters));

    return getEstimatedPosition();
  }

  /**
   * Represents an odometry record. The record contains the sensor inputs provided as well as the
   * pose that was estimated at the time those inputs were recorded.
   */
  private class InterpolationRecord implements Interpolatable<InterpolationRecord> {
    // The pose observed given the current sensor inputs and the previous pose.
    private final Pose2d poseMeters;

    // The current gyro angle.
    private final Rotation2d gyroAngle;

    // The distance traveled by the left encoder.
    private final double leftMeters;

    // The distance traveled by the right encoder.
    private final double rightMeters;

    /**
     * Constructs an InterpolationRecord with the specified parameters.
     *
     * @param poseMeters The pose observed given the current sensor inputs and the previous pose.
     * @param gyro The current gyro angle.
     * @param leftMeters The distance traveled by the left encoder.
     * @param rightMeters The distance traveled by the right encoder.
     */
    private InterpolationRecord(
        Pose2d poseMeters, Rotation2d gyro, double leftMeters, double rightMeters) {
      this.poseMeters = poseMeters;
      this.gyroAngle = gyro;
      this.leftMeters = leftMeters;
      this.rightMeters = rightMeters;
    }

    /**
     * Return the interpolated record. This object is assumed to be the starting position, or lower
     * bound.
     *
     * @param endValue The upper bound, or end.
     * @param t How far between the lower and upper bound we are. This should be bounded in [0, 1].
     * @return The interpolated value.
     */
    @Override
    public InterpolationRecord interpolate(InterpolationRecord endValue, double t) {
      if (t < 0) {
        return this;
      } else if (t >= 1) {
        return endValue;
      } else {
        // Find the new left distance.
        var left_lerp = MathUtil.interpolate(this.leftMeters, endValue.leftMeters, t);

        // Find the new right distance.
        var right_lerp = MathUtil.interpolate(this.rightMeters, endValue.rightMeters, t);

        // Find the new gyro angle.
        var gyro_lerp = gyroAngle.interpolate(endValue.gyroAngle, t);

        // Create a twist to represent this change based on the interpolated sensor inputs.
        Twist2d twist = m_kinematics.toTwist2d(left_lerp - leftMeters, right_lerp - rightMeters);
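        // Trust the gyro for heading: overwrite the encoder-derived dtheta with the interpolated
        // gyro delta before applying the exponential map.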
        twist.dtheta = gyro_lerp.minus(gyroAngle).getRadians();

        return new InterpolationRecord(poseMeters.exp(twist), gyro_lerp, left_lerp, right_lerp);
      }
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (!(obj instanceof InterpolationRecord)) {
        return false;
      }
      InterpolationRecord record = (InterpolationRecord) obj;
      return Objects.equals(gyroAngle, record.gyroAngle)
          && Double.compare(leftMeters, record.leftMeters) == 0
          && Double.compare(rightMeters, record.rightMeters) == 0
          && Objects.equals(poseMeters, record.poseMeters);
    }

    @Override
    public int hashCode() {
      return Objects.hash(gyroAngle, leftMeters, rightMeters, poseMeters);
    }
  }
}