// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package edu.wpi.first.math.estimator;

import edu.wpi.first.math.MathUtil;
import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.Nat;
import edu.wpi.first.math.VecBuilder;
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.geometry.Twist2d;
import edu.wpi.first.math.interpolation.Interpolatable;
import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer;
import edu.wpi.first.math.kinematics.DifferentialDriveKinematics;
import edu.wpi.first.math.kinematics.DifferentialDriveOdometry;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.numbers.N3;
import edu.wpi.first.util.WPIUtilJNI;
import java.util.Map;
import java.util.Objects;

/**
 * This class wraps {@link DifferentialDriveOdometry Differential Drive Odometry} to fuse
 * latency-compensated vision measurements with differential drive encoder measurements. It is
 * intended to be a drop-in replacement for {@link DifferentialDriveOdometry}; in fact, if you never
 * call {@link DifferentialDrivePoseEstimator#addVisionMeasurement} and only call {@link
 * DifferentialDrivePoseEstimator#update}, then this will behave exactly the same as
 * DifferentialDriveOdometry.
 *
 * <p>{@link DifferentialDrivePoseEstimator#update} should be called every robot loop.
 *
 * <p>{@link DifferentialDrivePoseEstimator#addVisionMeasurement} can be called as infrequently as
 * you want; if you never call it, then this class will behave exactly like regular encoder
 * odometry.
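 *
 * <p>A minimal usage sketch is shown below; the {@code gyro} and encoder objects stand in for
 * whatever hardware your own robot code defines:
 *
 * <pre>{@code
 * // 0.7 meter track width, for illustration
 * var kinematics = new DifferentialDriveKinematics(0.7);
 * var estimator =
 *     new DifferentialDrivePoseEstimator(
 *         kinematics,
 *         gyro.getRotation2d(),
 *         leftEncoder.getDistance(),
 *         rightEncoder.getDistance(),
 *         new Pose2d());
 *
 * // Every robot loop:
 * estimator.update(gyro.getRotation2d(), leftEncoder.getDistance(), rightEncoder.getDistance());
 *
 * // Whenever a vision measurement is available:
 * estimator.addVisionMeasurement(visionRobotPoseMeters, visionTimestampSeconds);
 * }</pre>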
 */
public class DifferentialDrivePoseEstimator {
  private final DifferentialDriveKinematics m_kinematics;
  private final DifferentialDriveOdometry m_odometry;
  private final Matrix<N3, N1> m_q = new Matrix<>(Nat.N3(), Nat.N1());
  private Matrix<N3, N3> m_visionK = new Matrix<>(Nat.N3(), Nat.N3());

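  // Buffer of timestamped odometry samples, kept for 1.5 seconds of history so that vision
  // measurements with latency can be applied at the time they were actually taken.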
  private final TimeInterpolatableBuffer<InterpolationRecord> m_poseBuffer =
      TimeInterpolatableBuffer.createBuffer(1.5);

  /**
   * Constructs a DifferentialDrivePoseEstimator with default standard deviations for the model and
   * vision measurements.
   *
   * <p>The default standard deviations of the model states are 0.02 meters for x, 0.02 meters for
   * y, and 0.01 radians for heading. The default standard deviations of the vision measurements are
   * 0.1 meters for x, 0.1 meters for y, and 0.1 radians for heading.
   *
   * @param kinematics A correctly-configured kinematics object for your drivetrain.
   * @param gyroAngle The current gyro angle.
   * @param leftDistanceMeters The distance traveled by the left encoder.
   * @param rightDistanceMeters The distance traveled by the right encoder.
   * @param initialPoseMeters The starting pose estimate.
   */
  public DifferentialDrivePoseEstimator(
      DifferentialDriveKinematics kinematics,
      Rotation2d gyroAngle,
      double leftDistanceMeters,
      double rightDistanceMeters,
      Pose2d initialPoseMeters) {
    this(
        kinematics,
        gyroAngle,
        leftDistanceMeters,
        rightDistanceMeters,
        initialPoseMeters,
        VecBuilder.fill(0.02, 0.02, 0.01),
        VecBuilder.fill(0.1, 0.1, 0.1));
  }

  /**
   * Constructs a DifferentialDrivePoseEstimator.
   *
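   * <p>For example, to trust wheel odometry more and vision heading less than the defaults (the
   * values here are illustrative, not tuned for any particular robot):
   *
   * <pre>{@code
   * var estimator =
   *     new DifferentialDrivePoseEstimator(
   *         kinematics,
   *         gyro.getRotation2d(),
   *         leftEncoder.getDistance(),
   *         rightEncoder.getDistance(),
   *         new Pose2d(),
   *         VecBuilder.fill(0.01, 0.01, 0.005),
   *         VecBuilder.fill(0.5, 0.5, Math.toRadians(30)));
   * }</pre>
   *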
   * @param kinematics A correctly-configured kinematics object for your drivetrain.
   * @param gyroAngle The gyro angle of the robot.
   * @param leftDistanceMeters The distance traveled by the left encoder.
   * @param rightDistanceMeters The distance traveled by the right encoder.
   * @param initialPoseMeters The estimated initial pose.
   * @param stateStdDevs Standard deviations of the pose estimate (x position in meters, y position
   *     in meters, and heading in radians). Increase these numbers to trust your state estimate
   *     less.
   * @param visionMeasurementStdDevs Standard deviations of the vision pose measurement (x position
   *     in meters, y position in meters, and heading in radians). Increase these numbers to trust
   *     the vision pose measurement less.
   */
  public DifferentialDrivePoseEstimator(
      DifferentialDriveKinematics kinematics,
      Rotation2d gyroAngle,
      double leftDistanceMeters,
      double rightDistanceMeters,
      Pose2d initialPoseMeters,
      Matrix<N3, N1> stateStdDevs,
      Matrix<N3, N1> visionMeasurementStdDevs) {
    m_kinematics = kinematics;
    m_odometry =
        new DifferentialDriveOdometry(
            gyroAngle, leftDistanceMeters, rightDistanceMeters, initialPoseMeters);

    for (int i = 0; i < 3; ++i) {
      m_q.set(i, 0, stateStdDevs.get(i, 0) * stateStdDevs.get(i, 0));
    }

    // Initialize vision R
    setVisionMeasurementStdDevs(visionMeasurementStdDevs);
  }

  /**
   * Sets the pose estimator's trust in global measurements. This might be used to change trust in
   * vision measurements after the autonomous period, or to change trust as distance to a vision
   * target increases.
   *
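   * <p>For example, to trust vision less as the distance to a target grows (the scaling factor is
   * illustrative, and {@code distanceToTargetMeters} is assumed to come from your vision
   * pipeline):
   *
   * <pre>{@code
   * double d = distanceToTargetMeters;
   * estimator.setVisionMeasurementStdDevs(VecBuilder.fill(0.1 * d, 0.1 * d, 0.1 * d));
   * }</pre>
   *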
   * @param visionMeasurementStdDevs Standard deviations of the vision measurements. Increase these
   *     numbers to trust global measurements from vision less. This matrix is in the form [x, y,
   *     theta]ᵀ, with units in meters and radians.
   */
  public void setVisionMeasurementStdDevs(Matrix<N3, N1> visionMeasurementStdDevs) {
    var r = new double[3];
    for (int i = 0; i < 3; ++i) {
      r[i] = visionMeasurementStdDevs.get(i, 0) * visionMeasurementStdDevs.get(i, 0);
    }

    // Solve for closed form Kalman gain for continuous Kalman filter with A = 0
    // and C = I. See wpimath/algorithms.md.
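    // In scalar form, each diagonal entry is k = q / (q + sqrt(q * r)): k approaches 1 as the
    // vision variance r approaches 0 (trust vision fully), and k approaches 0 as r grows
    // (ignore vision).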
    for (int row = 0; row < 3; ++row) {
      if (m_q.get(row, 0) == 0.0) {
        m_visionK.set(row, row, 0.0);
      } else {
        m_visionK.set(
            row, row, m_q.get(row, 0) / (m_q.get(row, 0) + Math.sqrt(m_q.get(row, 0) * r[row])));
      }
    }
  }

  /**
   * Resets the robot's position on the field.
   *
   * <p>The gyroscope angle does not need to be reset in the user's robot code. The library
   * automatically takes care of offsetting the gyro angle.
   *
   * @param gyroAngle The angle reported by the gyroscope.
   * @param leftPositionMeters The distance traveled by the left encoder.
   * @param rightPositionMeters The distance traveled by the right encoder.
   * @param poseMeters The position on the field that your robot is at.
   */
  public void resetPosition(
      Rotation2d gyroAngle,
      double leftPositionMeters,
      double rightPositionMeters,
      Pose2d poseMeters) {
    // Reset the odometry state and clear any stale samples from the pose buffer
    m_odometry.resetPosition(gyroAngle, leftPositionMeters, rightPositionMeters, poseMeters);
    m_poseBuffer.clear();
  }

  /**
   * Gets the estimated robot pose.
   *
   * @return The estimated robot pose in meters.
   */
  public Pose2d getEstimatedPosition() {
    return m_odometry.getPoseMeters();
  }

  /**
   * Adds a vision measurement to the Kalman Filter. This will correct the odometry pose estimate
   * while still accounting for measurement noise.
   *
   * <p>This method can be called as infrequently as you want, as long as you are calling {@link
   * DifferentialDrivePoseEstimator#update} every loop.
   *
   * <p>To promote stability of the pose estimate and make it robust to bad vision data, we
   * recommend only adding vision measurements that are already within one meter or so of the
   * current pose estimate.
   *
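   * <p>For example, with a camera that reports its own capture latency ({@code
   * camera.getLatencySeconds()} is a hypothetical accessor; substitute whatever your vision
   * solution provides):
   *
   * <pre>{@code
   * estimator.addVisionMeasurement(
   *     visionRobotPoseMeters, Timer.getFPGATimestamp() - camera.getLatencySeconds());
   * }</pre>
   *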
   * @param visionRobotPoseMeters The pose of the robot as measured by the vision camera.
   * @param timestampSeconds The timestamp of the vision measurement in seconds. Note that if you
   *     don't use your own time source by calling {@link
   *     DifferentialDrivePoseEstimator#updateWithTime(double,Rotation2d,double,double)}, then you
   *     must use a timestamp with an epoch since FPGA startup (i.e., the epoch of this timestamp is
   *     the same epoch as {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()}). This means that
   *     you should use {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()} as your time source
   *     or sync the epochs.
   */
  public void addVisionMeasurement(Pose2d visionRobotPoseMeters, double timestampSeconds) {
    // Step 1: Get the odometry pose measured at the moment the vision measurement was made.
    var sample = m_poseBuffer.getSample(timestampSeconds);

    if (sample.isEmpty()) {
      return;
    }

    // Step 2: Measure the twist between the odometry pose and the vision pose.
    var twist = sample.get().poseMeters.log(visionRobotPoseMeters);

    // Step 3: We should not trust the twist entirely, so instead we scale this twist by a Kalman
    // gain matrix representing how much we trust vision measurements compared to our current pose.
    var kTimesTwist = m_visionK.times(VecBuilder.fill(twist.dx, twist.dy, twist.dtheta));

    // Step 4: Convert back to Twist2d.
    var scaledTwist =
        new Twist2d(kTimesTwist.get(0, 0), kTimesTwist.get(1, 0), kTimesTwist.get(2, 0));

    // Step 5: Reset Odometry to state at sample with vision adjustment.
    m_odometry.resetPosition(
        sample.get().gyroAngle,
        sample.get().leftMeters,
        sample.get().rightMeters,
        sample.get().poseMeters.exp(scaledTwist));

    // Step 6: Replay odometry inputs between sample time and latest recorded sample to update the
    // pose buffer and correct odometry.
    for (Map.Entry<Double, InterpolationRecord> entry :
        m_poseBuffer.getInternalBuffer().tailMap(timestampSeconds).entrySet()) {
      updateWithTime(
          entry.getKey(),
          entry.getValue().gyroAngle,
          entry.getValue().leftMeters,
          entry.getValue().rightMeters);
    }
  }

  /**
   * Adds a vision measurement to the Kalman Filter. This will correct the odometry pose estimate
   * while still accounting for measurement noise.
   *
   * <p>This method can be called as infrequently as you want, as long as you are calling {@link
   * DifferentialDrivePoseEstimator#update} every loop.
   *
   * <p>To promote stability of the pose estimate and make it robust to bad vision data, we
   * recommend only adding vision measurements that are already within one meter or so of the
   * current pose estimate.
   *
   * <p>Note that the vision measurement standard deviations passed into this method will continue
   * to apply to future measurements until a subsequent call to {@link
   * DifferentialDrivePoseEstimator#setVisionMeasurementStdDevs(Matrix)} or this method.
   *
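   * <p>For example, to down-weight a single measurement whose target is far away ({@code
   * distanceToTargetMeters} is assumed to come from your vision pipeline):
   *
   * <pre>{@code
   * estimator.addVisionMeasurement(
   *     visionRobotPoseMeters,
   *     timestampSeconds,
   *     VecBuilder.fill(0.5 * distanceToTargetMeters, 0.5 * distanceToTargetMeters, 1.0));
   * }</pre>
   *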
   * @param visionRobotPoseMeters The pose of the robot as measured by the vision camera.
   * @param timestampSeconds The timestamp of the vision measurement in seconds. Note that if you
   *     don't use your own time source by calling {@link
   *     DifferentialDrivePoseEstimator#updateWithTime(double,Rotation2d,double,double)}, then you
   *     must use a timestamp with an epoch since FPGA startup (i.e., the epoch of this timestamp is
   *     the same epoch as {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()}). This means that
   *     you should use {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp()} as your time source
   *     in this case.
   * @param visionMeasurementStdDevs Standard deviations of the vision pose measurement (x position
   *     in meters, y position in meters, and heading in radians). Increase these numbers to trust
   *     the vision pose measurement less.
   */
  public void addVisionMeasurement(
      Pose2d visionRobotPoseMeters,
      double timestampSeconds,
      Matrix<N3, N1> visionMeasurementStdDevs) {
    setVisionMeasurementStdDevs(visionMeasurementStdDevs);
    addVisionMeasurement(visionRobotPoseMeters, timestampSeconds);
  }

  /**
   * Updates the pose estimator with wheel encoder and gyro information. This should be called every
   * loop.
   *
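   * <p>For example, from a drivetrain subsystem's {@code periodic()} method ({@code gyro} and the
   * encoders are placeholders for your own hardware):
   *
   * <pre>{@code
   * public void periodic() {
   *   estimator.update(gyro.getRotation2d(), leftEncoder.getDistance(), rightEncoder.getDistance());
   * }
   * }</pre>
   *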
   * @param gyroAngle The current gyro angle.
   * @param distanceLeftMeters The total distance traveled by the left wheel in meters.
   * @param distanceRightMeters The total distance traveled by the right wheel in meters.
   * @return The estimated pose of the robot in meters.
   */
  public Pose2d update(
      Rotation2d gyroAngle, double distanceLeftMeters, double distanceRightMeters) {
    return updateWithTime(
        WPIUtilJNI.now() * 1.0e-6, gyroAngle, distanceLeftMeters, distanceRightMeters);
  }

  /**
   * Updates the pose estimator with wheel encoder and gyro information. This should be called every
   * loop.
   *
   * @param currentTimeSeconds Time at which this method was called, in seconds.
   * @param gyroAngle The current gyro angle.
   * @param distanceLeftMeters The total distance traveled by the left wheel in meters.
   * @param distanceRightMeters The total distance traveled by the right wheel in meters.
   * @return The estimated pose of the robot in meters.
   */
  public Pose2d updateWithTime(
      double currentTimeSeconds,
      Rotation2d gyroAngle,
      double distanceLeftMeters,
      double distanceRightMeters) {
    m_odometry.update(gyroAngle, distanceLeftMeters, distanceRightMeters);
    m_poseBuffer.addSample(
        currentTimeSeconds,
        new InterpolationRecord(
            getEstimatedPosition(), gyroAngle, distanceLeftMeters, distanceRightMeters));

    return getEstimatedPosition();
  }

  /**
   * Represents an odometry record. The record contains the inputs provided as well as the pose
   * that was observed based on these inputs.
   */
  private class InterpolationRecord implements Interpolatable<InterpolationRecord> {
    // The pose observed given the current sensor inputs and the previous pose.
    private final Pose2d poseMeters;

    // The current gyro angle.
    private final Rotation2d gyroAngle;

    // The distance traveled by the left encoder.
    private final double leftMeters;

    // The distance traveled by the right encoder.
    private final double rightMeters;

    /**
     * Constructs an InterpolationRecord with the specified parameters.
     *
     * @param poseMeters The pose observed given the current sensor inputs and the previous pose.
     * @param gyro The current gyro angle.
     * @param leftMeters The distance traveled by the left encoder.
     * @param rightMeters The distance traveled by the right encoder.
     */
    private InterpolationRecord(
        Pose2d poseMeters, Rotation2d gyro, double leftMeters, double rightMeters) {
      this.poseMeters = poseMeters;
      this.gyroAngle = gyro;
      this.leftMeters = leftMeters;
      this.rightMeters = rightMeters;
    }

    /**
     * Returns the interpolated record. This object is assumed to be the starting position, or
     * lower bound.
     *
     * @param endValue The upper bound, or end.
     * @param t How far between the lower and upper bound we are. This should be bounded in [0, 1].
     * @return The interpolated value.
     */
    @Override
    public InterpolationRecord interpolate(InterpolationRecord endValue, double t) {
      if (t < 0) {
        return this;
      } else if (t >= 1) {
        return endValue;
      } else {
        // Find the new left distance.
        var leftLerp = MathUtil.interpolate(this.leftMeters, endValue.leftMeters, t);

        // Find the new right distance.
        var rightLerp = MathUtil.interpolate(this.rightMeters, endValue.rightMeters, t);

        // Find the new gyro angle.
        var gyroLerp = gyroAngle.interpolate(endValue.gyroAngle, t);

        // Create a twist to represent this change based on the interpolated sensor inputs, using
        // the gyro (rather than the encoders) for the change in heading, as odometry does.
        Twist2d twist = m_kinematics.toTwist2d(leftLerp - leftMeters, rightLerp - rightMeters);
        twist.dtheta = gyroLerp.minus(gyroAngle).getRadians();

        return new InterpolationRecord(poseMeters.exp(twist), gyroLerp, leftLerp, rightLerp);
      }
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (!(obj instanceof InterpolationRecord)) {
        return false;
      }
      InterpolationRecord record = (InterpolationRecord) obj;
      return Objects.equals(gyroAngle, record.gyroAngle)
          && Double.compare(leftMeters, record.leftMeters) == 0
          && Double.compare(rightMeters, record.rightMeters) == 0
          && Objects.equals(poseMeters, record.poseMeters);
    }

    @Override
    public int hashCode() {
      return Objects.hash(gyroAngle, leftMeters, rightMeters, poseMeters);
    }
  }
}