AprilTag Integration
Combining VingVing Robot with AprilTag vision for precise positioning
Vision-Assisted Navigation
AprilTags provide absolute field positions. Combine them with VingVing Robot's path following for centimeter-accurate autonomous routines that adapt to actual field conditions.
Example 1: AprilTag Detection
AprilTagDetector.java
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.vingvingrobot.Robot;
import com.vingvingrobot.follower.Follower;

import java.util.List;

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
@Autonomous(name = "AprilTag Detection")
public class AprilTagDetector extends LinearOpMode {
Robot robot;
Follower follower;
AprilTagProcessor aprilTag;
VisionPortal visionPortal;
@Override
public void runOpMode() {
    // Initialize VingVing Robot hardware, localizer, and path follower.
    robot = new Robot(hardwareMap, RobotConfig.class);
    robot.initializeLocalizer(hardwareMap);
    follower = new Follower(hardwareMap);

    // Initialize AprilTag processor. Note: the FTC SDK's default output
    // units for ftcPose are inches and DEGREES.
    aprilTag = new AprilTagProcessor.Builder()
            .build();

    visionPortal = new VisionPortal.Builder()
            .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
            .addProcessor(aprilTag)
            .build();

    telemetry.addLine("Waiting for camera...");
    telemetry.update();

    // Stream detections to telemetry during init so the drive team can
    // confirm tag visibility before the match starts.
    while (!isStarted() && !isStopRequested()) {
        List<AprilTagDetection> detections = aprilTag.getDetections();
        if (!detections.isEmpty()) {
            telemetry.addLine("AprilTags Detected:");
            for (AprilTagDetection detection : detections) {
                telemetry.addData("Tag ID", detection.id);
                telemetry.addData("Distance", "%.1f in", detection.ftcPose.range);
                // BUG FIX: ftcPose.bearing is already in degrees by default;
                // wrapping it in Math.toDegrees() re-scaled a degree value as
                // if it were radians, producing nonsense readings.
                telemetry.addData("Bearing", "%.1f°", detection.ftcPose.bearing);
            }
        } else {
            telemetry.addLine("No AprilTags detected");
        }
        telemetry.update();
    }

    // BUG FIX: if stop was requested during init, the original fell through
    // waitForStart() and still tried to navigate. Bail out instead.
    if (isStopRequested()) {
        return;
    }

    // Use AprilTag to determine which path to follow
    AprilTagDetection targetTag = getDetection(2); // Look for tag ID 2

    if (targetTag != null) {
        telemetry.addLine("Tag found! Navigating...");
        telemetry.update(); // flush before driving so the message is visible
        navigateToTag(targetTag);
    } else {
        telemetry.addLine("Tag not found - using default path");
        telemetry.update();
        // Fallback path
    }
}
// Returns the first current detection whose fiducial ID equals targetId,
// or null when that tag is not presently in view.
private AprilTagDetection getDetection(int targetId) {
    for (AprilTagDetection candidate : aprilTag.getDetections()) {
        if (candidate.id == targetId) {
            return candidate;
        }
    }
    return null;
}
// Drives the robot toward the tag by offsetting the current localizer pose
// with the tag's camera-relative position, then following a straight line.
private void navigateToTag(AprilTagDetection detection) {
// Convert AprilTag position to field coordinates
// (This is simplified - actual implementation depends on camera mounting)
// NOTE(review): ftcPose.x/y are offsets expressed in the camera's frame,
// not the field's. Adding them directly to the localizer's field
// coordinates is only correct when the camera frame happens to align with
// the field axes; a full solution rotates the offset by the robot's
// heading — confirm against the actual camera mounting.
double targetX = robot.getLocalizer().getX() + detection.ftcPose.x;
double targetY = robot.getLocalizer().getY() + detection.ftcPose.y;
// Straight-line path to the computed target, interpolating the heading
// linearly toward 90°.
follower.followPath(
new BezierLine(new Point(targetX, targetY, Point.FIELD))
.setLinearHeadingInterpolation(Math.toRadians(90))
);
// Drive loop: refresh localizer and follower every cycle, forwarding the
// follower's motor powers until the path completes or the op mode stops.
while (!follower.isPathComplete() && opModeIsActive()) {
robot.update();
follower.update();
robot.setDrivePowers(follower.getMotorPowers());
}
}
}
Example 2: Vision-Corrected Navigation
VisionCorrectedAuto.java
@Autonomous(name = "Vision-Corrected Auto")
public class VisionCorrectedAuto extends LinearOpMode {
Robot robot;
Follower follower;
AprilTagProcessor aprilTag;
VisionPortal visionPortal;
@Override
public void runOpMode() {
    // Initialize VingVing Robot hardware, localizer, and path follower.
    robot = new Robot(hardwareMap, RobotConfig.class);
    robot.initializeLocalizer(hardwareMap);
    follower = new Follower(hardwareMap);

    // Setup vision
    initializeVision();

    waitForStart();

    // Navigate near the backdrop on odometry alone; the final approach
    // below is corrected with vision.
    follower.followPath(
            new BezierCurve(
                    new Point(0, 0, Point.CARTESIAN),
                    new Point(0, 0, Point.CARTESIAN),
                    new Point(24, 12, Point.CARTESIAN),
                    new Point(36, 36, Point.CARTESIAN),
                    new Point(42, 48, Point.CARTESIAN)
            ).setTangentHeadingInterpolation()
            .addCallback(0.7, () -> {
                // Slow down for vision processing
                follower.setMaxVelocity(0.4);
            })
    );

    // Execute initial approach
    driveUntilPathComplete();

    // Stop and give the camera a moment to get a clear, stable frame.
    robot.setDrivePowers(new double[]{0, 0, 0, 0});
    sleep(500);

    // Get precise position from AprilTag
    AprilTagDetection backdropTag = getDetection(5); // Backdrop tag

    if (backdropTag != null) {
        telemetry.addLine("✓ AprilTag located - correcting position");

        // Calculate correction based on tag.
        // NOTE(review): ftcPose.x/y are camera-frame offsets; adding them to
        // field coordinates assumes the camera axes align with the field —
        // confirm against the camera mounting.
        double correctionX = backdropTag.ftcPose.x;
        double correctionY = backdropTag.ftcPose.y;

        // Final approach with correction
        follower.followPath(
                new BezierLine(
                        new Point(
                                robot.getLocalizer().getX() + correctionX,
                                robot.getLocalizer().getY() + correctionY,
                                Math.toRadians(90),
                                Point.FIELD
                        )
                ).setConstantHeadingInterpolation(Math.toRadians(90))
                .addDeceleration(0.5, 1.0, 0.2)
        );

        driveUntilPathComplete();

        telemetry.addLine("✓ Precise positioning complete!");
    } else {
        telemetry.addLine("⚠ AprilTag not found - using odometry");
    }
    telemetry.update();
}

// Drives the robot along the follower's active path until it completes or
// the op mode is stopped. Extracted because the identical loop appeared
// verbatim for both the approach and the correction legs.
private void driveUntilPathComplete() {
    while (!follower.isPathComplete() && opModeIsActive()) {
        robot.update();
        follower.update();
        robot.setDrivePowers(follower.getMotorPowers());
    }
}
// Configures the AprilTag processor (with on-screen debug overlays) and
// opens a vision portal streaming from the webcam named "Webcam 1".
private void initializeVision() {
    AprilTagProcessor.Builder tagBuilder = new AprilTagProcessor.Builder();
    tagBuilder.setDrawAxes(true);
    tagBuilder.setDrawCubeProjection(true);
    tagBuilder.setDrawTagOutline(true);
    aprilTag = tagBuilder.build();

    WebcamName camera = hardwareMap.get(WebcamName.class, "Webcam 1");
    visionPortal = new VisionPortal.Builder()
            .setCamera(camera)
            .addProcessor(aprilTag)
            .build();
}
// Looks up the requested fiducial ID in the freshest detection list;
// returns null when the tag is not currently visible.
private AprilTagDetection getDetection(int targetId) {
    AprilTagDetection match = null;
    for (AprilTagDetection d : aprilTag.getDetections()) {
        if (d.id == targetId) {
            match = d;
            break;
        }
    }
    return match;
}
}
Example 3: Continuous Vision Tracking
ContinuousTracking.java
@Autonomous(name = "Continuous Vision Tracking")
public class ContinuousTracking extends LinearOpMode {
Robot robot;
Follower follower;
AprilTagProcessor aprilTag;
VisionPortal visionPortal;
@Override
public void runOpMode() {
    // Initialize VingVing Robot hardware, localizer, and path follower.
    robot = new Robot(hardwareMap, RobotConfig.class);
    robot.initializeLocalizer(hardwareMap);
    follower = new Follower(hardwareMap);

    initializeVision();

    waitForStart();

    // Approach with continuous vision correction
    follower.followPath(
            new BezierCurve(
                    new Point(0, 0, Point.CARTESIAN),
                    new Point(0, 0, Point.CARTESIAN),
                    new Point(24, 12, Point.CARTESIAN),
                    new Point(36, 36, Point.CARTESIAN),
                    new Point(44, 50, Math.toRadians(90), Point.CARTESIAN)
            ).setLinearHeadingInterpolation()
            .addDeceleration(0.6, 1.0, 0.3)
    );

    // Path following with real-time vision correction
    while (!follower.isPathComplete() && opModeIsActive()) {
        robot.update();

        // Check for AprilTag updates once per loop iteration.
        AprilTagDetection tag = getDetection(5);
        if (tag != null && tag.ftcPose.range < 24) { // Within 24 inches
            // Apply minor correction to path
            double lateralError = tag.ftcPose.y;

            // Adjust follower based on vision
            if (Math.abs(lateralError) > 1.0) {
                // Shift target slightly
                telemetry.addData("Vision Correction", "%.1f in", lateralError);
            }
        }

        follower.update();
        robot.setDrivePowers(follower.getMotorPowers());

        // Telemetry
        telemetry.addData("Progress", "%.0f%%", follower.getPathProgress() * 100);
        if (tag != null) {
            telemetry.addData("Tag Range", "%.1f in", tag.ftcPose.range);
            // BUG FIX: ftcPose.bearing is already in degrees by default;
            // the original Math.toDegrees() call double-converted it,
            // scaling a degree value as if it were radians.
            telemetry.addData("Tag Bearing", "%.1f°", tag.ftcPose.bearing);
        }
        telemetry.update();
    }

    telemetry.addLine("✓ Vision-guided navigation complete!");
    telemetry.update();
}
// Builds a default-configured AprilTag processor and attaches it to a
// vision portal streaming from the webcam named "Webcam 1".
private void initializeVision() {
    aprilTag = new AprilTagProcessor.Builder().build();

    VisionPortal.Builder portalBuilder = new VisionPortal.Builder();
    portalBuilder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
    portalBuilder.addProcessor(aprilTag);
    visionPortal = portalBuilder.build();
}
// Linear search of the current detections for the requested fiducial ID;
// null means the tag is not in view right now.
private AprilTagDetection getDetection(int targetId) {
    List<AprilTagDetection> current = aprilTag.getDetections();
    for (int i = 0; i < current.size(); i++) {
        if (current.get(i).id == targetId) {
            return current.get(i);
        }
    }
    return null;
}
}
Vision Integration Tips
💡 Use Vision for Final Positioning
Odometry gets you close, vision gets you perfect. Navigate near the target with paths, then use AprilTags for final centimeter-accurate positioning.
💡 Slow Down for Vision
Image processing needs time. Add deceleration zones when approaching tags so the camera has clear, stable views for detection.
💡 Always Have a Fallback
Vision can fail (lighting, obstructions, alignment). Always have odometry-only paths as backup when tags aren't detected.