Fixing issues with vision system
	~ Various changes in the GetDistanceFromTarget command
	+ The VisionSystem subsystem now contains a CameraServer
Endoman123 committed Jan 21, 2017
1 parent e0e7704 commit 39671c9
Showing 5 changed files with 85 additions and 25 deletions.
9 changes: 9 additions & 0 deletions bash.exe.stackdump
@@ -0,0 +1,9 @@
Stack trace:
Frame Function Args
000005F6128 001800720DE (0018026270D, 00180217E46, 000005F6128, 000005F5020)
000005F6128 00180046E02 (000005F6088, 00000000000, 00000000000, 000FFFFFFFF)
000005F6128 00180046E42 (001802627C9, 000005F5FD8, 000005F6128, 00000000000)
000005F6128 001800BE61F (00000000000, 00000000000, 00000000000, 00000000000)
000005F6128 001800BE83F (000005F6150, 00000000000, 00000000000, 00000000000)
000005F61D0 001800BFAFA (000005F6150, 00000000000, 00000000000, 00000000000)
End of stack trace
2 changes: 1 addition & 1 deletion src/org/usfirst/frc/team1089/robot/Robot.java
@@ -23,7 +23,7 @@ public class Robot extends IterativeRobot {
// Declare subsystems (public static so there is only ever one instance)
public static ExampleSubsystem exampleSubsystem;
public static DriveTrain driveTrain;
public static VisionSystem targetingSystem;
public static VisionSystem visionSystem;
public static OI oi;

Command autonomousCommand;
82 changes: 70 additions & 12 deletions src/org/usfirst/frc/team1089/robot/commands/GetDistanceFromTarget.java
@@ -1,18 +1,20 @@
package org.usfirst.frc.team1089.robot.commands;

import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.vision.VisionThread;

import org.opencv.core.MatOfPoint;
import org.opencv.core.Rect;
import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;
import org.usfirst.frc.team1089.robot.Robot;
import org.usfirst.frc.team1089.robot.util.GRIPPipeline;

import edu.wpi.cscore.CvSink;
import edu.wpi.cscore.CvSource;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.command.Command;


public class GetDistanceFromTarget extends Command {
private VisionThread vThread;
private Thread vThread;
private Object imgLock;
private GRIPPipeline pipeline;
private double targetWidth, targetHeight;

private final int NUM_TARGETS = 2;
@@ -25,18 +27,70 @@ public class GetDistanceFromTarget extends Command {

public GetDistanceFromTarget() {
// Use requires() here to declare subsystem dependencies
requires(Robot.targetingSystem);
requires(Robot.visionSystem);
}

// Called just before this Command runs the first time
@Override
protected void initialize() {
// Create synchronization object
imgLock = new Object();

// Create pipeline
pipeline = new GRIPPipeline();

vThread = new Thread(() -> {
// Get a CvSink. This will capture Mats from the camera
CvSink cvSink = CameraServer.getInstance().getVideo();
// Set up a CvSource. This will send images back to the Dashboard
CvSource outputStream = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

// Mats are very memory expensive. Let's reuse this Mat.
Mat mat = new Mat();

// The loop condition cannot simply be 'true'; the program would never
// exit if it were. Checking Thread.interrupted() lets the robot stop
// this thread when restarting robot code or deploying.
while (!Thread.interrupted()) {
// Tell the CvSink to grab a frame from the camera and put it
// in the source mat. If there is an error notify the output.
if (cvSink.grabFrame(mat) == 0) {
// Send the error to the output stream.
outputStream.notifyError(cvSink.getError());
// skip the rest of the current iteration
continue;
}
// Let's process the image for the target
pipeline.process(mat);

// Only if we find the correct number of targets
// will we even bother doing math.
MatOfPoint[] contours = pipeline.filterContoursOutput().toArray(new MatOfPoint[1]);
if (contours.length == NUM_TARGETS) {
Rect
target1 = Imgproc.boundingRect(contours[0]),
target2 = Imgproc.boundingRect(contours[1]);

targetWidth = target2.x + target2.width - target1.x; // Full width between targets
targetHeight = (target1.height + target2.height) / 2; // Get the average height

// Draw a rectangle around the found targets so we can see the result.
Imgproc.rectangle(mat,
new Point(target1.x, target1.y),
new Point(target2.x, target2.y + target2.height),
new Scalar(255, 0, 0), 2);
} else {
System.out.println("Targets not found! Number of targets found: " + contours.length);
}

// Give the output stream a new image to display
outputStream.putFrame(mat);
}
});

// Create VisionThread that allows
// image processing off the main robot thread
vThread = new VisionThread(Robot.targetingSystem.AXIS_CAMERA, new GRIPPipeline(), pipeline -> {
/*vThread = new VisionThread(Robot.targetingSystem.AXIS_CAMERA, new GRIPPipeline(), pipeline -> {
if (pipeline.filterContoursOutput().size() == NUM_TARGETS) {
MatOfPoint[] contours = pipeline.filterContoursOutput().toArray(new MatOfPoint[1]);
Rect
@@ -48,9 +102,13 @@ protected void initialize() {
targetHeight = (target1.height + target2.height) / 2;
}
}
});
});*/



// Begin the thread
// Make the thread a daemon and
// begin the thread
vThread.setDaemon(true);
vThread.start();
}

@@ -60,9 +118,9 @@ protected void execute() {
// Using proportions, we can convert the target width in pixels
// to the target width in feet.
// TODO: Fix this; it does not work.
double dist = (TARGET_WIDTH_INCHES / IN_TO_FT) * (Robot.targetingSystem.IMG_WIDTH / targetWidth);
double dist = (TARGET_WIDTH_INCHES / IN_TO_FT) * (Robot.visionSystem.IMG_WIDTH / targetWidth);
dist /= 2.0;
dist /= Math.tan(Math.toRadians(Robot.targetingSystem.HFOV / 2));
dist /= Math.tan(Math.toRadians(Robot.visionSystem.HFOV / 2));

System.out.println("Distance (u.) = " + dist);
}
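
The distance estimate in execute() above is a similar-triangles proportion: with a horizontal field of view HFOV and a frame IMG_WIDTH pixels wide, the frame spans 2 * d * tan(HFOV / 2) feet at distance d, while a target of known real width covers targetWidth pixels, giving d = (width in feet) * IMG_WIDTH / (2 * targetWidth * tan(HFOV / 2)). Below is a minimal standalone sketch of that derivation; the constant values are illustrative assumptions, not necessarily the robot's actual numbers.

public class DistanceEstimateSketch {
    // Illustrative assumptions; the command's actual constants may differ.
    static final double TARGET_WIDTH_FT = 20.0 / 12.0; // assumed ~20 in. span between targets, in feet
    static final double IMG_WIDTH_PX = 320;            // resolution set in VisionSystem
    static final double HFOV_DEG = 47.0;               // assumed horizontal field of view, degrees

    // Similar triangles: the frame covers 2 * d * tan(HFOV / 2) feet at distance d,
    // and the target covers targetWidthPx / IMG_WIDTH_PX of the frame, so
    // d = TARGET_WIDTH_FT * IMG_WIDTH_PX / (2 * targetWidthPx * tan(HFOV / 2)).
    static double distanceFeet(double targetWidthPx) {
        return (TARGET_WIDTH_FT * IMG_WIDTH_PX)
                / (2.0 * targetWidthPx * Math.tan(Math.toRadians(HFOV_DEG / 2.0)));
    }

    public static void main(String[] args) {
        // Example: the target pair measured 80 px wide in the frame.
        System.out.println("Distance (ft) = " + distanceFeet(80));
    }
}

Algebraically this matches the code above; if the TODO still stands, the measured targetWidth (which assumes contours[0] is the left-hand target) is one place worth checking rather than the proportion itself.
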
15 changes: 4 additions & 11 deletions src/org/usfirst/frc/team1089/robot/subsystems/VisionSystem.java
@@ -1,22 +1,13 @@
package org.usfirst.frc.team1089.robot.subsystems;

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;
import org.usfirst.frc.team1089.robot.commands.GetDistanceFromTarget;
import org.usfirst.frc.team1089.robot.util.GRIPPipeline;

import edu.wpi.cscore.AxisCamera;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.command.Subsystem;
import edu.wpi.first.wpilibj.networktables.NetworkTable;
import edu.wpi.first.wpilibj.vision.VisionPipeline;
import edu.wpi.first.wpilibj.vision.VisionThread;


public class VisionSystem extends Subsystem {
public final CameraServer CAM_SERVER;
public final AxisCamera AXIS_CAMERA;
public final int
IMG_WIDTH = 320,
@@ -26,8 +17,10 @@ public class VisionSystem extends Subsystem {
// Initializes the AXIS camera and
// sets the proper resolution
public VisionSystem() {
// Set up camera server
CAM_SERVER = CameraServer.getInstance();
// Set up axis camera
AXIS_CAMERA = CameraServer.getInstance().addAxisCamera("axis-1089.local");
AXIS_CAMERA = CAM_SERVER.addAxisCamera("axis-1089.local");
AXIS_CAMERA.setResolution(IMG_WIDTH, IMG_HEIGHT);
}

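
With the subsystem now holding both the CameraServer and the AxisCamera, a command can also pin its CvSink to that specific camera instead of the no-argument getVideo() used in GetDistanceFromTarget, which makes the frame source explicit. A hedged sketch, assuming the getVideo(VideoSource) overload is available in this WPILib release:

import org.opencv.core.Mat;
import org.usfirst.frc.team1089.robot.Robot;

import edu.wpi.cscore.CvSink;
import edu.wpi.first.wpilibj.CameraServer;

public class VisionSinkSketch {
    // Open a sink bound to the subsystem's Axis camera (assumed overload).
    public static CvSink openSink() {
        return CameraServer.getInstance().getVideo(Robot.visionSystem.AXIS_CAMERA);
    }

    // grabFrame() returns 0 on error or timeout, otherwise a frame timestamp.
    public static boolean readFrame(CvSink sink, Mat frame) {
        return sink.grabFrame(frame) != 0;
    }
}
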
2 changes: 1 addition & 1 deletion src/org/usfirst/frc/team1089/robot/util/GRIPPipeline.java
@@ -36,7 +36,7 @@ public class GRIPPipeline implements VisionPipeline {
/**
* This is the primary method that runs the entire pipeline and updates the outputs.
*/
@Override public void process(Mat source0) {
public void process(Mat source0) {
// Step HSL_Threshold0:
Mat hslThresholdInput = source0;
double[] hslThresholdHue = {79.31654676258992, 109.35153583617746};
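
The hue bounds visible in the hunk above belong to the pipeline's HSL threshold step. In rough terms, such a step converts the frame to HLS space and masks it with Core.inRange; the sketch below is an approximation under that assumption, not the generated code verbatim, and the saturation and luminance bounds are placeholders because they fall outside this hunk.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class HslThresholdSketch {
    // Approximate shape of an HSL threshold step; bounds other than hue are placeholders.
    public static Mat hslThreshold(Mat input, double[] hue, double[] sat, double[] lum) {
        Mat out = new Mat();
        Imgproc.cvtColor(input, out, Imgproc.COLOR_BGR2HLS);
        // OpenCV's HLS channel order is hue, luminance, saturation.
        Core.inRange(out,
                new Scalar(hue[0], lum[0], sat[0]),
                new Scalar(hue[1], lum[1], sat[1]),
                out);
        return out;
    }

    // Usage: hslThreshold(frame, new double[]{79.3, 109.4}, sat, lum) yields a binary
    // mask that a later contour-finding step (such as the pipeline's) could consume.
}
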
