Detector based on OpenCV working
parent 8551abfc5d
commit 50b579705e
@@ -11,7 +11,7 @@ abstract public class Detector {
     private boolean extendDetection = false;
 
     // Countdown parameters
-    private int COUNTDOWN_TIME = 1000; // milliseconds
+    private int COUNTDOWN_TIME = 3000; // milliseconds
     private int COUNTDOWN_POLLING_TIME = 100; // milliseconds
 
     /** Constructor - takes context of current activity */
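Side note: this hunk only raises COUNTDOWN_TIME from 1 to 3 seconds while the polling interval stays at 100 ms. How the two constants are consumed is not visible in this diff; the following is a minimal sketch of a typical Handler-based polling loop next to those fields, with onCountdownFinished() as an assumed hook, not code from this commit.

    // Hypothetical polling loop (not part of this commit): checks every
    // COUNTDOWN_POLLING_TIME ms whether COUNTDOWN_TIME ms have elapsed.
    private final android.os.Handler handler =
            new android.os.Handler(android.os.Looper.getMainLooper());

    private void startCountdown() {
        final long start = android.os.SystemClock.elapsedRealtime();
        handler.post(new Runnable() {
            @Override
            public void run() {
                long elapsed = android.os.SystemClock.elapsedRealtime() - start;
                if (elapsed >= COUNTDOWN_TIME) {
                    onCountdownFinished();                            // assumed hook
                } else {
                    handler.postDelayed(this, COUNTDOWN_POLLING_TIME); // poll again
                }
            }
        });
    }

    private void onCountdownFinished() { /* assumed hook, e.g. clear the alarm state */ }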
@@ -37,12 +37,8 @@ import java.util.concurrent.ExecutionException;
 
 /**
  * Video Detector inherits some methods from abstract Detector class (more info there)
  *
  *
  * USE FROM MAIN ACTIVITY:
  *
  * VideoDetector vd = new VideoDetector(this);
  *
  * */
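For orientation, a minimal sketch of the documented usage from a MainActivity. Only the constructor call is taken from the Javadoc above; the layout name and the commented start call are assumptions, since the rest of the public API is not shown in this diff.

    import android.os.Bundle;
    import androidx.appcompat.app.AppCompatActivity;

    public class MainActivity extends AppCompatActivity {

        private VideoDetector vd;

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setContentView(R.layout.activity_main); // assumed layout name
            vd = new VideoDetector(this);           // documented constructor call
            // vd.startDetection();                 // hypothetical start call; see VideoDetector for the real API
        }
    }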
@@ -56,22 +52,18 @@ public class VideoDetector extends Detector {
     private Boolean isDetectionRunning = false;
     // Detection
     private Mat previousImage = null;
     private Mat lastThresh;
     private Mat currentOut;
 
     public ImageView imageView1 = null;
     public ImageView imageView2 = null;
 
     private int frameCnt = 0;
 
     // Parameters
-    private static final float PIXEL_THRESHOLD = 30f; // Luminosity (brightness channel of YUV_420_888)
+    private static final float PIXEL_THRESHOLD = 40f; // Luminosity (brightness channel of YUV_420_888)
     private static final int BLUR_KERNEL_SIZE = 5;
     private static final int DILATE_KERNEL_SIZE = 5;
-    private static final float CONTOUR_THRESHOLD = 100;
-    private static final float ALARM_THRESHOLD = 0.2f; // Percent of pixels changed
+    private static final float CONTOUR_THRESHOLD = 250;
+    private static final float ALARM_THRESHOLD = 0.5f; // Percent of pixels changed
     private static final long START_DELAY = 1000; // milliseconds
     private static final android.util.Size IMAGE_RES = new android.util.Size(640, 480);
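A quick sanity check on what the tightened thresholds mean at the configured IMAGE_RES of 640x480. The numbers below follow from the alarm check percentChanged * 100 > ALARM_THRESHOLD in the analyzer hunk further down; they are back-of-the-envelope arithmetic, not code from this commit.

    int pixelCount = 640 * 480;                                // 307,200 luminance pixels per frame
    float alarmFraction = 0.5f / 100f;                         // ALARM_THRESHOLD of 0.5 means 0.5 % of pixels
    int minChangedPixels = (int) (pixelCount * alarmFraction); // ~1,536 changed pixels now raise a violation
    // With the previous ALARM_THRESHOLD of 0.2f, roughly 614 changed pixels were enough.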
@@ -87,7 +79,6 @@ public class VideoDetector extends Detector {
         this.context = context;
     }
 
     /**
      * Starts the Video Detection
      * */
@@ -148,17 +139,23 @@ public class VideoDetector extends Detector {
         Image image = imageProxy.getImage();
-        assert image != null;
+
+        if (image != null) {
 
             Mat mat = extractYChannel(image);
 
             debugMat(mat, imageView2);
 
             //mat = alternativeProcessImage(mat);
             mat = processImage(mat);
             debugMat(mat, imageView1);
 
-            if (frameCnt <= 5) {
-                reportViolation("Video", 0);
-                frameCnt++;
+            int n = 0;
+            n = countNonZeroPixels(mat);
+
+            int pixelCount = image.getWidth() * image.getHeight();
+            float percentChanged = (float) n / pixelCount;
+            // report violation
+            if (percentChanged * 100 > ALARM_THRESHOLD) {
+                reportViolation("Video", n);
             }
         }
         imageProxy.close();
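The hunk above is the body of a CameraX ImageAnalysis analyzer. For readers unfamiliar with that API, a minimal sketch of how such an analyzer is registered; the resolution matches IMAGE_RES from this diff, while the executor choice and backpressure strategy are assumptions rather than code from this commit.

    // Fragment only; assumes it runs inside the class that owns the per-frame logic above.
    androidx.camera.core.ImageAnalysis imageAnalysis = new androidx.camera.core.ImageAnalysis.Builder()
            .setTargetResolution(new android.util.Size(640, 480))                              // matches IMAGE_RES
            .setBackpressureStrategy(androidx.camera.core.ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
            .build();

    imageAnalysis.setAnalyzer(java.util.concurrent.Executors.newSingleThreadExecutor(), imageProxy -> {
        // per-frame work as in the hunk above: extract the Y channel, diff against the
        // previous frame, count changed pixels, then always close the proxy
        imageProxy.close();
    });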
@@ -170,7 +167,7 @@ public class VideoDetector extends Detector {
         // Specify which Camera to use
         CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
         // Delay till start
-        new CountDownTimer((long)(START_DELAY), 1000){
+        new CountDownTimer((long)(START_DELAY), 100){
             @Override
             public void onTick(long millisUntilFinished) {}
             @Override
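Once the CountDownTimer above fires, the analysis use case still has to be bound to the camera. The binding code itself is outside this diff, so the following is only a sketch of the usual ProcessCameraProvider flow using the back-facing CameraSelector from this hunk; the cast of the stored context to a LifecycleOwner is an assumption.

    // Assumed binding step, not shown in this diff.
    com.google.common.util.concurrent.ListenableFuture<androidx.camera.lifecycle.ProcessCameraProvider> providerFuture =
            androidx.camera.lifecycle.ProcessCameraProvider.getInstance(context);
    providerFuture.addListener(() -> {
        try {
            androidx.camera.lifecycle.ProcessCameraProvider cameraProvider = providerFuture.get();
            cameraProvider.bindToLifecycle(
                    (androidx.lifecycle.LifecycleOwner) context,   // assumes the Activity context is a LifecycleOwner
                    cameraSelector,
                    imageAnalysis);
        } catch (java.util.concurrent.ExecutionException | InterruptedException e) {
            e.printStackTrace();
        }
    }, androidx.core.content.ContextCompat.getMainExecutor(context));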
@@ -180,6 +177,20 @@ public class VideoDetector extends Detector {
         }.start();
     }
 
+    private Mat processImage(Mat image){
+        if (previousImage == null) {
+            previousImage = image;
+            return null;
+        }
+
+        Mat mat = addGaussianBlur(image, BLUR_KERNEL_SIZE);
+        mat = thresholdPixels(mat, previousImage, PIXEL_THRESHOLD);
+        mat = dilateNonZero(mat, DILATE_KERNEL_SIZE);
+        mat = thresholdContourArea(mat, CONTOUR_THRESHOLD);
+
+        previousImage = image.clone();
+        return mat;
+    }
 
     private Mat extractYChannel(@NonNull Image img) {
         ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
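The diff truncates extractYChannel right after the buffer access. As a plausible completion only, here is the standard Y-plane copy into a single-channel OpenCV Mat; the real body is not visible in this commit, and a production implementation must also handle a row stride wider than the image width.

    // Sketch of a typical Y-plane copy; assumes rowStride == width for simplicity.
    private Mat extractYChannelSketch(@NonNull Image img) {
        ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
        byte[] yData = new byte[yBuffer.remaining()];
        yBuffer.get(yData);

        Mat yMat = new Mat(img.getHeight(), img.getWidth(), org.opencv.core.CvType.CV_8UC1);
        yMat.put(0, 0, yData);   // copy the luminance bytes into a single-channel Mat
        return yMat;
    }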
@@ -192,34 +203,6 @@ public class VideoDetector extends Detector {
         return yMat;
     }
 
-    private Mat processImage(Mat currentImage){
-        if (previousImage == null) {
-            previousImage = currentImage;
-            return null;
-        }
-
-        Mat mat = new Mat();
-        currentImage = addGaussianBlur(currentImage, BLUR_KERNEL_SIZE);
-
-        mat = thresholdPixels(currentImage, previousImage, PIXEL_THRESHOLD);
-        mat = dilateNonZero(mat, DILATE_KERNEL_SIZE);
-        mat = thresholdContourArea(mat, CONTOUR_THRESHOLD);
-
-        previousImage = currentImage.clone();
-        return mat;
-    }
-
-    private Mat alternativeProcessImage(Mat currentImage){
-        if (previousImage == null) {
-            previousImage = currentImage;
-            return null;
-        } else if (lastThresh == null) {
-            lastThresh = thresholdPixels(currentImage, previousImage, PIXEL_THRESHOLD);
-            return null;
-        }
-        return null;
-    }
-
     private Mat thresholdPixels(Mat inputMat, Mat previousImage, float luminosityThreshold){
         Mat diffImage = new Mat();
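thresholdPixels is cut off after the diffImage allocation. For readers following the pipeline, this is a hedged sketch of the usual frame-differencing step such a method performs, an absolute difference against the previous frame followed by a binary threshold at the luminosity cutoff; the actual body may differ.

    // Sketch only; mirrors the standard OpenCV frame-differencing idiom.
    private Mat thresholdPixelsSketch(Mat inputMat, Mat previousImage, float luminosityThreshold) {
        Mat diffImage = new Mat();
        Core.absdiff(inputMat, previousImage, diffImage);          // per-pixel |current - previous|

        Mat thresholded = new Mat();
        org.opencv.imgproc.Imgproc.threshold(diffImage, thresholded, luminosityThreshold, 255,
                org.opencv.imgproc.Imgproc.THRESH_BINARY);
        return thresholded;                                        // non-zero where the scene changed
    }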
@@ -264,7 +247,10 @@ public class VideoDetector extends Detector {
     }
 
     private int countNonZeroPixels(Mat inputImage) {
         if (inputImage != null)
             return Core.countNonZero(inputImage);
         else
             return 0;
     }
 
     private void debugMat(Mat mat, ImageView imageView) {
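The diff view ends at the debugMat signature. Since its body is not shown, here is only a sketch of the common Mat-to-ImageView debugging pattern it presumably implements, converting the Mat to a Bitmap and posting it to the UI thread; treat every detail as an assumption.

    // Sketch of a typical debug preview; not the method body from this commit.
    private void debugMatSketch(Mat mat, ImageView imageView) {
        if (mat == null || imageView == null) return;

        android.graphics.Bitmap bitmap = android.graphics.Bitmap.createBitmap(
                mat.cols(), mat.rows(), android.graphics.Bitmap.Config.ARGB_8888);
        org.opencv.android.Utils.matToBitmap(mat, bitmap);        // convert the single-channel Mat for display
        imageView.post(() -> imageView.setImageBitmap(bitmap));   // ImageView must be updated on the UI thread
    }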