package com.example.ueberwachungssystem.Detection;

import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.media.Image;
import android.os.CountDownTimer;
import android.util.Log;
import android.view.Surface;
import android.widget.ImageView;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.Preview;
import androidx.camera.core.VideoCapture;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;

import com.google.common.util.concurrent.ListenableFuture;

import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

import java.io.File;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
|
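/**
 * Camera-based motion detector.
 *
 * Grabs frames via the CameraX ImageAnalysis use case, compares the luminance (Y) channel of
 * consecutive frames with OpenCV and reports a violation once enough pixels have changed.
 *
 * Hypothetical usage sketch (not part of the original source); it assumes the calling
 * Activity is a LifecycleOwner and that the CAMERA permission is declared in the manifest:
 *
 * <pre>{@code
 * VideoDetector videoDetector = new VideoDetector(this);
 * videoDetector.debugProcessing(inputImageView, outputImageView);  // optional debug views
 * videoDetector.startDetection();
 * // ...
 * videoDetector.stopDetection();
 * }</pre>
 */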
@ExperimentalGetImage
public class VideoDetector extends Detector {  // "Detector" base class name is assumed; implied by super(), @Override and reportViolation()

    // Calling Activity
    private final Context context;

    // Permission handling
    private static final int CAMERA_PERMISSION_REQUEST_CODE = 3691;

    // Camera Provider
    private ProcessCameraProvider cameraProvider;
    private final ImageAnalysis imageAnalysis;
    private final VideoCapture videoCapture;

    // Logic
    private boolean isDetectionRunning = false;
    private boolean allowReportViolation = false;

    // Image Processing
    private Mat previousImage = null;

    // Debugging
    private ImageView inputImageView = null;
    private ImageView outputImageView = null;

    // Parameters
    private static final float ALARM_THRESHOLD = 0.5f;  // percent of pixels changed
    private static final long START_DELAY = 5000;       // milliseconds
    private static final android.util.Size IMAGE_RES = new android.util.Size(640, 480);

    /**
     * Constructor
     * @param context: the context of the calling activity (usually "this")
     */
    public VideoDetector(Context context) {
        super();
        this.context = context;
        this.imageAnalysis = setupImageAnalysis();
        this.videoCapture = setupVideoCapture();
    }


    /** Get the state of the detector */
    public boolean isRunning() {
        return isDetectionRunning;
    }

    /** Starts the video detection */
    @Override
    public void startDetection() {
        // Check state
        if (isDetectionRunning)
            return;

        if (!isCameraAccessAllowed()) {
            getCameraAccess();
        }

        // OpenCV startup check
        if (!OpenCVLoader.initDebug()) {
            Log.e("OpenCV", "Unable to load OpenCV!");
        } else {
            Log.d("OpenCV", "OpenCV loaded Successfully!");
        }

        // Request the camera provider and bind the analysis once it is available
        final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(context);
        cameraProviderFuture.addListener(() -> {
            try {
                cameraProvider = cameraProviderFuture.get();
                bindCameraProvider(cameraProvider);
                isDetectionRunning = true;
            } catch (ExecutionException | InterruptedException e) {
                // No errors need to be handled for this Future. This should never be reached.
            }
        }, ContextCompat.getMainExecutor(context));
    }

    /** Stops the video detection */
    @Override
    public void stopDetection() {
        if (!isDetectionRunning || imageAnalysis == null)
            return;
        cameraProvider.unbind(imageAnalysis);
        isDetectionRunning = false;
        allowReportViolation = false;
    }

    /** Permission handling */
    private boolean isCameraAccessAllowed() {
        return ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
    }
|
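    /**
     * Request camera permission from the user.
     *
     * The body of this method is not shown in the original fragment, only its call in
     * startDetection(); this is a minimal reconstruction sketch based on the ActivityCompat
     * import and the CAMERA_PERMISSION_REQUEST_CODE constant. It assumes the passed Context
     * is an Activity.
     */
    private void getCameraAccess() {
        ActivityCompat.requestPermissions((Activity) context,
                new String[]{Manifest.permission.CAMERA}, CAMERA_PERMISSION_REQUEST_CODE);
    }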

    /**
     * Binds the image analysis use case to the camera lifecycle (configure and run analysis)
     * @param cameraProvider: CameraProvider of the context passed by the constructor
     */
    private void bindCameraProvider(@NonNull ProcessCameraProvider cameraProvider) {
        // Create Preview
        //Preview preview = new Preview.Builder().build();

        // Specify which camera to use
        CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();

        cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis);

        // Delay until violation reporting is allowed
        startViolationTimer();
    }

    /** Start delay until Violation Report is allowed */
    private void startViolationTimer() {
        new CountDownTimer(START_DELAY, 100) {
            @Override
            public void onTick(long millisUntilFinished) {
            }

            @Override
            public void onFinish() {
                allowReportViolation = true;
            }
        }.start();
    }

    /** Configure the image analysis use case and attach the frame analyzer */
    private ImageAnalysis setupImageAnalysis() {
        // Configure and create Image Analysis
        ImageAnalysis.Builder builder = new ImageAnalysis.Builder();
        builder.setTargetResolution(IMAGE_RES);
        builder.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST);
        builder.setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_YUV_420_888);
        ImageAnalysis imageAnalysis = builder.build();

        // Set Analyzer
        imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(context), imageProxy -> {
            if (imageProxy.getFormat() == ImageFormat.YUV_420_888) {
                Image image = imageProxy.getImage();
                assert image != null;

                // Motion detection on the current frame
                Mat processed = processImage(image);
                int n = OpenCVHelper.countNonZeroPixels(processed);
                int pixelCount = image.getWidth() * image.getHeight();
                float percentChanged = (float) n / pixelCount;

                // Violation condition
                if (percentChanged * 100 > ALARM_THRESHOLD) {
                    if (allowReportViolation)
                        reportViolation("Video", n);
                }
            }
            imageProxy.close();
        });
        return imageAnalysis;
    }
@SuppressLint("RestrictedApi") |
|
|
|
|
|
private VideoCapture setupVideoCapture() { |
|
|
|
|
|
return new VideoCapture.Builder() |
|
|
|
|
|
.setVideoFrameRate(30) |
|
|
|
|
|
.setTargetRotation(Surface.ROTATION_0) |
|
|
|
|
|
.build(); |
|
|
} |
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
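    /**
     * Minimal sketch of how the otherwise unused videoCapture use case could record to a file
     * with the legacy androidx.camera.core.VideoCapture API. This method is not part of the
     * original class; the binding, file handling and error handling here are assumptions.
     * Recording audio additionally requires the RECORD_AUDIO permission.
     */
    @SuppressLint({"RestrictedApi", "MissingPermission"})
    private void startRecording(File outputFile) {
        if (cameraProvider == null)
            return;

        // Bind the video capture use case and start writing to the given file
        CameraSelector cameraSelector = new CameraSelector.Builder()
                .requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
        cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, videoCapture);

        VideoCapture.OutputFileOptions options = new VideoCapture.OutputFileOptions.Builder(outputFile).build();
        videoCapture.startRecording(options, ContextCompat.getMainExecutor(context),
                new VideoCapture.OnVideoSavedCallback() {
                    @Override
                    public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
                        Log.d("VideoDetector", "Video saved to " + outputFile.getAbsolutePath());
                    }

                    @Override
                    public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
                        Log.e("VideoDetector", "Video capture error: " + message);
                    }
                });
    }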

    /**
     * Process image to be used for motion detection
     * @param image: camera image that should be processed
     */
    private Mat processImage(Image image) {
        // Image transformation: keep only the luminance (Y) channel
        Mat imageMat = OpenCVHelper.extractYChannel(image);

        // Show input image
        if (inputImageView != null)
            OpenCVHelper.debugMat(imageMat, inputImageView);

        // Preprocess image
        Mat preprocessed = imageMat;
        preprocessed = OpenCVHelper.addGaussianBlur(preprocessed, new Size(21, 21));
        preprocessed = OpenCVHelper.addBlur(preprocessed, new Size(3, 3));

        // First frame: nothing to compare against yet
        if (previousImage == null) {
            previousImage = preprocessed;
        }

        // Process image: threshold the difference to the previous frame,
        // dilate the binary mask and keep only sufficiently large contours
        Mat processed = preprocessed.clone();
        processed = OpenCVHelper.thresholdPixels(processed, previousImage, 25);
        processed = OpenCVHelper.dilateBinaryMat(processed, new Size(3, 3));
        processed = OpenCVHelper.dilateBinaryMat(processed, new Size(3, 3));
        processed = OpenCVHelper.thresholdContourArea(processed, 500);

        // Keep the preprocessed frame as reference for the next call
        previousImage = preprocessed.clone();

        // Show output image
        if (outputImageView != null)
            OpenCVHelper.debugMat(processed, outputImageView);

        return processed;
    }

    /** Attach ImageViews to visualize input and output of the processing pipeline */
    public void debugProcessing(ImageView inputImageView, ImageView outputImageView) {
        this.inputImageView = inputImageView;
        this.outputImageView = outputImageView;
    }


    /** OpenCV helper methods */
    private static class OpenCVHelper {

        private OpenCVHelper() {}

        private static Mat addGaussianBlur(Mat inputMat, Size kernelSize) {
            Mat outputMat = new Mat();
            Imgproc.GaussianBlur(inputMat, outputMat, kernelSize, 0);
            return outputMat;
        }

        private static Mat addBlur(Mat inputMat, Size kernelSize) {
            Mat outputMat = new Mat();
            Imgproc.blur(inputMat, outputMat, kernelSize);
            return outputMat;
        }

        private static Mat extractYChannel(@NonNull Image img) {
            ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
            byte[] yData = new byte[yBuffer.remaining()];
            yBuffer.get(yData);

            Mat yMat = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
            yMat.put(0, 0, yData);

            return yMat;
        }

        private static Mat thresholdPixels(Mat inputMat, Mat previousImage, int threshold) {
            Mat diffImage = new Mat();
            Core.absdiff(inputMat, previousImage, diffImage);

            Mat binaryMat = new Mat();
            Imgproc.threshold(diffImage, binaryMat, threshold, 255, Imgproc.THRESH_BINARY);
            return binaryMat;
        }

        private static Mat thresholdContourArea(Mat inputMat, float areaThreshold) {
            List<MatOfPoint> contours = new ArrayList<>();
            Mat hierarchy = new Mat();
            Imgproc.findContours(inputMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            // Iterate over the contours and draw only the larger contours on the outputMat
            Mat outputMat = new Mat(inputMat.size(), inputMat.type(), new Scalar(0));
            for (MatOfPoint contour : contours) {
                double contourArea = Imgproc.contourArea(contour);
                if (contourArea > areaThreshold) {
                    Imgproc.drawContours(outputMat, Collections.singletonList(contour), 0, new Scalar(255), -1);
                }
            }
            return outputMat;
        }

        private static Mat dilateBinaryMat(Mat inputMat, Size kernelSize) {
            Mat dilatedMat = new Mat();
            Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, kernelSize);
            Imgproc.dilate(inputMat, dilatedMat, kernel);
            return dilatedMat;
        }

        private static int countNonZeroPixels(Mat inputImage) {
            if (inputImage != null)
                return Core.countNonZero(inputImage);
            else
                return 0;
        }

        private static void debugMat(Mat mat, ImageView imageView) {
            if (imageView == null || mat == null)
                return;

            Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(mat, bitmap);

            // Display the bitmap in an ImageView
            imageView.setImageBitmap(bitmap);
        }
    }
}